[Unrecoverable binary data removed: tar archive of var/home/core/zuul-output/ containing logs/kubelet.log.gz, a gzip-compressed kubelet log. The compressed payload cannot be rendered as text.]
m dLh;޶dB/3r~8+C5"BH9/ߪU)QEV]]U]_75n0)VMC|̰?.nx%\meV.T`v \`!U0J$a֠K0=u3QA+d0>?= W++U.j}ճt]Vt.cP!36,g>؜"6x`nU_8L{&>@$D5%i\4'P2˷ncP*AUn5(}C^ρi ^YNaPJfp-thmX/jGte/6%(nX#h/N}O&3ҁO KLC@gI%@ 0g/Ap`~{evNNe6]jåca,YPI-bJ!H".pd׋t\Nf T Vi56; zD(] !ީq1yS a^ݨWq<ZexD%k olJ9{?}QTTҍb4̃u#Uf;6p~<0,ky׷\;syzդ\sy 'Bi- %(c7?sqBex.'=?l_K4z3/\ij+D+%W\pEW\pEW\pEW\pEW\pEW\pEW\pEW\pEW\pEW\pEW\pEW\pE^9.s0r+e/ j +=W"p+"+"+"+"+"+"+"+"+"+"+"+"I+ϥ`/ [r+/ R:>A31W\pEW\pEW\pEW\pEW\pEW\pEW\pEW\pEW\pEW\pEW\pEW\pESWC ^\au})GC W$pŭ\pEW\pEW\pEW\pEW\pEW\pEW\pEW\pEW\pEW\pEW\pEW\pEWW'8`mfb[3 snTJ^Bڃ ڗćq|9|ةL> jaqC60ˡa}= &g\XaA,ͶG|: }x1Γ5mnԬr vN~?InLʮQE%0BnB$1*Pr/lScՎs - 7z줷uqWGh+&gkEǧ4\YV]X.Ҥ ?w ,a 5jWܦs1Wۛx|섙E MohXoB/,'G?E}90 }{+0I{v:+ZdE0 ~psO]%f0ypp{#x>8q޻wWjm;Ee?_d|IN@jQEg>M~\\o_lܥ趪Lz/ ?B_#v>n?> ]g=f^7~Y=^Sk7|vY;"Ζ x?}^+wjp aưF"k4B nI|oDzGWF6'Pܚk?ϋ &G<|%w]#}#jB.zZ=r@$A{5Mkr4~*53]|IΊX2l*k5!j0Uk1-+©?_2k}Pvt]~}\g21Η=ja QBg'A,_bzΪn<:/kϙ ?|M?k2H 0's{Ppx|k5W`rɪFC!PK8GT9)sz 1*)lZ2bچ\ptVf?K-UZWhR ɍߙ?,L֜ n?r۱ ]@9/]NJ(Isp4={.* 3o.j5\R1+@= @Q.^ iŒz߀`٫p[ FunXp OB!gګW!kz$~멕َըsY9M:+鷦xmCz#"?>=S&)&U}GX ZOI)z댔^jSJd!Cw^9?ыd-r Eی1Aɚ!dMѩūJڰkB:ga&!.6O]?5YQaX‡V`]t)c :Tud%5*CYp4H^.{P3 'cYR]ʁurVc7u"~4'm0-NzߔiٟexoƉQJxuFHEnU`Ŝ~M,2o7F Amc`gQ GCxEOVGLp%WLa2)qx2x~\ [J>`!~&f]\]= bgtn.q%BwZ%~WkIz~_ MϘprvxqvlu˦]߯`8\̨`i#}㋫نH#2VU߮z臫i{kebl[Iz{IJo.|[1b{1m-f]Xe4n "'2]ȶT.umnkaڈedb(ڷǗ-]p5)qcV:sC ϳ F{ipq7PG_ۯ7_统=7\x}/`$X˭De:-ua2)~Qr0_ |7.WlvO-EPx+Y~0#u{w̿w0Q[E݋Eҩ]]zỔklrYTr5ynUoq#f#vo zaŝukbnmv:ISO%|b*,Uπɖ/l`b3ZmIцvi#! <7uR7C'kTUB HmPs; vaLfM'ezu~dZfQkvfΏܘgk篑j2]EϓnŠ,ࢽz҄! YYLꠒ7pN6*E*MbQYLc{<̂1%dyZ;k:Q W)h/zm2^d>-"WcYW\HO\The2;Zk։7\kωsvY5 RZ v?OɤLjoNi\[g"H3>y;7}ίv yaFf>i|܁c*)4ie JR%)AcCR;QЉ:u< sau6eKxfʎs kR xƌP%^ թaUgh*LE(TԨΌoK,W95Ek8 rpSM*/Yoe6h0_NtvqGg?Q˯f)q0 %ݙ<*}={i<|/w4|nMoSzcDA&- {< 7Ǔho~؆_۩VGs"#"T+tZ"20uP9iǏvjΪ |*$㱺Pu 'ՅaۦWh7Wi&QI';& ?/)95oВ+ye,cI2LXY)ݍp*tBLMil2h(e+@T{hxw#;Gy<ݙtlzD.b<;+xF4kDC4]M͑ eO( ['vV+ҋOFv%Շc}@-9[ ~ڎYECe?>SwG"39HuԦd7^e6 :>Q#?TGvpRA:z s}-^Z"Pks)X^dJDFt,ΰot5 }H{d;2im?嗓 yV'nM dbz鞐-6(6.?;0>KJ ^dLXW:e?e?6e?eEu9:a4dkU2L[e TI'2L/}S [UIx,sE'<ԁVi)"`Jwm$IU 5pIyE݃nL̟S"$e[=':xERRj-*+3/2s$ˍaL }tE^CUϣ -sʧJV᫇n3wpW xI'M&e9 Q I01 , ; F0y0oޞR naIZdO^R Nd| Vu&(l{nvëgI$r'i$ T'5;2 K(]?.һT('ͪB(YBjw ﭚWԼRrz;kN&!kpkTboU x8N<ޑ7g4Eq0wz %Mőzts(_捔c Ⓜ. >@mNiN} Hܽ~ <_G< ȴ2.%+ﻟޏZ}ߋJ<)́/*`-eJX#gq_:Uxh.I)ܳTmЦ躶]>]bX^ny*]%i׫Q+ռyWf վ-j-f~=j (sQi3Y[Pv3؜tAj~/}BN}J.\ӛəp5Hgc(2:mJ-*-a#^N:S'~NLЙ"?3g`HLGfcBdR28)d [&eK*Fh:ō8` ; ZDl ha[0 ^И;B@.r)6rn~5iԕl|\y[hkZ+]g mޟu^ΡUZ:YL WII:~&erI+90#X,Qd 2~-`)$ŤGKy`;¦:yb@D" !bƊ`N񊸴8oJcY=29@Tyb-Fu9^eRƽc  A~_ a^~ R[2`$fw>a3qغ_=]/z]oycn=z$ hH v6e5Iң&.he,:uy? R2`K.-ʠzPW.=ehI\(wp)B"%T jg>OLj9+jWV`+C=iUgɜ\{*UhwCGzR{w x#,o77> _x9nDHܕFYb"ڇhq{oSƥ2ipI ;AF` FRm2BĈJ'%zK@P/ Cx(VxSjʈ!h#2&":69zNqd:z>7DvםZ@u4˺̊gC^ÅtDyl(wGDc J(V .5EE(0l8O yfBad$T .HdFr{%f8%JGgcD8DHkYƧ$X/KZ<^b04zo=1 ͍%˽1%zK9 < j"UaDd{ڮial/fhF ؇m^d7 ş+3u.>?նl<84BK,hUC)U!l+/#.-8/5/Ѵf ؾ԰l1OŮV* {0jgBhEU ҝ8sE1P^5ג}4_4Vd6YncLyPv۷_+A2ͪ}]meGI{S P e,x_~*~OI`F5i﫸&UpɶrJg%,q ujLܕ>ӗ0Q#ԆG&9s\ؓptGiv&G&IyvQ1oScez4Oә,NDD#GȵJB\x&@M%1x4#!#S>(*9eɠ"1M5;&֖+)AgAx~%2|}NI7zsGW:"8w(2Diyd8u!"F(m fy^Z.^0Se;Ͷ6m綫^S&i7#NF\x PR!"PO(:~2+N}R\pH}BII$@(2AJڇA?0X\Jύ Z3$ >rF! 
k4- `)< D佖豉hj4BZ"2d#h;E)]zݔő)Lb,Oi-(b#°DAϬ`H-6p,Ugeia9N`.d# T)J̨Ϩ"ė`(PTs0!&=NSFhXwvKO](' dlJa[ŭ!7!۬)FitNUV m>VrrlaSq<4 Cs<<lR;9.(Lػ~OtOS2b|#^N#Q!rKrGN}/ggdtY~sYg=<â?rdRΡ1g%-[n<7պ\רj}ahGh`y kGuugUD3G71 JBx>8(~S=a?vv {57 ve%.r.0d,Y$$2cTD'))Ak*1hIg $ؑ@4̫`]Tum4DJR0Hp:=1BIMyI>i;{{ϰgZ"cXsOzQ#.Bɧƙwz)@}yܳu9Hgv@w4_isTLDZ%'piXȅUܗYhqs.AE`j>jKS$6}^XwJ| AZN9N1Fx\2O(?A^a6 &MDF0GU<2 DSlb+{BSm/qRI.P3p: LGC$ur.Y%vʝ t^0SzB<&*"J"_ CM( BFqn9jQ@K&-k ;vtsNw杪Y-nR_RdD@ I^dq<2)4hV_>hW2kLkS5At҆ 'BktVCFe-k@k=Aʏh a0 ~*uh %\n PBpB9 _[NJ$Zo͠f͡Sd'%q@) )diF&eNq<%fZFjj'HG# Ị@ ^ BZ-\ Zk7Mm36`X'ȺVƪMN_3y$7,|:\:b^ΰr|_VjԖJoUeAz.P]7o_]_\5e~8.eRj8W]oRͯB&|gNh;qM7Yuc'(с/oMs#i79لӮnr5~\%SXޕrPeN:$n:xz[ǒ@:2ܻDe >r Q VXɝTtjk\%|>GÙx HA1}|MA/qU$KX%eKEo4k.m)jD`B8 <+HŸ^mcl.ߊ =ꐆu#k>S흒L2b2`ag:l!Y(sh!AL[ZG0Γy]Cgj&ga8yc-\ϗ/?7n6Tяp3=J? ?QߎqݭN4?zkq{;z^j%j eسJC@_Ygug_o^\U_f WAޭ>nΊa{5ɋi-ZR6̤r\WG_cٛˀ 5۩gW@O?tNޞ;X"}Es7/:? \{s&\ĢrUqpo  " =)˔1Wr8My{&jӥ;n5[i4oHi\Z<8f8(P*NfP?FE]h$;yE?~~H:}.)b7'Wlq?pVؗ˝+1-VP&,GZv,[6 aekW-,dxK-莥P붵o`]q1Y,,ᰴ)q%W-Lu&( QHp%Ff(f*,̆J8Dt< 85ĀHZ"Ü˄1[3K@D<4ns~:9n %u4qZ0_| B2S9oŚX Ab s5BgXj:9dƄ0L\?>I'B#Y4Nr)O떾ZOB?=*?T0Of]! Zq?jme]mJ[[Sڴ0y۪'J#eb:i`W)OKn$ SVpy ve2˳3rYfq7 a s#pZ(I.) ŜHfgR[s@ɔb{Cmv %E'%KY+j,ıtJ_t+q3*T9DCVCC#P|_=Rum=zsмPMSEΡ; \rMF$ }ׂF$z1rS /WfkУR#_s^7}.hpPIVIP2p`('8F)+ ΌM'9W)AD8Dȴ<#s  ߊ`I70 @b4D$O1ȥuE%IStX5`e!3O=<_I"Pxj$Cƀ;k4($#u ٻF$*ٙ.)C@c1k{xzcM4I[6XdYERdQ$@*32+282H1rI/W$n\/00$8,9:AY[c<3{odTJ;fA2+$;2 ҝDLvbb))ZLL0hW 0!$kJ`0=KD(`Un&K3R|<&Q&Qe]oлwOY RvpuߏCIO?%,4 $KQ}tvEҏ+_Fah~`-ƅg}{C!]bd1ڵyMDj SPS.nj?kn3nG:oj)P^zm|ż4ޱE"$׻6)d\{ JQ^MrlȘīyU>$S'S{^u{er]+&#&R*?&7l R$F qI~;<^W^^5|kP)6@ϟj.M4^tB=7t$%J5R6uдS'\jI%DYh:,/ZtnQVT ԧi0DS<70ޚr(b?e_)3\z!?ʩy0OouO H/۫}_]cV=9_GGW {^>8(i0N@i1 }ZuRޜ<-t_I|YBBͮ3|)V?4oAF80\P դQwf#Y^&U`7( A>=ͼ}yO=S}'pOy1C) f%KD3{]rFKA K*V"'MԌ?&pv_jJ)ķå6)3s]v$?‹(mČA;0 q>6cC—Ż[ot 64Tx` Luڠ *Oy=~[dٸd*UӖ&^;,6Lڝ~W| 2jƳMv7j*;HAQ)HŌiA1a\I ̘ \Esm_/@>=F_JGZBEf(RJa Fxb6J:7v^@oq<~m[-n;0,i/ME\xꌓfFB_a0zڋ`I a8 K_s~?|~rtXjk4ba] D":Xg>@;%xWa#@EE ؾ,l: U+u 6[mp/$O] "lfdC4u3X<\:ufDg&AescY3 2\ 1{Oᴃnlb>?(T +٘=C4sVTlf&c]~+] L|Rsb`Hqޅ韹eT 9YVIoRaTXfrՀrTJRGj9i Tc'EZq*Our1ğh}O`2$o|twb c^1)L!U tǟ, %/a: - ^eTؘQu骞1,]UVA#A*&5oj؃WG>`y9p%hkFX:ŎŎŎŎ."`wR83;E0& TS΃+ḁMj!1U x$ )[ؔL""#NDk45DlkjU"luOFOB\fis!ز<ϸ~k oeӢHgUtC JV솦]xIѦ7N:ψ\Kx>I.6WKȁ bxRyywr+8KUZ7|wmԋ[s=?5W7֭~ f_XU&< sPnnIĚ[7F]qcbkspG#WuVIknn~$ݜ#~/"X td痑eRqQ9Dw ..ϼsyk|B]S LNZ)m= 6NyreD$%lmll$l1(LfcBdR280OFZXL8TjSWQ pVA-m=3{㏂4lSl-*Rr|7{aXsY4_^[d*7Zu.ލ: ת̧1z}$Zb&\x)W+?~ٶbC䌂p)RbrPo߂]gV@\T)DS`)K`eDH':r(U"5,YyH0"}JX{K)b΂wDðqmkFf2pb1@ny`QGDN󳳝ߥ^_y[+dZ`KPB@;&ePsA4z-VoHU5j5߲AAKFv͑\%׋ZLrMNyShꕟ}5PuTۖm`.r?z3l;u$ `Hk{mRwMe:{ΐkZ*8ʝ7\ \@cZcĜE NV)Z`e{06`3 C_˟H@f/\?xWMJV,bBrPсwz!bS:%Mwc᤾o=x_h8o>I;~0'1 la&FFHҁdAm$9AWBh& Qtt6JJ18IH```RCdH${Hj2k#N*47x$]S;qO}m'VKx9qczƀy?'$;S)J)^'l@,TTsaQy",QaTKmGo,WHAB `4tǍZJC\/,GFr0uDCr<XT/܆MRhXYuWߩ:K'\KC"b /t $P B"ar*4q8p&tA,ȓqΫ{bg}`S53ֻFFc RL"SV:چ>vmAÅėrxM M/ƥz&cLQF aQ\Jl Px"~G I?ƻBv+tvX6Ē.: oAh4 )c[bzնhpNp x{&!&|AD?q2+Rxnܗ^G0I߾O+X h "KQ A5z[ *_&nta\ }ЇB>R!ctl47ѮH-2WѤѰr]RU6K7q]JpuզyAآAe/x\sCG f)H;}[_Vz*E ]+ ׺RȢ氼jӕI\(QCaf>%Jʻ{ Nxkf- \ܙ? m_mO^&o?nyS4?)}sNUG U,+75/B,_5S]5Oꛋ]3\[#uЊ-(+w&4+AV9 +c%wŪzoNLy0v۫?W@|Uv;&rO9B C5Y~~TB 0QyR} ֥M .VVBiOw.IC]7?w^27:f 9f~w'~VvƓ&O,RSd 1@=16yMk5i =Ձ?oGN$D-˖[8$"E$#bdE%"T3iaDŽr%%7H2c.[YNPg**jt2³Ǫ8iƆre%ȐZ | ˋ2aKdKK&CKŮ9=ZGwZ cRKmjWU( xԇ3LG7?|0""*ts-u,qҞPH)09r $+E:$F;I8FmgQRHR5,3&΁yz~6v$IߕOG7a^]$|p㵬hB8s"9K ` QanaQOH&}Y끒@+n0`3 ۪TxGDJNcRY[aQiƝVc 3"APfdbYR(P0Lc*LVԓR($ptDx- B`8V##!J|,9^RL\i?^S-ML$Kqy\%CG8㉤)0lxL1rDdBe"H3dWBy`'>5`VGD) ;Fm?"&~HdڢƽO?] 
EDߔҎ``{[^ ̋}EEjn\7>~փį@陬?\yWBI6 ɴ5e`WX!6SDV탩2i * t\k*@հqYK Ņ//0n}wlVa8xF&&.]}/ƦW^_׽xmFHRttكѕ$jhsX'&Q"ɷ35wiYm\_y]]x1k>xJ̶C aoaocKrlW6h|VՐ| A>guĻ{b|{Ow]ݐnfUYބ2U44QƏv>wٮSWv>dW]ZZ:M#U|wI5b=|Q}UbGtrw8yׯo^y &_޼yV?̂KEZK-U2nՓ翽oS[jK>, L~Z <·_) ]Dhay(@\]CmTC9C96.'(6,cIAuWLmNhM<6n?c#ID!'Z2`B#DdBH !pq1>nC.e5}~?3Iw \4E 4lJ4L0b0jtd99+_l:q/A}я?zΙ{w/N;6:3mU_Ɓ齻Yrf ]/&/aN(EKf-D!ť1.StOM?K7C|d@< S sO*MrΜ2&5L** lk3ݤן*; a,y͙e 3;YGL# EJcM%^jo#?zIxo'mwtҶ~0yQI&ɪ68@jUښ8 >_yv]sg4 >ჴt`*#mͭ뼫y1,ߴpmR 4sn3SV~^Zum?l;h_T{W;d8:(^N3}W;ȋ4T'ˌxѯt~_`qw\?Jk0*U=>Rہ5RiTִT28 c1D#/WZ^]z }*Y۱(}@&@t^"aD;0Xθ%|aG#B2 RVa 8cS84 D佖豉hj4BZ"2۱9j'de4k udz2b,O–HaPQl3v 8VɽϛC!IFf@.(qCvL}d1,,VJ\?Qt+}}Fs9˦'fg-j8W4. uZ׷{ trVM2ۭMMt[TeݑeG.+*)qR!tg \D$a K.)Ƒ Gk#'8>jK ^Ȍ ^`A Ɯ qlXM3B  O~|6O̸#`}Gqz?uM~_P`aПξpL;RDA 8 MAL^ 9`%GbgҼFÐ= JIt"%/ A) Ʊх^rg?bЯٴXQ_P`wL{BFRo c4hYԳL#88f`!3 hG9AQNq )l#ȁQfGa6qaeԷ0 ͥ"Ȍ₈Dܙ1UDo1a`4ta [|YlhZaxeEDh!!H;j@GtS/ gN X 952o.wA6q#w^\ui323. .^pql8RҞh6z=^ )@ϱH#7N,f\<. ҎcPeCu' .VkAi3.>RQT8]Y[ BDvǍb+S>xfJɴB0Jĉ G1%QL]jy%"UG8B~ގ#Q#v"GȣdNJ"FH3 Iς GT̚Q Ÿ`lfeքK= T# be}0 Ԕ1|h#2&"Yg@Gȉ5OFݟ#yD~JrktCIb;yOg#X8IAXi<Bţ!وYBbPTI m;2 $ jX0Y}`](Wm\l']&LF2m 3UJI/d*rJwDUؙW"g&(Mf뾸jIDʑ0ƱQ$!O^ 0S6'" (R{WR,iNj4vCP q`8-RLpUu3lްhwgZ~W Y4yÆb2,!5k%YNuwmIU:g5YņG;uGQ($idk_`lE4"$H"lBU֑Y_foyI(SY69R3SNd6T xWsԩ[εpp So@\{. åh\kUD&c#EDM7{,ռ\It<&qTI !+5<$V5E"Lxpp39zwRXŠ,Xljގ>PPVq:=-bɪJahJu g ZJ COT)J ^ʒDyM//czS֫N3=w٠o^l3|C9J; 20m&儰+!L/a|"A'`Fs$8< Mc>)A'>_z/U#.s43&Ft( EU:A H|EBR>{TgLisMiv=>{׳.bvKª|֤DZ&QOȚ ž5)KS&%jWHT [FkW֧jCDM޻]))+@ OpwF_ Qj2$D5m@ no/>\ምV &[Q}g`HL5NB `dp SSL8TvU(=7 *h%{gqoR0QƮ uEnS%ɲtԵqmYZY}]{!]M xҦvE' .@&NF%`'s~ќ!cܔ\i$8O8LZ ԄupT 2 mtC3y_ͦ06YnphjaQوbK#7=$z7_!,[P.*sc̱|6bzSy$"kY:yAfxGi,^`] ^/pżQvs1;ϮUV,/jL5';_IAIZZǘHIYelcJ# hHmoj?(&hb 5%_ܔ~c}7q"Bې FI< K YmXBw M[_pt3l6hsSJs".rˆ}b & -AljvZ2J-]+@jUCGTZ"F:T[F6Y`I1<\N%h{`iO =/9 r3i:ΌNBn<Ք 4fn:5ֆo]Wpմ r+iRR&]^/Ȁ\L %9.Zer;:n\Yt-shYulΫ=/~&іݏsnsG.Cꎎ+5#~@zKVѬ_7m=umMoWzRm揣3$a͍;|^ɮy*+ w~w \r4K/\nteޡ{Puwj-KbRi!Fh (,DtRþ"3vgh6\|=b94kf7*w>7׳MP#[9<Û#cDDQ2 @ 8P@\%HQE Γ)ɿ{eq;z_fVur&{נ{فb9(G"CSՊEC=CHބ:@^oއ\\JX/18k_VޟtYt6p0 ##QZp@2 6+!4q(Q:T?Rcp{,W"Rd@ 'KFRC1Fﭧ\a<]P#'1#RHZo{ YwNDҖ~w-|FgͽZj(9Tvs;}]]I*)-&#V Rz0{_ʾ_vQNtuBV'i4,\|gmWM\R)\)zK#6xDž[7 ,ʮǫߥ^qY) Wh>xu \능oyCyNz%`>^L»{ A|xkf^2\!_˥2_@]z`ǣןֿ\*4/M aeG U\'K=Η<8oٴҴk Vl6ewnlN{BZHϤ0ׅ=Ntm w&@MU|B+v^=:z$tG B,Ot.@ #)6gF H>Iy6;J=[Ogs9w~Zk0} &OVe#GBZ\T{AZ (Bʏb+nqKq10 f1grH8+Q׳B]֓\L)`-@KZGK!hL8P=x:FY^1TkjXu=lj `aOgZyP]lЮ Cǂzԃ2:]]EVJ{(r35M n`bd-,eIȣ웷Ë!f$Us&͕ :wYd`ڈ|oXqU>L߼N"uTa~0 !];2~ j) c~@JHn8b$S+aDB?^eW/B1R)̾`p&3:͖0{;%\kו$xm+՜֊ѠÇ<QDZsO;Tab@&. /`죲ˣ|N=f1|t/ܽO)zGF?y}/jb֔G%^zI@M `"s b429sC? ]贴:vფྷ%<ۄ涥\\xPL'IĦTyŷ__Vv~Z(ԁzzY-$~=NK^&+wk DnmXU2gsJ9FVGs)K.H 8pڦHYp#ZLBd6RMVme, Q="hKuo+Aa:T)rR[^%^]{Ո|(c wJ .FyZ_I72!@gÿ'T;UCNj䝺\{fL dy0x-JM\r܀ť/ S3Ni wQ]Owf]Lh֗Bř _' 0Bf2ܩ6 S1@C{*6/OI:Ľ|)6wpfR lH;Y"tBp:Y.)$r"`d]&v+",UWA Tu~V}~ոhPx"}Snd8"F;&{J0hPu\wYwd7O#/r].uU"q;uT_Qˊ^֠]RmƀL12e/ I'2%C`R\AK/65zTGF3Ōantr+Ydj1QewfsÊ÷)O\{@Slgv\D@iΊX2d K9Q.& yF#YUC;SPҊhdG9$rBti9eH զA ~9=S;Q̤Ohi%LFWcXVn~{{!┻kuϺ(5I(K7* /~243YQIa!j$`'QN;'3vB[",Ie)E[epQJ&оyNڎզ!T9rpS &3Zb%-Y R9 )xZ{`$dp )4- 3ebm{,h"ppn1-.%FH9l8u6D2arkj \xL},dadR|w}s&rp#nHX+]E.-;/o/˧00O˟7ZZCxS -k>l7|%RoAd!GT&%ǿu,y`*$Uibgn,$=&16Xk9Ƕ6̥KӲC8xC2. 
'L"eA|Ȭ9A/A0}<T9ԙPV==S|M7pdWh/_|?_NK$ Zzj0A7pN6*E2XT?cnЭhƝ~LHF -HY*Xҹ ?f2JrG׭uLX:}VdE"%[0Ӯ -#2k׫6})tH۷T->o&Di&_\J1|̱ZBJ U{S>P}WGBZ Rz^_Ϥ_TC~2y6SUK[qo?{g(nwH *f$оA6Q -}BQE㏥Vp*Z$sΕ`J_"ô%RvΠ3ABC=JWUvS$LqG |?yyMĻ܊dm*#I @q3)fґy0Lxw iOd3r`=&I~{DgOδ ߤ~x;%MΣbIQF)/!q9dQ[pN^8g%cH+Il`'~}c8bry2U>A>L IJ(BtS/;Nsͥ}{{m~JѧAjz^񔧁zq؛R;~ =?~s^}n1|7}.9W/sȻm'w6'BaP~k`AP|FQVnGT*# ge1:]*+U!Ʃ|_&;4-1/k[7{>Ex8s{ 6e{=j{ezQeGػ&!jKl/gë׊}NiZYY7\LIKL6uN+i3PSZ rƣ"h mk›w=}Ҫ-]ٽ'rj MNhr]uC/i95.ڬI7ՏWZ6ԲQݮZOziX٠[-Wx4Z›gބI,^FY{g~ϼ+b(9yMc5N1.\K=WUn#)LVS?q6]"in 4BFPT11M"k"ϱqYY)tup ]H3:D.4&N1EIF&T<*RD*H9hTp:ȑt4x< S1gJwHgvv]bއcqw=h{0?!O0Eyc mdĢh(Qr(2#2"0CJf/hlYIo=jזrt~H3x6W.>銓7:1a:^O:Lea:)WALW*E>~y Ik9FHၱkTyʫ[8SS$Ap$O4mh,NF7Y*/ Rɮjhr&^4仂3±E$8ʮJgE! \w/ uUg~'3hG^ }чBb>R>cthrI$%&WHfR7|Rsx ǎ]aCJڼW&]qF }LbD, 2{|0|/]ԣ"-t^@j%kl5|k[PkS Kh#hxxً !1n(IY NJַBdV¦˪EժNp+J> -D-TE 0eu!KJLH}*«} Nxkf*wg.o %WA~K957f)2(?Ul4}}yˇR|m{r{$iU UlߗœnPiZӍBl_U,MolW}+er5;Z~߂{N(sA1P^tW}6'_{i,.YncLJ( A|jAyO=SZ9 ˂%Y>:9$ʾ-J 0yZ5k³u-\ZY 9%^U68^rƞ>w^4e &f Il[JzGiv&LRU.P0A{Uܣ1ǼUIMeRQL6Ձݳ`v+DD&dɾZIgoTwJs"r12僢SF *r4Pcjm$1齕鉹*ڒD >__9~pҍJGZBEf(8 .@Rh% MVW4J춭5-nz'0f\XӾz,B-“U8YȆrMХy \cZOQsDiNMZHlX49H{#J768^ v)'8& 'TUwv`R)S©W1ϵtֱNKhܧwY-TS3 9ᚴ L: ڄR y$L6(K)>^jF L[#gψ4=|!WX(h>qMU˵Lw(/oo>]Znx-k-9l:&Yi5,+!8i4 DO}o]SWvZצN=@^W.1EQH[RSHBC[p-#?$/umkB P.u^O)lMR;_T}5+w3d8-9tY`4sE{gj"ȮwjnAFɎZm߇z@G(U!,Ns31[ u%7ih 5(O+Lw帶xS]=iyHG4|ȎP[͌C|d yL`\@DaJjF\[2 =&T!x92J#"2 qG\$JOƶGZ#~]nS C9*2qDZHZMԞHSSB*rȍ/w_3(rpk;#"+pU婴xPJ+FO' Z(}@&@t^"aD;0XR2.h !AXшv)> Ap )[A8c0{-#chnD [#gë">h.ơ,b,2P@PĠ K14&NjIc{)x$gD,vGEwo> ,`r(S{+%ReFm5\M 6wеNM@O$ p6-i\@|co& krVE,e[7YR`Z/톄3w?U|oIC,b08g#Ǎ;L%weS_CrއT%⺸HUV+$S!hF"g8"!•*;A7*!xeת.a5uuU1J\l%護ʄU"1غĠ% !542Uc`#P,“*$|,=ydؤ{9F{pu/?PWW7_.8#8- ʘK^҆0)=Z+0Jgd T3R{ae +`"lJm&C)8ѷc֡sfO#2r1BFǡVS[ejg Y;ԀӞ׼5) "7A%ӕu]UrAuVڔzhr@/h5!2Ek4#ݚcejG:ĪGp09/qi, cQLD܈`JiXXY*+6UWQ1BYQ9eT?(ivq6\&TIrY^T~y_>F xN?Fɡ\4sd.f.>;K^QZZO{V8}Udh\c>>Xrdx88v`xxPufYIY#;e1xYaMc 8R я[Ubחױ0BB^ʲ,-eN`^,V6B&̯) '~B2p+ s`g**(5r.5b \L@gsPZ$`FCU V!(8 B\8:2r̖e`r:BU/nn>GqDOݪ{Π.H.XcnZYk`ZlE-XV+Lޫ\3&{ݳ.j.Kzij뢷FƂ!L ^PUQQ] U:jhv<3PEW]o\Yb)uV̂V"&$>wnW7WV3=bt8Rʀ2xcʱ@ +%Uiԓ 591 yL H&T4POkqWP6[*2* 'L WYanW4Jel幭KOK*QYa<'6š,Uবk,\GTy[!Ceϛ8GOlbdbjcM0! 66„$(TЉ Jnsa&9) &\\Rn".Q#=N':(k[c +7wO_{ ֥ǵ؎>[|/ƿ8X,ݜcDһ.׌ş?vɄ=Ux'im6^?w &Ok}+hM??,4o&ÇK:׊&x\PAn3H6ufr5eBpcѼ{T(lXh.*Q ]4x]ʪN/zOCk1|o[}[y+K-P+fpVp7oH?|aE<|0;_$tyN'7o+mԿ>b)8gWF^,oH$ܱx kk|z =|M uh|ps-FL#6sDTkm1REfhiiu+dR9.?o{5N]e /W˫(,PEG+_HyN~\t7>J;x+MCY3unnny*.4%rAjĎEID:/\qQc |j?;1q14Jixώ$J‰ysm1Ӫ4eaUP{P>a\<:+r81!.oo[jXaup7ʸ#'}-?Jq5+hN /u׳-$Izvi6QWUQ( ,((T (Q,Z\ʝ|D'M/:U%/"\Qa+*.*툥a/onSQm./^=:;E> ٰUruuR?41*{~cկc]~;1kM;Q_/6\vejFim_iF݋b{~ccu:o8w?;J. 
֥?gy8`w W5uұ+0}r&iY4广{xa9hCIC[wj㿮In' pL4;yyy^rEziNmv|.jZqqݜ7A}ϢPSws({C7a -%;wN}y[;m|;{?W^ fT?]pq63#=qTL 5qw9<Fj&wڧf4N9CV̐.j%9EO5Ew"SSռtHE˫:cX~jw0`%C#d'NqаI$\a!L$U L*[׮r|&yB2N1HPJ7u\J7+&pJW$8K)l*BO>[ٺ#1]N~w'796WM`ֹ4ϕLP{i"M?px3qǿ?O;S^/qM~jO5yOgizj&jv=gGhB┞ɅdpEjOx\J3f+}YBBdpErJWVTg\WK-TB"F$+;v2^jͩ++RȸkuBBr ~r+R;}gTjQd\W;!Sڻ":vkb"2]WZ !\`HZ;u\J38K\I[]?ª fB"$𽙅{[Jq}RM -Jo٥6y,;E%< PRQsDOrb7Zn7Jݜ9 VmS:GFdpEr-OWr6u\Jq5G\Qa%`ʐ\=Փ?'Vd\\َ]oV΍_ټ`R#㪟\5&r?f3~*l\ٌCK%-OW(HW$wPԎe]S9%W+A S 毰w=HV ՊS)d q%1 s HuE*wq*Np 4d+Rk`"6;sĕ=~m:[$%Վj۩TJq5C\d2ˉkwPǝTRn:#l4MǪ$cS9KӯieY+<4K%߫au VX4vԴnIAG\ :brmn u*uL/厤lʇj=INe_^|TB 6B&ѐ\iRhHX4yi H S V"}r 2\Z=}rRiU q唁nР`e IG%c]Z94Re\\]s:"Zz f[KO 0%|SKع:fw-=ȵӌF 6|dS\;gOhJ'zj.jv=דGd `?VLW҉JhN%+JW$wWԂ:H62^WR[DB"%+K*"qVFZ kSmrmIcڅtj8es/'Ré'Z}][߈Uy&SuƿAOpb=9 "zzГLn 66\\ɓn"+@JQ$xT=: Pb""[WoWcsƌxFSa⪗ZyC}RL,Ɠϸ:9gBp2 jRz31u\J W3VAB^#PBD*Bb=U*q5C\IO^qP\`.ӱH.TpEj+Riu5O\Yt+@`wUl] oG+2bcI66Ha]FwUE")[Y俿!R"%JYJcfgUT7?_&a̧qa+UAcd&@666t>H*pĆXٜMRB!1e_ ,&4~ghn==V8W{:ꜾxS` ^Xud44>gדP)[?\k5ʹYMV|Qj^,3nf'-CS@Wr1`KRCiO=4_5ҿiv5I1ˈkz5&?9oN8LlYʬ囓bTU+Ý ߿*%_B7 FT ss}8 ZڄUCirz/YZ+O#l Ѧ><{Ra-a-hQ3\ҖzVќ8K8,&W ٟ7MSPZWeU Lը"tjo!>{4DeϤ}i:#!jX5N*Y,Z\DŽP EwB\,mzD DYBJj eI(΃`a"Z3zj1k}(2kdcMW,7%X\XBbs-ZWu6 j^]X|N\}`v~(u ySʹn0%F~[z1'k*w*ʫ@;/."Li(1hYvQ:g x̞ 2'KLtI6,G:I4Rdуw-TӅ XӵUM"4š$ʙJU`u\WdǒF@E2N UwwH/-eYjz_sH3!ut4+};;A>!W,8pB8So;ǣ>FSpGDg\2q2f'-q.$u:˓zJx~Z[t-ERTh!1oKۛkG7K>[HPNDv5LxGCC"ʹ\(A^;=w/Us;'ST]C&摧M&g|ɻ+3*ѧӝ㝋0?;57^ͦg -f4<|l>_s3 MATII5Fƺլ RZLQO4e*>.]_?Oo#qѻo^7W/߽{͛w\wݛ^ъ'iZNv]J2¦gR0j_kף%@;a0\Q~Y  ˏPhW-`QĮyl Q_ՂU5շZ:Oղ^/|zm]QJ^)5c!N- Gvh}S:CGÕ-l[[/nGĽ8BD2LE.F憋,'4,lƢW^ˠ͑#}ng\F{~;DbϥZ=Ͻ8ƝpaN8_{ԺǷ|fQ6{9eV1I8Q)eYs>x 2 NqOg?t6~6=~db2lO_[v,kq~cvv9&s{m GwGx7$4AkCa6%1fՋ^!0 a nn1ųC2N'7O/OVՓCJ)"z';W$. xlY|D]{^rVz4j~?_u>i{ԤQ.^6=NRg[,_L4>DUBmOzD؎v ݖv\4A0q9W>k!x sR*(*{4wA:qis=]n @3"dIF5 }PC*zTΣ{m"l`"̊LPg\$&d6jkߪqD2dk>$dYHYq5lGN,brd-G5n: {;tkǫ$s<+ri6Fǹscie9iL $y=꺚o9jv=پMg2} 8%|6*| Eغq؂c2'*?VJXU9J`\P*ylBlq>whNpҾaȲp %)-ǔP:%sE&O K)@֕D B%UЦR2\tܧQq1zP!zCu46 ӲaZs هd*{5yfL{[*g97Ik9IDAX(VIsW$x^nH7LA")RKLEZeEm9w+jz?ix2K~a\V0_˻59y{өxʹ^p:lvw^ۙ(y>Ž;J*z) rKݛ>*MyL"XXbQlN MBV[yAzng3b7s%= 8m1(0$t9`nd\ F֚s72gICPꈅOMg2nofy!*S_0Ի@A#vR1d#<0"'kY@Xz$cJGflw7ɬh ( X0PB+2q;*9˘sLs)&$ho9{spG>\lfey8)x2I L{bTRXZ2 1X}ㅚcb6z9j[$Lf.VLY P)JnZrhTCgJ€g/#F |NVnPΊߞSgʏf4BWNc.,)tvW?c~xn_ikݤ a뢐L d68b{83Q]MEBa$ ncVЪ2k.[H<7(ܛQ *2{_^*~[hee՝g}zYxLKxAwY)3iA'58 `:: >.\]HsՇ ˄cѰ[Й;kHE>ג!k3;yTh_[[xKSL[h)ؤlJa=W#68R$#Z2SJx->cږ)RL?swЌz?lzO}: K E\,[~:9x4ܿ %_eJ<϶`/߂yL^1flB-@=pa]25^v4@F{Nx8$SB/y9 K<%^3*$+lx|V>0^Z;obɝ"Ex)Ahp&au(`M e6|Z8b}hEJV#UWUs.Zw ;|hZ_uVA bx9ɡtyYz}Н#ʬ^ջ{JPyAW^nPBa4DlMFgU#WPB{t5pY7.]C̣˗<&ڃD-4N7?n0uݼ@^nr䄫^%H(<:mr--6a#tVRҏ<.>#g`HL:r `dp kaM`2(XRn>hVQ pVA-m=3{"/hl;E[lťcRП4qr7B{ɪOdM$=1gOpGMm>sm kUN e{3YWϣ_-9BB,[JL.0"~ b$6n;}Ƭā&R5+c9Sc B@UZꀉ qh1y/{KWf{n7&>c^I$'M&i jB:N*GaTyd6:Mw[bU8oȂXr j0][o;@`f\IkFhNj:Rjsfȵ1O kw1)7/`D=u.H<HX?:{,NAaqցd1-XA/, FTEǸίju3tny$ky:k3ϗ mmwT@TOqۏqg5y'ʌoa,EtA6†vfjzZ8ŤC #0Cn#@6GGg!bDnQvo91,N8OIJ.C\VвN2o P_ϬE"( CDc0 +P@\%hk*P`pz4_ɖi4*kc&PfTetۨ@ 5C,Xm ^ɳ^:N>)ɮ=r{jl+:0%G#szfTpF('sr@m5R⻨yz3= Ϡ}7;P{B,E([Bdr*1Z]pg@ER#ЛMHiwc a>'ua'ݸqI&~Ĵ'WϧsP"ՂK@ H8AWBh& Qtt)("X{A4i|L:`x-'Y<^b04zo=1 ͍%9iԎSz!jg.#!<&77J5Q$#l(E{^<\' <h7ZsaQy",Q,zPU,1rHB2*xZhr;iy^DQ6+N{!dTÁ 0yZAD׍yux;yF_ANyP1޽72*b Њ@6Dq[[n7,ejѰf7mDj+RԶfwc-6@<-*Fø@d62!,K"@;`|{orzqZZm mq9%_?*K(LUV,!io4߾R|wbYTyxli>>,E܍\R$MoIL)G3>/e6}UgBجBag z7۽vË>f?Irʦ嗔:L)qE-M/WhjL/ZK&03jj>5pZՐnd +ZS^埳],Dˍ/ų[3}WvEc}鼗d^ _Tv͝c >?~ߔ2@Kۛ_ֿTt7٪'_ˏ.ύ`  ϬJOP17Q@?WK6bu/O'=!V _f.gjf>B+ַQ9,AZ΅ yZս9 <*c!֟e` x}2zrgY4#,=촟v€1U eěd`w K*@+I`w}i?VqvSI dC2lj]G,Y$kU5nx7ە56Ęa4Ϊ7U]C kɇ6'cSl_@Q6Jtzb.ϧ {1c {RRyɪ\$b{ni=: 
%{JsVj蠍m\oV-ުƴQiRKJx|0Z]s#$Dg Sx㡖"7Mm$"!bd 2dP[̘Vk˕ ɌjZ9rmxyE-}`kH"}/ R Cܡl>bT7Sb Z)l4OUz^.^0wDF}Cҡ8P|-'ݰ8e -£Ǫ(%IO|\-0,L yhd_MbaၱkT V0b9Yw9 4F؛a&b@D" AchLz½՘9`7]+ş?8X/hD<3ѣ;szT*l>g/w>}4RG7ǎ$ԜU_>TKa@<K t} IEޞ+?O]qo; (ݻ0o?$($ûwVJ[EJB0uQH&[2aΊnIO|[A.Knu"cpFFc-Spo .KɳnyS6y s l AfmNJ0YYeuC>~A=Pݬ. LKxAwYg:*KA{166+gQV?O@`.+=i8z)\FbX.=-PˆrWS96Y0ɲ #3/9D4oݛl4E=&P=ss,3 \q΋6TOo7̧tr+}鞍Mי56-iߟY400_A?D3lѹȚ F<%h;EH oHb~*!IK7$)x+ FN@` zY[pe'WIWsW&k$T'W .}2\%i#$l8QvC+ۉaj;i; \m'Wd "\=uakNgWۉĩUVc+L%BWWj. ,~v \%iѳ$&\B +LN\ \%qɰ$ R^!\1(Q'W d ĕd U;\Z^!\q& =v2t2pT e*IYWL#&ЬK, efDF{-1Ƨ*t`:+Щt쒔W )޳{Imzvc(Kyg▱@ߊ-rumc`bҹVpb#V rIws'7n㮃Я. &@Ew,[89x+b_&nRlc)n8VG`RGSJmΌ6>)n#;׾=lY{ɱ@5Q8wJÃ&HGG}\%4"pz9,\۞-D{Kja+pΈd e` .ۊhoFf^m$ [=\mI[hw(Y"Qd J!F cKQqAKaG#Bp@x2 RVa 2 MX$"﵌FMFSr ,q)4~Zhc:S$fZE)6%EbY $/UFjIce"x+X 9N`.BʩB+,Vm#GEȗ6_ fq;d60HV1"K^IN9bŒmwydU),s팕mugqpt;qwDo59947Hnt|w37y^WdrZsy餍YV~R7}*_ʻfd"H 6 % MxIγ^l2g3@qL&}\*XÍ k2-ckm*$P[Zd ~ u EN&iMyn`08O߸FSr 5و$&8儨Fʞ\+$E$ZVֺ[kp;)asT (sюLM!7mW;~P-&_>~lu R:g~]z…Nm/Fx4Sݧቩ>9SxUp4΃8\6U;X6Q* ^#!{Nt-S2䈠LOArْ|q z|2{m' <2'V!EaVIsy5˹w,H1RewfsÖͷ~wyk':v(Gmik笙6Dd !)tI<&g0Ba6dl|D]r,td:,1+&&>*p9y):$ho;{;oF~[1qV"9JOĖF@05wGb57Stirp1*! /ie43YQIaSԒ՟9R8IjZ0,͚h .J -Hg ڎl;{2!IS)NI&\js+g%HsLN!jM#) ١)6 {MhXG^Bu1qCb [S]1m#ީ@zU FUSvKwcgS0{I`1%5"3hiFCVD6qyұS*u;iXF_*-Π;܁B(ne.l礷!inX)CGw6X=Ov'OqO9LvM TIGAVBKϤ} fJ[gD!ZM^94xܷ5`0zw0-kO |m3$ٓXIP(>RIqI$F,Zǣi, QEkPXeNRf *;tz)XeD l :8qVD6+>Rp4- ZyIQ l)cTu@q95_II%gPZҜe6~!:#w]E:$͇b%v,;F6 Q2`/AJH6 :3x:,9͇zD9fbH /: W5 F) o(%i?&獮ҋ¢R(4sozWa47㔻NH蕭K/ދ27+C3-MgRW3 q ;{gpdqpw6>Á:C"xHŀ:߆җ w4nt9{Y$Ӻ`7ocQyb?]&Fϫ|J21'׳]ggFR嫍߷N\{UM(N{ۋk~]Tͮ/p;jm4.;sj6ߏs7 AH;KXSK%7tԌhnƺfV )-pT'2e wܷٿOܬV%ZmzWôV'%:R$>zrs~{Yly!ar/oHDW?ח? w^]y8bEܽ,~3)~!؟6ijWtߛ3*=B_7^OG4 Go0сϚ뫟oZMKiZߧ]+7J%:]CmBx[6 &K;ҌꍗkNu3=[:"Ð4&'VypEAz a3ZmN驝 RKXN%J[eUDP%lP1+wΣ$I0xieOgۛmM&J'?}|:6y_zNa;u'"YLb?LI4.4j0W-A%o*ᜬT`!e;Ģd$“>¶bC3:Bw~sCJ lZY` t.%TsHW*!f2Jro^% [$}@`YHɊ:E [#U٥,bpe2VGPJb-_iW̛WT﫯.8}ʱ< XÞ͋ <>+D.`^֟P~4CP:;߲|Er'mG?h|wT-_F$-Uox\wo?[\WR,WҋgIYi1^ֺă/=/.fş砋>sneA}tE 5 k4t~:jwy9,^е%mݿEG_^+օ ,&jq7@ߙގz~[oxvzdztCYw7/z3}-9w+2X35NOn&d%f&"LBe6 Ӊ=NLg t$k PމiC5Z1K61dD _kFS:3/i% ]Y.#vz 4IUܧOpYF-}[zsqbĈƶ1k[͢w3B% *XC *+Od'X8ㅳx gP)%DF,p,& P,Jݒ9jr:ȑSwt;&(#4Ĝ **_ vIڶG"]ԇف+ LfpAe](M[[%OZ*_OP :G s«τW?Zڤo0Ŭ$^9f\0*J*Ù?N 0Ta,Xh9J^,+yvǬJiKAgN2.14ޣt":"qXpQniK'6WDM+'ɦ1XM@d2'4!#N3s pRVw޾'mS LWoy2ߵ1bjl'~Ls2~x7!CӔZeRIU sȜjb@Q)ɠV*9'F/T's aU kD>8|H1H;e /5TDC Zyb* <ˈV _C Tʮ0ABC"D`ӑµ~\Mg+ <ڠGdAP{SGzo va4pqp}'@~ ĉv=~@xgJ,k@jRzTN*>N0m,Yˬ+}IL,GXzȆ]l݈hVQ:U:9YxJ~s3^|!"Ds%Bq!=W Ȍ ͼXc236 Ӳ˭e^NCn7q|:bCa>7wx֎d1k .{VŪUZ?)t{P:ES,zdC2B,*9+nݾu{y͕WWyz~k߮w?yvAO7T\e\_DKK^mƚuk8nv< %Bd/Znm'KDڏEte%vC-=oV7)`,~uA6.v]jSkYJ J0w 1 ED;o6GGg!bD^j`T9F$ڢXYL^jʈhA #(H8HlFΖo7~ VѸ"nPl;mvV,?f+˯OئXjQ2 ,H*"Ēi iw1VYE 62( FT`p'%5r6KJ=q2cS@ٻr\g9)JSٮ,=YnY f.FɯmGE%,Fp>] b.ԆQ(%s#wvgq=~=1hnv`&cG1X"Q c!ʩj"v!P=CHބ:@n~ XAZ>=*G.ނ}9㞛n\W=l˧ùL(DjdAm$+!4q(Q::%TQ!rƇ  )ve T 'KFRC1Fﭧ\a<[P#'1ns#u``57gN"iJL@w[ /Ruk+Jlj(9T <ۣX49 T&KT);LKmGo,WHBOK3O/}=!sC"b h^@HF+tAEj=6,Uip0){#*ҿdWI$x0$*SGǝԷ&Nj m(IDNJQۖ]n&<=-Kmx*Euø~m*eBXuJm j,D0 @womn"28?'8SJ_]U91Q(ẊhOϧUc6F7.ĢJ7nKKY"-wR(MDg~KZ.Cm#6xYG~,)6Iv=HTK .PԸBƫ35µF(3jj8jHtV/2yfS,f,@l76g[7WEcc&CɽP͕c .>?~[JL_~.AUA]tyn kŨo\8[}Ep*U|{`>j,1 cJdrR5'=V_{,9u%yOЊ&o }8Es5GsHcA֟*1T~\p~{crgYt#,# ȅc =J0S,xW|!T@*I`w ׉7Uj7"\q&gΆpO׈[^+ʚHP+k|c$ZJW;֫Dޒw}"?5O~Pc3p8([w૟3`4@H]tpv+90"|Z:`kIߊL{޽ H- !s"HfQMRFN]k#<1JjWz]'az=z 5n6֛עd[l<$Bzc߲^2Iݝ=ݩ愖'dkAr9v96Pix`,(3xn23Ńt;4FXaMŀ!ZyD$ƊJفvc"{d!{&g %@z&6E-3ȨO|Pk6Fj_gW2݊5qq촣}iG\yętSOT*܁c`ˣ{kXIY{x|a(zj8yZ?:?$qx=~xè%qK}4W2:E Z)np09sH[G‰uXTu=+.cNA2$Up;oB!z jg>LLj9+jRNNjG׺q@jޣo^ X^Y@[ 2CQa[TX;Uo?R~Dg` G-h~d;x6gt˩ Mg rë 1 *%VIZԷͧ?0dL'(/h0\XeMvL ‰;_ 
zt|90eTK v[֖D-ImbAOɞְ${Wv"XcgUm7rREUۍZF%T5کjz,xlmJ \%rmYD*QI:zp-  \%r:J*~pԢWTHU"ɣD.WZypTWL"L:J⣁D-%WJ;zp%KF$>r:JQo'*ew*EsZ<[t/>ZX9bgo Iu\Lr29=Gt~H*eQهɩ9ot9瘝**Xws=g3vmDzbw5zQJJzpT\z43qo+>;-b%.Me@`~}ULؕWJSTEi|eFhCH(fAE+n&ՆS⥢Ya96S=A {daվVjbeXb(E#X'\k=qH,JqmƜP$h`/GJjVDLD!e!x%jИ tk1hFs+%ZZElWFNR5R;o 2rgDd̩M4PdXM$xAa#!7ƆZGb4:c !H`F }K`LG"6xfe7܁!S@Pʺ Rũ1*0)ς;O 1Rb)| Fp4'h_0 Ta8F'JjkP2DS*x"c!HNRDF&7΄(9ǀs(R 0(RP@.jl A)@¦Qk&:y5Z1&PfTe@aYo` ɧ&$:d`Zr"5oX$T%Ey$ 8D;f`e)A@Ny5xZR !Z4qN9ťzsH_!ӜD<~ 2eR#JϿMjnE6ٙ Xx9UeDV: *OA2Rs>IC_TpuTK ؒerLh/( ]PS=E_?k}Fo"Bڕ=[a6#UC+2WLqCf2a=rQSH$A,ga%qq|Jeq3yvC zT,,8N:p0wL ?K1R,pΘ'T HqAv[2؀p R TΙAF%4Ǣ5XT5H'd"XP!_|()(H󼯮Œ8FGTOu!))"F߶Rrp ˌc-oRH_i [, MʻS`.d$1`v٥#PP>6iIŒ'23S_ݫ}3Tel#瓊q MZB`B< c;Op3]yY'MlK-,>ԂY/e_w[V'fILㆷ x @pp8L$OFVٕ4Gte uIJ2# ,ÒT<)` XqQ|IŢ!ʃV 4xEb.(2U fʇ)͎ZCJ1:-dxIH0)nBʚ}Bvx,)! A>XTu,ҩ3 yIe*F.ۆjBZ!ATDq.#1V&{||IݼKX'K34݌'[k'=H#26!z;d6Ns`}Dzq:EjC,T&Pwbӂ p^P @ SQ!}D,9{Ug$%iBEKHg_&c$w  uPtP*\bK'yyis e&F̓O$L(")<!ndb8I!dNdM1Q:' AH*@P (Zu<\:il3368,K}9QrTZZ8X+;'MrDTPIo0JiHP+-[5 ܛ3藣bZfwr!%g 7 "}|VV fVY6)F;7]nگw}vǴnsT&KOA0$qnSG>_Zam=23Y:nT'M32\m=n(^bl9,H9<)9ꡂ|B1C[ 9Gy_I'%V25t+J`; % g 9&GȔ<"QvՓbơ0P|__",fb ursHZ8Zwb,'M + F-h sR"=͍u#&_q&Ag*]q̦%?hvn;:HFuUYAneF&_uMh'rH&si@Zip)w6xu{PiKɖ?A$޸2Ts[5q5H(b>P+J 9֋eBVNN 9%pUƞtk B%R̵W$9! pQi|\l84f:ŃUBҔE"&b9`uMf=I`]EHf0Oх˟zQel$`M,ҲI?!tAyG5"9`d!UCQ/9YAke2jwa4;Tugj-]4z?(Nǃ}tK4(ޑiaCvyX7fwbNޟ麾oX}miӧa'ߴa=TЫm(n?Lg9/~w98x_(oJ(+bgv)Vv]a^f,d1\!sf! WRUh H1p+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRի" W8+MqU4p+Q W[4\:+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕpZ!dg\p c7\Ae&5\mp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+ڍd1\8L+6nN;\mp,**+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕp+5\J WjRÕpuR rqÏrWzbh{]Zw" 2'2#lhC]Pazosp%8vVy@o(jmWCIqA\s ap%r0ٕ+QjBy$\A093 r(]WW[UpCW\W"7 n#DJTf-7+&|N3ySsWy>wr1{u׃'! wWI90|2.>|Ҋ{W\L|Nބkӻ 5p|~}RטuD= kLP|_~Snӻr/+;pٕ?ăr9louktqc/u~6|Sc%/?默;r.;yĴ/ )_>]h)yS9VC$!q{_^%u,-;*&fnqr0[zI&){Zڎ &@}M[cFețˑiWRr,>y~FoPO=Sɉ\f\{%5`%<0K!+~\)Qq}v$\Me5Jz;D%iWġK1/W vޝWzϋUj\-K֩/\ \CoGk^nWq%*IqE\9"#eWn4 (W`Rq\y @< D8Šw\Ufq%b #Jq+'Skw\JG dJ0 1< DͽJT:՛*PdA;v.l2gM>YqC0 ɥ0 Em~.0YE&1V_>G<~i>+:];\$s`Q3L'O{IҜꮶwiޥT3O-6Sx>"GkGڕ~8s W8~k:zǕdM+}3 qv' 7aNA.}9=LWC.M uO' *\ שt*UP\=wmH@`6< D j1JTz \tu9+(+$Wĕ[/qa;O> +=l96ʬmW*g6eWk0 Ժsr2+FU|Ki&51>N=oZbpJ%qC •Nn\lFFzǕFqA\yb7hY' +QC]mWd"wЭsɮD.Qp%jC1(6+y Jz? 
DmJTƬ 0 W8 +6q,bqE4j&^yb{)/׬磆͑6i4_ApA&]7hyz@F83/\'(y G =t +0)+{\JNJM*''e¦+W^Ƨ %+=^]' ;:Ҫ%v]Yu1|P0umTlu*jhȆ= qڞ=K\4P%'8_6D*|W{~RԾ+9jJ^^z[di \`vJW6qljr+LJ0ٕ}mW2j+ȵ&+Q/E ⊂s+|#z3L1(j+QujbyxW"8CJԺ;D%i1E\6`=A cDCF:h\wdkkВx;ߝz-+2$5L78yA8J j74o`He r_ݰq:\su!*\yГ41pw1g~JͅWu1T*<Ww_zdc J䞹Jgj.Re0 %2fJsW"7Qp%jSګj)p%SW"7QpCxNq6} 0ю+Q{w TjlOJ'7 Dn6 jٻ,*;nb;Ĉ1(nբ"Jm{<ּ>ٞ")Rn%v33hwŪ.srG;ztLuvK+L%ƒ*w^/Uo)ZP;hW1I9W7**G+46SoJ|RkS mI=[Yz5ZВi5+[shʣUgh P3;Yr_ǒZ ZDWୡ+WЕG{tQv䏒V_ ]Ѯ.׶ЕG~[ϣ$CWȮ}07kԧe'֫VT=gOt;mcDBM<\ABWdNWQwS@MU:` Lr/?x>KRhIGshJ>XGo{ lC|xh׏5d.8ƕ`|wmo¼W/|3mi從xy'Z$GvYw `d Yp"/z0|fީJcYIn9r Q>z(- ߡy~|[^ zWÙJF2oV:7\=`}~R<Ɩdj!/ٓ~`߸ٯMUemCԚRzbvO'GGHHUOVheJ~GX=BUEt'=ϝ<\BW-NW]=BbHG{ ~EkUѮDʣ$]=Fh&cά5th>w(EnJ`ۤ]y=ƠD[ʣg]yBttAй^vc}͈?OwuSmI+%^-< u:^)M`9Lgˣg6xrڠ:PzKܞÔ.m+s+rCWXJO=\ښ5dx ut`B>9]>1X.=1X-;1X{!5ttuۮǔm0Ƹ5th9w(9B("Е+x[ʣUʣԢGHWji1=\BWV7=J;ztE<`)[CWnX;]yvt39k]`Xk-tў+T=Ӯ#] tW(VP\!+F YdT aANHueT Ngw2f3 -HD L"H-yFI*cahTq'u\'A I|y;TVc~moc㎾3*R*\I>!‡zs%J*Εֆk`6K6~K!1MttTbek鲥-jɞ:9ri질Cι6ov0iRqg^d݁4&E= mݟO"UNˑ)R/שG/?/wVrWcЌrvW<]/W%M,]^i,a`|YJ/YD|.8r8 vwKgliqlٲmV6x&UZ拷VD0d."'5z?.Ɓµr15^WHXj(.H[ft`jQ4`^XY}3y0p<@hkr*Z 6te9zү_C#<a(o~3Se&;G\bH y|r 3xVH'RTԪxZ"e XL=WGb.\\`oؗ87?GEg| Njafo1}NޕR c!?*Cmoiv=Cu ، ,Hc>uvXh5O.LӏA(ׯɅy8_yT"Alr6d Ld@KF/ h:=(FY5k7&_G3GzP}\X+CԬT{Yp >0xZe>I}!B~r@p~UU+_Bibyo@dz Ef!tqhapNۢ㽞[j/Kѯݐ/@FNo{w6TXn.Utx: S_E,ŊI v 1 %QB4Tc2\r]黣J@Qd<(VuM)#"D bqFQE"Fʘl[T??q~- _E }ovCݛ^oVf3d:.Aܬv^vZ~W ͼ؍9o^9v)CIeؙ8^43a hr)%A 8B"A$ e"@P  X9 `E YѨ$RiGQN((Q:qKI5 TILUzr&Av;lcWti,Ko"㰲d2~o&PV̱8ֆ&Xّy~vd}t4rP ٷAvs f2)斐m"5O!ʩ(Ҋ%8v19eՎ9vv}tNJ7ҽ0ڮ/d0'E0ȫw^L[NLv 4򰨎>( uˮ‹hy ZC!t2癊;r,s(78-s8':h 3Z)x[[AtxgxY(562#3KW1 ^\222EIUև釋ñ]<,~ySϧ/O\Zzxn sddbwQ}y+( P@ /EvL+܋{ /|B#Y) }!"j| ~^OS/9b@5^sX߄ULQ@ooUm4ږv>3r~߫tb\?`7\f ^lr|~C1:quvxҾTstc&IQ%[V]nOK<~o9di.OIrF(`~gOO)Vo%+[R&dWOd^3H݃4Z+ف<_M`$Lxϐ0{Luxx/= ㄷOY1LTg_gQ{4 SY XxnSbL.O<\6$q6V  8I N}a4u 3iq%%7H2cFܳ3;[$٢68$oEz-^Ay&Di$pERDqFXbԑ#`^u';NӽxcccmQ(^u&$_r gU-ea(7ד;'SM.tm>X,KTKBm C<2a$g;I1.Y!+nKߦ;9s͚Y 4I`ni48qUHX1pibnfO9C8F,H; Nh$#[ _Y(a5:b"55StN-N"c=ӫG>׼V7snqǬut՜Ӄ>M|;DcBݟvtlkFG4~} -juNk91fըhzv+us4iZq&DZ&Xmcá(遐ՓiOC^hO2e|4T}/ivOT@q(x~|L.V3菣H,稈Ok{D}%ʈ4:^ )(cDbWcʼp=喾l@X G ށ0ѡq) yG*^'ef/3 XBjt6旽2 "|*ZZZt>*[&ϣjU\{wwUVh} Ǝa[R~2r2QQ1E0^+Wԇ]LM 4&BjWH߻CDi|c`9h0f/%ٓ~Z!7uP+,~p\qQx:q7 >.ܤ\8X\1ڼlZq?(+[E2Xc~u㝽6:nyz[]94Yq%c7F&YVHĭrY{;oٟ6KhzmwP6XYd|Rx|R?u7Ѥ~Ly`E ,E,MTdP%/+|ܾIYR.)ōeJ.J {_{+DropIzPvH#<̓d*~ T0ڻO*]{o[*B).a (Y^h`aF\IήS9%+6e$ 99gf8p?>Zn]ϺP#` x\ I "xCgXAꝱfTg~2kiFc}RA%ZcH-HZmXlbaGXT~KX'ScLv2EB*-rB3:fApȄUJ2{%c򕳚KRR^ecYqt,=)-} k,j}22}HS;8>DQn_qOi X9=G[}èMqՙ@dne$pM8d\P*c'kc׻ǾT7 YN < fO8 E&O K)$1#BkhG8)b9&\f!e:lV#gҟUvLE=92c}d 97)b ,s)& P@:QsdnnƯ̆ I@ uv~׼D Ug9SQCT;@-rT|?mx2K~q^4irvї,RYdOHӋp*Y6#Qy]$;=fK^_ ĭN$KBvPD>a1MBIEFB i< =XU3g3@:dL.9m!( U8*dRR5c5r֌ljgWʺPu3ݯrϋe& aw]E_byonAFolklT>%@[JlHrݓfK6I2tkVʺNfEP^dd)MJ`L؎9O03R/w#jsƎ1\}ոZ[WzF;fEb/lfr ~SLs Er *UaƆ2Y\N-@@!ÑtTqFX"Pf^jܭ[P̂^coZq_h*kD׈{1P6T8x=qdЎG [âO5>i+w\FFn3VQ#sgcD.z&BLNAíD!i\[ƛ{$ֈȹ[#v#ٲS|ոzV֋vzq#] i>\AY_&_[?T?ǀul_^Es3ncקqN-rᇀqmZ&ZnUqjZT>=R}tZRO  ޻uQ-0+3t]n(6Vl`/mYWbE0?bk1xT&p1  0FB" 8!r.=?k7].BeNBÌar(k46lW[SD]ۣӿ`~iCDBcrVc $fJAG%BgHVўXgYiKE㭏e@tih:#sh ~6 W;ٕHBɐ%t`#q\s,{VQŨă QNJHXϴg!F%MQK&S VvLLR;lRYzkJF瓢2(F(dF;P an܁lX\K/J?'5 Ir3F54,8ZKc?sǥ?B\73Ip6) 921S:#')p%jbШXiӽq쾍a5Z/mƘ!{b+c@9%%a9Zב$N`$ʢ||0l\o !h kIYB@*lWAeNc A@#u/18^T܃ۅuO)8fxNVdz- ZyIQ t)c&Tu@qٳӰ7_II%gPZb94m,K!S{YtF R]AOJ*LXvt< dB^t"xtfd+Gƃed9|OL CAWgg! 
?+?h¨ƎR#?\}jv֕@r1ş<, 0zpVNmː?>tԻAWrfZϬT(&8㒉3v2 O`6'xz#,-:C"dhKAmm[ȥ_ 4ot܆'ϹyM ow^ BDk{k_gW/5sq-ŌhEC.8;04­m>uw_Qلמiz5/5uQyxq>?Y\xy0i]s~h[/sd09A/V;sغ}uÈXvpq|s\PShUlGK&#t@m&؈[_n{= IcB|b ,oGZ.H#H`"c#Y،EA=FzlgR4|áR"SVYfI)1w(1 e(L>tp~~֗tx^DO?<m\>*~YyEJVہ}!2w<;9ӟj &dHk4JDސ 2/sI8GvT/^8YDޣi2=ӆ9(?5Z1K61dR`֓igU2;x0a&%6]$}@`YH`MV(%9(.e/(\;^5rIC74tʃ?Pj2=cijngMb.c|"S<\wK+3puõ/ L6 it7wO9qlRˆZ.0nww^[|f~N&[ݼA|dw^Т;:^$Ì7z^5lD\o_''bϟZ鰹w+lвe2;#t+_1MP~,.¶\>Yw(4y5]yfo3MŐ.B:mz ='$ސy:;51?>ZwK2è4W,qNrRf$(~+g-Fʉ?-}vԁg?,+]hӅu^!齈}t6 ]4׷_I^Mtosܵa.h~y1:?Zy\|Q˻%}f~>:{D ^yAq(K`@8OWM'%~Y᫻[ttӋn:Fh7U)Ǝ069(m <ύ8+7ck(i |'ٻn$Wx_ @Aǝc/{vbc)RC##oITSb?H YYxHB5GX> b~ZĻEwP~^3wbgryq_䛊{DS=I~=_ܲh;$2RҜ!/ck0>"EBO~+fI!NR{/ο=+^Nrν޾p/ŻkUO&R(z7T5JG76vf \ԚNU&zF_fr/zlh2b)s{7Ss?CwZ_zpm>p`ʻˋc:tm-e7\w;&v)zU}O:KG`}%Ɵۊ+ƼBCpr%guVåI2 j0AGvڴQ\Jju]dG3Jp[*' 7VjMG2t*x)˝w7;xu(o{u;76nn0s|b+O N./^Лu;+'ֿ""䜰I)޾ -&ܶקW]>k ;SMb#@v6t퇭6ozsd&_n'->\O\KƢ)N̓WN[|w CP]o=-}+Iy{eȻx~%"O͓nQz{cܜ]ݻ|Sъi7/!dž}0LϚ:g|K$"W4O>xh͏|.CO+in5'#uߩׄqOV}ayD}*j [w ݲ03p@O:ccgo3S>2_pbz]gr^eKٱ ymP2BGlC%B׿5֯B}pk?\}u@_^Vc%2Jnɵ(m6 ۆʇ]?󥕘\Ʌt2-hх6W"AnsKi6rfyDBOW;+Km#4[sg? $8&{H故lH[fl%'ɗ hѵB)͛_i 4G!o5T5Z=r+H sEgcZ\fhkH*Ս [\M4jلb?DE0sbwx_٦z[hmM8c楍m ?"y̽!vhr a\vi#b)c1<6Qӈc@1CS@@x h'&_/ߞ?n(ZW=A֩ X2샔©N7X9B|\`笪6^r}Dq#lF JbF l*zs;dM>)d-b'܉k939|QM^BPZz_ ՐR"@i!r`)D֝OѺPZh4^dUbZIشV!;3L&HyPlZ31K,1 LmtAڳBAnQ:K+v o'0LgLȗ E+U%9$S cAn1x[ h**u(:ۡ-1y*~4|@֗xP\5 J3AБmJf%BڐL fU cJ Hv9!. %tA\(z+R|$Q*Lȼb|R.-1ZzOG"LPb2$L$PTg#VH܁m cǏ.d!:T?3ybΛ.dQie.A?&n/OO?6yv}R\+n^=Xk3td8qwP6r,EȋU L)B-pGQha6ypy,9jⶦ3*Z.T1S;6 n!z=bA I5hWՌ>#xXZGa?D td+!n;e6*I@̴T3JK)C2~ȃ XP 8xP`Qy8YU;L6( v8+lA A1/'jjC2Z@vڳ>5M6U#Tf5OmGi*xX?5{%4BZ&F[퐄PP}AO#!;73oPF+7s^oW7i߽>?7fe=a5A.n~xG9XAgCmj0P𻳩#bjѭ5KnԌ<[#4zefc֯iF_.OoH#Ԭ5n(!/{ |˩4dsCpnE5FH٣H3fvvT d|AJAJ@Tr=ƴ6$7P| nfzC=*mP zxi ֚ NPJ bdzs!~Z)AHe^XJF$jkPzPCXm(utqH:'Ea2E|X5fs˥ /J1RKEv,&f jһR+^C(/SJ,Z;)Z͘-,  !BPq%  R ^s#gv/n9?ssuiv١l;Zv$t_(Nӝ݈wwlzإ3y7,fܛ+;+kw1,G.Y{ɱk ZrV8A'Sr:^,+~= l9lI2)\P2tE` BEtV.D0OЂ{'уTԒbL I`y:iɒ >v317* 4=!s+NO\%qdQiF&)W6v̕NSBWI`N\%q8sGo@J3z=9Q-F>L`I^\=L\‡V!2aR#Q`x6WDX* ;I)i?LI-a%YJ;{vqk&x+۲XYPx8MY *X`P˜#єb%랠R^UO2{oLl7~Cg`D.i6oZltZ3< <)"*M Hh"#6.#cMp: CpH쟠/ m@0օ6W|'7*gM۳GtOa]J!MLxަV,la,1׽_w%fE4 I F,.4u01JGw$}7=mT)Bm.B3| ~wn oZ Ձ6y=Yopw[&7m迮Copde2.ma+'PޓA1Јas!!O61C!3s SG)mPez*Kʵf/,Nҋ4.U1L `5 EѴ'hEi^/;]MYJZ~|NMv4޽cwTh?7zT$26T}1 b=ܬ 3s87ogU*,ky:?>v7(o>n.2HT7/5w749Bc"1^.m4ufb#X?/ =jel:لwh %8Qyf/lZYR4[0|)7Q!i%9:xI^〗itwȌN& TW9 5J8nsab^vqD3qv ն%y$,g/Ѡ rPAi&_ 7/&h:͵?ߣ4E&{ԙ@pYz4UuBvilo!6a+&UZ?9\Lթ(tE]z{НtG2۽nͪB(Y@ɢֽj^]yOG+%d4Ri{y:?*n_M;*&\ʢ>yܷɱإϋn|Coe?ud l\U-/H^w7j1O>Y9St֣`)8BFLRπ#w9t~Nz 3b0$&z ~9BCi&oOGš* q,WI"lNzgv|L:>ρJ{ֽOFm =!熩e?'vSv`'Js UF'F脄LqY9O #Uc#NRrÈ0b)ݻא!rF kha)1H-hϽ~LxޣgI|r˺w,8t%iTt]VT 9UVJt*:Pթc%T8qcR0E4qJO ŜM upagv,&}1mˉEy[6Ny`1_;g;맿 _?9%7;'$^tIkA-5'! 
@LAϝG oD}L|%MksʋjZdP6m>lDL;1/#`#='[s紓=;ANW, > fMY-e:\u) 훾_nQpV%P@}] SY}76,goYV࡯ (|eo ̿O{nz߆N|Jc{}*W% XmmƈF\?1 1ÃyC`$ilP+b@R+ںfHB DTQdLOr댩^jUEW6p IUpKӽkϘ8P=tH NV@qqVƽ5e{@Q»>?Oq-bcAzM=V ^)ya6Zj֊иp"nT_mczGQ@{?5S7Rjԅ{^OZ{cx(|+ְD]Z~6;0MFi:WoI}SJi|  e]$]f5.3Meoʴ"WWjcE[MoIv+dVR+kI`%r * aRU7sc lan^#֢7+91mPuicW k}q6=L[Z:f ``p5PvMj;8-+VrO)x` t^ q2(MOk5iL|y֝n7;V$D y'IEk.gR\>Dܕ"r12僢SF *r4Pcjm$1yU߀9<09u?@ZBEf(뜺Ja Fxb6JrP7F/iW4nO~ٟܙnm87W)GXBj!OIm!.#'Gk70M8].6 ~7@.?-MG<r+j)}`n=!QB] EI")˭K9+׵+?KSPG/fJ^(=2x!/.n֥m:{޵ʤy>,/j[8KlOթ>)mzygv}YnnQL6P{STQy{mg0>.t8t4<*f ?|FMYnW-+p2q^ k*Q}L-c ml\A'D\sC%N"80FD.e(2C4W8%&b zA`9pCehu)`8G琒K>-I!(R6ݠQ&I@kuc܏Q_e3zlS)"QwT)zʄv4dG^T^"0$AͦmP٣M*2ӂ3>`(PTs0!%sIS}dkDlπSuƹ]u6%nx';sAM>+}&EQZ!S GBt?w,\|\uݨ{G;kﲍ{G>⼾Ίe|rѬGkB98 LD˔1Y -OjGvMaigJh- "Nx0UYGbn⪝ru!Okvt1B|1Pie<%L/9;-uåwKäN@"gMpolT:9"fziĦc6#gM rݑ#8a'%sZi I0i(9#KiU< Du/q(^QP|5\'<鼟j^"hiÌNΡ1%v{hXJh/adL`W5o[֞qL][meΠ;܁[M2?r\@M[B$Tq&үUiwq9T2<DD{< r: ZmB3b6s~ Z s4=1G37^1-W覸|٦ 6&lT$$22xxIHZ&CGHAGl[, BSQ+`ZKhe N$c l,Z(ր}T'Fº Q4N^XN q5rh'@,%r_KNv%P?v|+"(NJaSRȬI8lE\) )a H &i>ɩ0!zz k+Mٵ:us8:[QoLŕ>͞.dzk ,]T.o|?|]8]Vn!+\/~u?}Ɩ.9Gv:)POۃ dUKruKBŪffY2|@#G1~EwrWbݭ֝\ꪾ*":1PHnX"k0 1TdhcZ _RxBzY_.BwoGgN_~W(3g<={\q#0~VF7̱/(0_*ݼf9"__TE"^j`,z`$@Q7M3T^47by :&]o pK1cހqd~Pn$TcU^3]/j_e#ч-E{@DԑY-,EPOTQX#Ji6! +`#=V)=a˅V 9J[$!IyNsG4 SLjaO=1L]I'OlND7ߗfgYtzsU:,/j8ޛj%6nѯNT~\ٝe⺁zE3 |;@qMQeVEBC;nFs[ 9n I'[7pJB2T:\e*83+)Tt y2\ \ejuT.bd.@HX\Mh4clS.;&Zّ)~E1LG#&Z0lQ"Dc4LŷwG03?Ze&;әJ)0,aZ+*n 1+~/s_lfI>,z˳īե7L'lDR!\ϻr]EyxFmvmF,ݶ,^8rB?eqZ^OtK'G~r&LMqz5Ύh;T6uHELQr Rr&@Wh M"p" xR ON:JUBuٻ6$+<._g#y8HIYYeYU(ErJjm̊*F<!d`$PO1&$?L+Mxbg;} mer>xGuPM:2_NO˃]A|g.uH>3oeު̼imsFש#) .V~x )>yTizQ W1iF8z#_`]%'҉P4_欧ƏEScqpPy8A6L8* Qc4+\\ҁ^hBRuQjlc6Yx őjD=2:pBFQN8e%#Oq1qh!b0B-`zY;Xy_Îb7~jv=>O糞]`b;M`n(6'|ufէٗ$2DeUX{> 6oLMxMheJܘ2AQQ`TH MQFH\S M56"0LI\"jO8KNPJ;1!&&w9*_ vZ>]sy//ۣ/\B{jf8hr ` #*,S/"eoC{5uu7~}kB\)6M7gor4y4&o0ܤ7EjR*ATt 9ĀUXY-EaNe;;Otӊk̛qpD\׎e-:d}rmg͕y^:* SY(K,+ e)+ }j\pZo|Dtr*BB,'4Pxڻ{W㡺Km,@eD0$>1ֹQ@5,jd <F&Lx.NrT*g8P 8-j,&Ξƙw­kp7ƛle[Mw!].Λ1kn4hz~t񹁙H=Q$dYBfap2ΨWD*n8! 2?X$+8X*7Y"ձ:28l@}2(uGfL.+{oY`gocpx{wtmstm)W];žf|qrGގó#:*~= ~Ay9L22%2&[ \rMF$ 9قF$vctH?t/hgQ㰶ȗ-!ugoC}y;A̿?.P4I*" i8i4JY\pfl >iιJ Q!rƇ ruZ/ R"5 +c9S>rir-ϸyIД e>6#~MwL[I?͑ɇ=B"'gyͳ؁#vvKOL9cEj@=Cn\ϐl 2p ҘshM\.—wN:ȍCs8'M#\92>;imyWz48o@zoN5ӑ f*nT߻n;9:u/ඃNO{S<:(| hvH)kZߊӝrK\H?&y9関o3nX/VxwPOBq_=|fY#l,Ԧ#R4`pmV/o׊{BNn<_\xU~ă2LNa .]8ut[QIVSeK:ắY1Bvꍠ\ņ39^pzL=bˑ 5ٹN&N':8s?!,jYˮ'i=='=.&a2:?& X)~ q#F7ȴ1Cwm[v{={&!|GnņZQۨ~cjTkhkVMAZ9كh$G32q`sEf Z&Lȏh `5~FSPpJLxC-@"_%Չ"Ѥ.yg+*8{ ه\ρxi|<{J`3M6*ˢJQyxn5^6\BmS+ 2%ͯ"8h4#2z %J{٬~Jf9rӓ /fɰ J o{R'Nq;sR;R0)(cDay #?!1/Uɍd X׺t"&鸍l/\et1yO[  F꘬gA4) !*T J#c1qv#c9R iCPBc'ҮwdV4k72ջ{<|o~3rj4/^9b!V)4)guJ9a ф`pL3yQ'!{.2½G"a+T&JQ31cRD^ #v1qv#㚉y(]L;ڲ0j{$pRЂMV :r|%q(!%|[B~ E0I]FR22C*Њ4bMfB]>&I@+ 86ꗉǖҏ"*G&=eB;ƣ wP/ELZoI  X o ƲqZB*{Ԑؤr?-X8FE5Z(9w:H늆#.Η%YLK0.{\ܪܦ'q 59xIQhVTeu>#+,0`rNv&,BwWœD2$e[>~3|I&)ZJ=FdG3=UUW%㋇G凮e~Ė; 4nNpy?*ɜjV[ h+2! cw@B#Py;#Ҿ#ddJ +Өע&ɤ4f21;㔶@`UZ(eIF\qYaE@^*a2G';`&1Bfeɺ9!Uy= uh4A"K0kY@o <>L1Hp YH8;b&a#{ˉrvNeH]MEξo筒}.7ݍ[:DU bX/GJ8i&:UrjZT>>:R}`cU+&)nqx,[ ]swv`ء`U^M>}v(Ve(} ( f Ȅ/B'NO:u)rDJ&ѧࠍ}<|;`jPh;Dۇ0R/4?-+n &:gP1cX*j*͢D!Ʒ ;fO]{D1:lK_`~aCDBcrVc $fJAG%BgH֪=!(Ҋh`Gy2 :4O49eWug;sxt1Hk hT]eY'BK#E&i[j=7qܜs,=zێA(bTCA^('%$igڳ¦%)m˹;Ij'< L@*Kdt>):*Rj4B%&<}6[ΞK$Lf.VL9ځRD9I&D&A)$lRk;,m$E.Ŏut%dA[w9$-FŞ2=b#zcTa:.S9YNp:bX4L)1GKǔ0ųx#ȎƎ&J~6}> KUM*ʠND@_".\Lic(:A+Իދ߉L~([mP>xQE+E^ ӷWb@~au`z[w5uwc?> z)gazVZELk:Yj~_o-{$\Fj 6=($1ԼmyћʼefׅUޭ1\9[eQڀk*9[}ibo{ͭ(fv5N^xEctyIңػxd7=`<"·0Z,mVᤷ! 
cTU*\*nmh=BBvh7g0Ip6)ަTCV`-ؾYH$)B˜uiWAeN٢Aԅ2#tkQN=:fbg3<'+2-QA+/4 ŌKc4#uШLfw֊cV+h:"hs J+]4qp)d*Y"p!KHJXܫˎ'cL(K1OΌqHx:,9*zD;U 鼠W?^޿?KpyQT/RO˯}¢EK/ͼ񏷽0ZsΥp] mzeY{jpC2t!ԁEE=-u?f3.8$Lx/O3b'xſйhWexHtŀ%:ǫ5 epVϹy Z?8L.Q/۫oj.ev9?$#q3D*EM.bhICWtNpӕU&}׻1y^SXZE{M⇫ulCgvٙi0gJ5Ho8r5}_[N_)ƶ.5#7cf7["ZO4e)>^\/&zjsp9\hcnu1mnaڈer%4Rִ}6R{+lOxo__V̎J 6 ^h;O|S7? w^}8a*s˭J2i Iէ`صQjY1حAA9  Oh4z0y&OO˗0>_4׿ߴ`m5M-NӴls ߧ]搷J%WD.f!N+钏lj;O&!lx_G7H51e>1#] Y$G0$lƢW^ˠM[ٰ`.UM=Qp0S"ST* (h$lP3+wΣ$I0tZLqv5>ǭ{E?i~xۥb6W^:/ç2tVkߺbQ|0*\мК>ky*yS dU QT,$b'mpO}z΃G~([ $A@V$ 0:9F + 釙̼F1yMu?C> u$ Ɋ:EB`GNKYhrdn; fkݹ[8ͅ(%huH;^MiggwgZ\]ySMiϞ]Icka_ =Z|v>)<ē-.s+:3JUbP:g]Vx|՗wlK1 Y.K~'Q2I1gdYUE]^rwx b&9!#D{/QFmĤ(VUksio*KB$Z 2˂K$mfdܢC X`q o+;޾% ?Mڷ\/x4kfp )F moi704ŃJ\k6(fEn;L_36t(*X,l<%lF/:@Dƈp\rE="DeIY)"3b,5x1xH36vZΞN;GvU&׵Kf8"ԵW;"o4]ٍ{T (tUBuEwT=?z4uҬm;kQ:ڕ"z' Ixۯ[QJ/i:;*]#2 ͺB*YQj[v~QݕSeQZa2ovj0||zG߷臹wk;[s1E}fY1-svy&OuZ;as:lD\icMؼW&_M ⢍QE`|DVEce3geHH?HGс#J{QN+2  32;HĀGeݛ5>ZMN9rN>qd|3A`FAzkݹ1t}?Hpm<-BF &/p<:x M4E݂|P$ txXS:N\Y,KoM$Zj3{H$I|N1[)[qG({y11|/DʑHV?{O .2TcNlψ͏|ѧ5E2<|l'9xHF%aLwuO]]u_hnwo;h(mWɽGxb(oQy",Q7z0U,1rHOL矄 c<-1lq;z:8OdEb h^@HHeTÁ mjyn(ɹbc8z`>0)ަqwHv \)&LmCdwT`"XUWhCqHU@]K)j۲ګC%} &bPxl8EHS,(AĈk?{Ԍ=tц9bt2Sa2d e΋K%{_4B+Z t =2(z!Y5de,`~W]|dh}qPt41ޚIU+#C)`/L,Umt5L{aYˇI?v=9<.G7sȊȸp}UXkcWx|C*qCEYPIysv|O5 |>5g'V5ѷJ9$s1@^ W{5g+a,"X"LF( 𺵟^hHz6a=Jw9N`L MB V`R]#w To*?˄Wu 0)"~KS0̹*NRS22P2,Y[+A9]xc07NXᠺx9ɩ'(lq8-:)=;&8)cpy%la> fOb#XxO>GI><_.?~-t DpTa|Nɩl\}y"eN=!9#iyA!%~/~j9K0Y(mhL~UxiD)O&Hawc$^HZt$;`Bsگv-JU-<|)C* ~% 2xTm'DuG==}r3N}]uUwnpaPA.T=hM3vH^RN @|yY/Am1LPĸjn"-0 ?mRWnuT%"g4,^JH ocb_M$/k;2_LRwqmi׃CN@46n)0LrtY"3.5mI )ET bdE%"T30N5;&֖+)A9[٣X0xz-EdU 0.}9kaҡl>bTTXSb Z)l4OBIKI>P^7z}U/oų]y#NwP*^H%x$pLWI8* -<^4v=[4[^O <+>^垕<1K1Kodvsq~|\YUn"J׉c袀L Q2 :+D8$*2/);+q*Cz"OJ%^l9sCaO./QM̙| 6+t5 Θ{ cN<&qkC3HrJ͙t$3;fDSijv5Pz9u%QϩQѨ,w^uSvhZ qF;BeVHy)/u▱(NZкx LqY̙C:<Ncʠ:Ii]+\L1Z&ĵ r B($@b`1@1gz4‚SU*ZZ1 YNftUp c})T=j?ZMn ߩzlq@U(zp3F=nq#Y:ˍ2b";+\rxK9j{Zf9 N14s(#0Cn#ѠRm2BĈJ'%4vi"m!"6eտ8aYzgw mo:*~ϱ}r4=|"Q!4 J(V .5EE(0l82E4*kc&PfTettJ`p')5pzd GK_]A ) vEgY>eI `V);tTT̉lK扣Ԙ(CsA幗# ?#ےx%wrpUr'Rɝz &FH"r0_lWX8ج PKoyWۖܶ^նma˷}Dh:vF6x7'y|t>F b;PfNRh;L9~DwW \-K+`v詯vWL ilcHQ#!PTZYAa0\+pw y';(my@Qڲg;m#)GǂrTFSx<{ddN(6pcva䐤XHMLc1>"'^qLt0߹g"N3O1 "9t34ţP p?F4:AVDu.!ݤj/C{x%(*<Lw'>4 Z=5?jt%]w)h( ~CvjǐU~tx x" ;Ù]G~PĎ2_~d #|)C 9dSDzS;``zf>gv7_<녑ΫS [ *1fM@ZX.*^º ݠ*45HƋ>x?O>3B 5wykpU8nղiJR$ﹽ!/L $j5Ys rG jTa8ۚSuwVޞ\kpQR)^,Յ`r̙2\[kƓ墈 ZQ?g5ěGb|HgMÐajfY>0 V0bkó)8*AZOiԦVӑPH0rI8=M3~S%.TNVv6͙NOh7/_>yRby5VЭxa13J//p^nU Wt*$ }G`gep r-0/?4Am CS9jW9q8?.etM,D1_fxsTC=KmmZ6~8s.Hb^z%D?bI$N^X/a#d5Ӝ.:龝 5s Ct8Cn7q R64T;<p0I(0bV+ M D8mʞ<,[oYZÉ-?˰?}Xrk Kfgi'V !SvgWedE:/?9idkm#Ev" /] 3Nv:,Ur}K%K(dWН*ux;HnӢ Jтd k,)Pp2N:J>)7:u';BN81GT!ϣ`8 1QO4XZ\g%GC67j<[`ZGEd lVe"V%1wl&GQIH-6>/Jdlݥ+E ݏh}Y[oE>/D}+ekZem GD )sZ e!ٲ7Z?O5.3nPf݈(}@&@t^"af˜aR`J-3$ >˛]`B~參Dw^9yvN!ϥ)>Qm ZSVDq+mEq IJF(dOPM*(yI"vHi05<.D4"8~l jgӎ6όڼEn)22 t,=A aiD뽳Eg1n!+F.qVqBfXю XrR 2G:${<+~V\/ccxl.x*"̈(ZDlq{ńI]xu+l9Q d#]u ]VDntDG!1pz ",iP΁% S#1sĺμٍw.E=יMK23.[\ܪ[e-Tg쬍^EϨWB s,:iO*)kq<̥OCv_f}_rn5r4[?oplL.3\Bޏ.{?AyQ H zpu"ha#Xi]x$WGpWG8WG WGWG@ʧAS)8C8lb4@ sk,1$ҨA ۍ.I$HTRbs3W:Mfh;V6^%-|!) )hT 4NR4~$b> hS.>wAEy4ߧ 崋h|p2mmz$+]NεP
1o'4ci ~_tƆisPZU:}p6&19\Կ>|z[P2㺸_AP{j ~ϕ[ehSq.xX14lsAW߼&+| LASᩪ,z < 毝Ko4ꝥ Wq'Wzwc`Gł9 ˭."pXA,nG9(?iY{ɱF5Q ӯp~Є~ KA> /R+R[0EAZaL( dɁ䒸d,$-SM䒔"T+xէWIZƛWIJ.Zzpӌ\%>RWIZJWIJF[zp<-c \%q1;JtJRRի+sQK^'KIuJ>d WOzaUTuPgdeRKX |TM ïb,kXyFKȋB8 +50G)by r;@ϵ $e5n_ý#YgY$NRkJEB[810O5H *\k\ Ϲ~Ζ=kOgo.&םcS^<fܗ_bBT3!(c1|8j1ؤCr7isY0VEJ iWp~5Bc4<0 m)x*-V0`O64(j箁BlfM.+M@$R3 I@o<;@qfPD)b@: lp6HF'g)`z\Tknl Ru-%Ka&Ξָb4~ޤXXX/FZǮvqdwZ>@u0iz{T]=_2zGR9ED4QP0S % X `Y:&v-ї^eϼ7ɤZ6=pqu|v8w (R-t 4Ƞ{%f8)R*b pDH\qV!sL%y[a$5izʵcSin,ȥ҉YK)Y>3E4FarR"!0,s$]&8{Dq WN/N:rϯ]~xߗ[&7h: q2WE&Z:Fo8>z!͍B'Of1ާ˝O?gsgEXK}\?Wn*_'6?#UϧE?gik¢/䴍׷0SphS@ ~]Yڞ\K7[BcYA Il6^sooI"7wHuvЛG)=S>aE-0n }.9ޓ`kRnxd`M+loPx [V;:6~[ֆr&H{|rFA}êGNMm$D}v_B`f\l_x k$"!bd 2dP[ 옰Z[IfLel?x}H289~$eoЛbDjaݡl>bTԅV M0yՂ^KUQ@h?v#=n,m7CVnFUnl!#/lZI\MO Hjz$Bɶ۩5B$>\/1w #YF^0l6k L&8NP3YJ'M˥>Zw[QUa:m)O./ 9ke-e` 0GM,)2Z"FMsFV;|=^$޵đ$/ d<3FY= eFdn{i~@c ]QU]/"##prq z|=-Kӝ sny<.JJm7am5w\ofCMhjs1ulƊ$s`L9 r8e;qq׬jsPp1ɀ+&"'|1Jq!e X+YӢʢF ^{H.2! (3y:"1}P;we}s2b02>SמZ/|v~]WWAwgIM;ess]=+@LIz-PF^xКP; "dS=$\BVz_s0&U3:G#DHZ(+j]6,y ;F %tah75ן`^QeaA`T YT rܙ&7~BB]u{EjCB %>'@/ (%F63bIv[UKb%38yYbtULnB$'I36g႓ɳRN;RYȪILzӌ5rWkp!B[x3hp>;PU8x2^'̹%Ƙ$-5"/ >">OOfs;7s4GClcBSBa5^ 1SlFiTMx584shxw繧excy=b$!P2BI!P{'bgGj܋y>N < '8[0TYEpz_v.xx؆'NV 6zkzL3N v^ZBg'W/5Z0 B%^Oq ۮM=38`4|zx ؔoGcZ>=Z<^]kguqu{WW Nugi-V˹MgEWL~ɝK#DWK%con鸫݌f˛\SMg|Xl:XMlU[] ծ׭km^Hv)WhVFUl=9-pw_ :,^6.np|r'G?o͡Tp_yk? ed5.$Z#Ӻ&)}?/6V[Wt_E\},,Gxqn<]'Op?5WJ4jZOӺQ/'>>F_25S+B@)BARS2q^/n65z͘( yRYu$E:B%r'FB`ѺFzjgJ4-|EÎg{Bd+b6E׌>b gDkѸs'N dQQgP_tn`1ׄS&S.g4CsRϕ'!gJFR`*("u!KA`Iaoݹ7VKP1lLM˔>} / $B2l"LIMAIդkr)[ 5\N3ʨT9̶߭Q %fSx`c*YJ ͨi2~NilϮ\˻7:ٶޑӳӋvgcOuQAn2M un|57E+(/U*߽A"DA,)d]hdEm446ب~7BTR`#+bѓ J1gr(5[2֝{dr & {meYA>zu̳;*,n_l2MLf'W. zWjj-@ k)F+ gP alk^~w#@PHJUGWy⠰m' qx`.K߻}unfSbb:ڶgm= [q/^iLʼn,=iDpIRbID(HL[U|VJV4YPTVcPDLT, d:-[~U Ɲ}Ƕ, xœ+2>Jϴ҃dw"L :l] uY*B8o\1 yUILvI3 %=K*aun(R'f\Jg8[)>)ɢϊ)S*&p1KW GyH f?5\| 3/VB ֕9o~b~[y4[; ɾRApC3E?:]~,*̸K&wlSt=BRXs7 nLP H!/B[DN0B>yMڝTfRYjY4I¨, VE'@ j4R;1 rzbcWdh];}T}G{'5i"5eug1ۧ3Cd$ !l>% 4x7E;9s$E5T>VI+guO'iM&ԪD##}̭kԵ̺kgW0ewŇl>tGN釛_7~fob0X7mp2خdzL?ثJ~ 5Ԥ&O}ӳDitgT%"R<*8XtLO1=sӓ;aaJrY#HD@Q깎bۦ)=W`v7n6$.{@ ݩMatiǎ2Y1@g)g7_"=N lXrnDj(AƖBi(le3nw@V}& y[f!ɤE'Ej<Ȥ!"QLJiPko<'k!ZB$MscŮ1W?T{X'paWWůO@P;,4. 
o[}募8s^x*|xvz;yx4??mq{X=391_=sBsj+uXgZՋo_.cUR|יaʱ|&]>^~v.,&y=-Yp{k7bʆť/[:Z`yȪBG3gBpW?4Z|/ و=ljFL 뮞>kϥz5o V_cCxpߩ݄b2hLIZIY Qhyj*N :a=2ТFPdB0 2th+] ˇ:dz^Wɴi<ۼl[㒅2Y$T0(50Ji>tgiapw Ʒ;y1ZZ74;O[U$ѿP_hx#ʴ<@Ym)F6DaA`E썪;&)`j 2bԉɺQ&u(.=e' GY4x3~jǏ,)3{y[9ǟҵtqڱSo r3￱V6@b`^Փ;PgT@]5A]#[,PRR=R޵6rcٿ"mS6E bIeYdŚ%G・mKVIlUvX"yx%io|.b> %nJTZ_@/ܟJ^,],,,!h@V2xnN0J*W) r) -nN@jݒ96Tj}%(֧MNhi!֑ŝC!;k_ SV H]SԻ9&nxw:CHY@g>1뙯[ȹrw{M;[X}^G6=鞫_8x;|D(M\t8- x3aN6ӭ-66ƘOwro#OMJW0i Z} Ui]!'/tEhi+9z+#=qir҄6ǥlTѱ mP(HBnwQP/tBROVo-tH,mIMqA8!/QN8ɗĠxuc'Nu#})m[ FyZۻj@[qTD:*pSzz OfFkՎULZ saY-pe56RRPӯRZIFt%l Z ]ZIEt(JǤ6#Ɗl :, PrݯEHTGuհ`3\_O}/\>ow 6|%#vYa Cɇ_~lX& 5ڏgdAiA!SC!+5Bǧ(7/F㺠n?c(5$\AYz?pP1I^vWOZMJg-:g%xSx?-K9Pw[wC^4FwwjiݨVUJ!@t4M(Bd!Qe EupDnc \6VuTWu!F]!3au*B&$ATtute)Q$9/|ISxe&$uB WwOWӕٳI`;)vpꖮڡUUK]zJ1$#֪lRIͅdԲ +F5c4#CWW\ :]!ƈV)]`E6tpe6th: +"#B6\ R:]!J֫K+ɄV,# թ  ]ZTt(J1#YN |fYp+@H3+ixe  Iv1R\@q\ h}X6H/DU i[{V0iuD%2-;f Jh[s * U'*L}TFT_G=rS˲HGHEUKoIKZnhǾߤn=5k]~u裩49lHȕ9l8ݺurkN WYr4l,9@Kid.В3\k2+L9͆&2}Mz: ]YA)s²+KYAhyASR+g+"7ݫV5U+Ft eMA) IlmAWC m ˈ&BZBW֊ CI4ɈpN J ]!ZR+DimOWHW\2/+ ]!\ ]!ZKR+.9i/{RK3+LȆwh O%# +)-9:B]]Bt(P=]] ])Mcgd*{Fqc i4>;ŃJZ4HY&?$ \*s?-3E)-/+PCρb[_n2#F&3l@h4(yD6W|.z vhUSHWVSr \壮Q+DuOW_ ]I_KrE4|Ƶ,xtn-vtAK6-u%tL!UO2gDW2 ]!\ ]!ZS+DidOWHW̐"]nDet([^yUN ǽH6tU5e"uBptuFYJ2+a4:ެ%\BNWRJZNN ]!\is+DOW2:{:]ݴ4iU.CMiL%~@ˮ{V0*Y4(U2c)ӮfEcW;f99k8`;tDrbVд)x<)h.hv(e%wRK]!`]!\ʹnwLB@ֻD20*3+l j:]J=]CWtϪW`Ljѽ1U;QB;,$mAWCV9֝MK+DItU=]] ]1tulH6tp-Ʌ*uBz: ]qbI]`K1.4uBtut%(VfDWuB;g (BڤNW3ҕ]`v l|WV Q6rJQu6LgCWgCWV&jQ+iyRJY xcb$4Hu#d=ZĀ :$+ l]^5Fd`+1sDkҌG]n0)=$'2eL#ZS+D)z32aɈgأ Ѧ؞UϮ VEvڡMѷCZDkAWCrψ&<BBWV Q6YqN,͈pN ʮmFOW¨ˡ+MX\v:E:ɩ\j+@˙JHVtu*m3+ʆ.φT +)'*#Rl *r+@Et( K+%9XjGBWV'?3(MjHkI*W yR؞K A'ժϓԡVh4\6}4aXk3ZBR4w-Gg ((L!Ί͝j&;(o֙߿ApWG(*G/ѼM ˪%3t~?9? ߍ-Π7vu*8 uyVw:_?L C7}a[:m7߰3 Mk] RGTxSy3-N_<{ߩ\]]~/K,/5MC[+6or*gb:D*bF˵VRWUΔ$@k"m+Qr>EGm}…ȏ4Cc+)8&ՐKI1`ƕܺH%וƷRU $,)SV{ { Qڎ`zY~/pxЛgg.Y.͗}Y"랦?~~2xa,^m.a?L6v~tM>}C|S'Eo87: <5Jz{]Y7 H1_'g߸Go ˯7ieWT+W-S5?n3n>c .N/}o4?|$~Tx@ Bu<ҍp0-bVt1׏LuA2TŧKU`ޏsYg1u,ws?j!B;_9fϛAFէ^ al<%[IOq1x JXt %7>*><)!CA~䆰T,gC%t-miy*i]s|fnKOr;*.,e0Dܶ1(7 YUoo'Fxb@|o*FJGeet:c-AXJS84 T!sBd Pъg7(8>&ʗ\&.F763E"Và qb}|28l8AB$ID"׷0_&_W " S> z,`\gPxfR.1,r4M\ W' CK3QR*KG XgA(aLҔP NANmk@oZE\xgN.C>^T,DVpeJt > }gy̺O8MӜӼd*W^hPJ:.D*$5 •S#T9h{wэh`ő|.v4qtk l!6Wr7{O' nW%M1:=;$;rHZis/VW)sKٿ*N"΁YGiI jъC6`/(+ od6&%&,&6V<58OM GǏ@!Ϋkg_ne20~lq"?__$F{'D$n+wpc<| >O}4yYQH0DHJ{IՑb/ZX'aqyp^Z@xAB+{FKXKXrKXBKXK*bC^e)aTR6JRFe EA[sp1!jc ^PCpJYHYEX@]$Ҟ:hdp YrsJn7v|1T+-Q2QARQIwv;0*vMZ"{r 7_oi}5Xj}%(ӻE'|tuoo;ͧX2w1f~0OYۺ0ˏe_tf_t1뙯V)s#0Nw䷾;J/q壯d: 'Q&ؾi⯓xeaQOӜs&_77_cz5yqbk][s+}9{Fª<8T%'[qy]АH|9m /"% E#GHpt7&r: q$B%[ ZCb=T*VCe3gEO1/#H>iXFpА8r 蕡IG&*E4ݝ5>ZN 9uGGCo2B>$J3x2-ֺs;I_C"n}~>YfwTt@zAn SGgl6` SeV#UV̓aL]mJ[+=x饓]/0m8q2 g,TvGXp!c WsP~ `Ǡ}BQ(NBm5ZZ΂w*s =G,! 
gԾL(b])+̒L@ ./]b<@feBks V4~XOJ`c~ a~dnavpLf unbzYlf9={w:reg`Yh2Hj'gWvL s J)Mr6",BaPaR C#HV%1V`ZBڃĹqB!ĢۖζQH(Y- @9n)݌|<#j|ZnwjAWܣ la6?Ycx&J\e,ٕ\,$*A5 19fvMu%3RgzMOgy*l G!Pw b9!by \8,rzekkf8r<}GCt]jtb)ͮI<P;,32ĊY%i}kZيHke݆ :/h80t[> vɖRF(뷠m]݌脑y)T`M0M\z˵D)4ŠGZl;3f.[{OXL/J7:iwd0: &}n4G7YOOhZG xTuLq}}]h8~ ?qBg2cY>4{7ԟIt#>c72~?{/45Ţ6YEcy8 Wg8˟^o;W_rIJ uZW$7[N]m4 o.EZz9>/q/ g$pMo2> fHG%27/%H28㯟'eXѯuC:PꍔQ?,YOiUMdŏ㷃QZ{R~q]dϯNbVJǓkh'A½DEozɤ$/7Ab\ޡD$O0AJ%|z_40:\7ݵF7uKf#P,i#/f⚄.7_{1*//I).6z-@'ّBK|F S\l[T*5suu8ʓ'<TS8O9^ڔۓHYwHhp%[}Ziȳ{’n߲SS`24i{2u*C`c-5i?]ޱ ,Ӓ>{&Xe]'Άt I_h3b2sI8o 26EXY0TYtYƣ2U7GjiQ/vR{dY ) SҋR Eˈ $UgZ1zEc>f8JdNuR}~;..VZ("-hT*Wb Fi4ZE1+[e+ X/sqB21#C!KK0LdXjLs$ITrPGB7ȳ{#Az&%n&[3W)ex7\ F;쭶$Иmޟ+y"-c Y%wd$Nq^}H@FϬRJ7R yPvyQzu ^W2xŮwF?Ö{UɶCZ8S $cFUa[zWHƓb]swͱC +j17}Xo68yTȄJ^#S"91+]R4'D0]Fؤ7$(*p% u!r!rn1-.!FL9l 2 NBV:^YjfKyC1ScpO4)YyȌ7;P9a2X'<ؒIo C܌R*CNK^^ 1>OOvq&;*!3!FWBKϤ} f[gDSZM^tkk1Zѩl.h$f>eObeL,fN::tHEmXVgu I0He&}lv9m_A>KCTњRY%Tv\+!G E@#uh=@>U) {GnVֽDpZ=g$[d|M9ۍt8߹ѫ$94EpV4qAr2]Y$Dg$x'\H*]):+av"<LĂLR";ЙWeduWۑfJ 括&8^,%F)yL5;nt%^n'HK3ޯ?(ћ|\5t0}DB,-~x/\ iL-P(M23i#"P2Lp%Gvtjp`60x4*SɒS3Dz\ڣqvdh$[?y⌦\¨lB7gZ^[\^\kvQ8C}ly&03H9|ppz[/v9G?] ԗ$~+XSK%on鼩܌ul +Rڄqmh(Y|<~\ `jlUݭզgJ[[\HiX|7rƹW1?"7]dl!mqhq39:7?|[~zq~ŷNp'/~y4F`dmz.w)*5ĵ-L7.>oBѪg.KsFTePf¢GKMg/n?ךO7-X[MS{S4-|ӮmjRK6!N) iK&7mt@_"*ɏ'q$J=0$-dVypELH@8C;Vƥ?p8:1бDi,Kr(H‰H,(Sb;Qb]koǒ+6@Ao{"KZQN\$EQiI(Xc 9NC:Aկ&v_w/ϝ/c!Mwn˪㲝Y ^?w]^G?bU9o^w+NԷ{t?#'`?vcףa/l6+WUffAoXr ЋVKbb C_+X0ո`BN gp?,;y-_]χMm&(7]f?.Z|E-O8룳e||Ipٷ?~?[>nyy@{gQo:|rkG6zqyIG]{=F<^u|q(:=soƨ .R~0"vҐb2j|Vq}nHErVݲG(RRq&R5Ob4Qʈ^!՜\90j.??b!֢sHFE.uH"$7aʶw ݹ܇,tHv̄6 ̋O)O]?2o&qeѶ}ޭL254cmge}M8B:-efJ$iC3%"3'L8dA8P͈+U*Z%Z^3O:@)\= Ң5=:km0PkIA09P6Miۊ-`cps2~da8{޼,eT O+Yc:{s7Β־:'( ͔j0RO8->UQ`bD-Xv*5s'acA2\IJȎ:\xRtr_?;tNwnOjngW i,~0|DaWyѴQï\dڴTTRQY9!QfcLRK${AQ+δj "4|E*quK7m(<:/Ji))qA@Jڀzj,K ɔXFkۨ@Rc3hphZe4.2vB ȅʅkDmW2RgypQ5*_0NOW_8cW 9*}[|3L%^k jERف/ ė>W"FkW ~w{,;~|Տy~Bϩ0dSp-M9ПS(SbSzR988t06 ʨâ+Gu7r<p0<p-O:Zg:Jot xGM:`'CWWS:JGzteȄ du4hY:]ubt X=:`7LfX+ŇNW%#]=Crl}+fUT誣e9t(1|t1].C-}6g%^qT&^98VŊMW'y>:v̾? 
X[єE݋Y |5rrqVH]wjP{eg_JRR) MrQ)Vߝ-< ]=KtEGuuP Lpe*tђ9t(93+H{=!`cd};CttВ#])te2%`dÝh骣td9!G8L'\&:ڧ30VҕWN ]uv2t:}tQ#]=Krl7y}~~ZcXUo6M%Ʌuſ~O?a$>;_g4{ &՘1sXas<e./Nژ;>ãA{8!* C* ɥ04&q}FYQc4둵km(?X`Tlx8X>\OWF^zsnV6XP&NJ:\=ES uV3DAfMigLәxpmˡUGL]iRnJn]īfu^b߻My}wZ¨MzmPкe -yP6j:kt딿]W(\[H{n*iQMe7he<W痛5$g}E֧XCW?,-/b>zWkN_X\SKo/CgU1{s+UV(6_lٚYx=R*"ODͪ,l}~fy@}**  wCl4nVӲ]eF;aX^tzSѐ4ZUŖt1I[N]:Z!W^U =$cdane}O//ky{ίV hr89+kiQgn7J!:k |r:*gZ6tO{1\;]_J`RLZUBb.1[bQm1brYX45VǼ_allE#VLC35s)ĵPcn$wĦՒH-&KD˵ZaF$ѡւQAUmЌiۊ-`cps2~+Zt,=Ԑ۷ Rk和ܜi~1U@&()WbUiY{,t، \}cS6d-)*\pEk32孳Z4YAېR:hmƖ?eXX)e)5 F; IbUҚpˡq$hwYaںy(8xX!pG+Ј *Ў6^R.Nc%fmhi^3:$XyK(d_-eızϛ[!UYUcmϩP.mlռɵS4Jgaz2&]CkurGXG7}m b/Bp vXq)K[r݊ NWh WZS氾mmݮ8 f P)`'_zWbA[,;"4 m$I|txՃKy@1>n8@G>+J I/Mҳ]oG M0T}0w`vv03]AwWLzD*4%D'D[]OS}*5acR09 X|E ]a\8g4*J$I&d0wMi aƣNZi$|q*U"d : 5]xL5B3Cl7x>I#YS ~J4렚Ί(`6&S]eMm0HNH?,vzvF\^盪OynDi}jڲR>ZbE\["AGaCt)}YE{ R{'s 7D%|MUh#T[ֶkR D$$evᡔ<,a+ue Юt] Ac°<9Hz-xB(j[T!=$P=ihKL=VMtA+}[/ H"(82UC\bQ:'T)jA cvBxP'j ݑ8dDY5I{"PaXlE)>VgdK'꘴-5_GL7Bz+PLS v%޲HmRiGoI4^Ex2'MwI(6m4/m_AK%L0Alj mb=A;w|hށn/6se&%W uq[nJNf1z)tDac`Jw,Y:Zm\ֲV Q.SV[zhکDn`LHY]ýa#-OfĶT1@k8pS"GxEȡm*c0#H7"&ƹEIoJruXLtP)tQu0K[@O'2(Hu==b-z [-|/ЊyEaqҤF1VɍB׸,._c't@0\8'JQZT#LjMN gS8O  N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@s>N ;e0y'G, 4G'PTxv@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b';xC6~qbMꮼ n/zpHȸFs5!Rn\"=r7.#.>$8(72#,#6LeIe\=U2+6\I*S]S|pE*'\u%!&\`rZ1y\J)W3ĕV2ȜpE]W$I75zHmTSTFaW3ĕQFpEM> ɵ2\ڱڇdp*3m51\Zg+Rɸ%]9 rCf jS쭷2f+][]\Rkub[Sy׍ uZBk:^lx볳r7ח}oE-R t WOZ]WnB~VV\FiG>a>v{P;le>Ids"xԆWBAב;N n'X?HuStnF(3U}YM":+Rk'M*g\}1rGv[ (q5D0.arȋԺa*ĒA7WqЮ& 2•4^]6\-}.ZcqUƌp+D.Z3-fJW3rK $D6"r Y|peg+Hn&(']J%W3ĕE ;nׯO9>?фRKf\mrHqSH0VAF2•J wS5NOWP sĕ g]AplVHQSM&p ΪlpÕ AGWPWĕ^)5S_F tovMT $*uXn%Rjej4nD9 $8$\R&7elV".rJ;nh+Zq5C\E W$z Hm"2TZ\#>,&_=HUfd\ G^Dqp5L. U`\=BɌpEGO{ajheʩm¸z\)m HnWT)qE*c\WZcsJIp} VT*Ÿ!Z '${.x<B)j>FK#2 ,hr*1y\Jf+g$)r8H*3u\JWsĕ7@~ް8cֆ5w< GZBwȘBS9Ο>;a{ V­DXBDnA{*%244Z"2:年i~>*HpF H~x\JW3U p|y\-sOIeԌ/WH1]O[ rl,x\{`~-WUN:We\=e^pEWܘ HSq5C\( `J+RqE*c\W:*=Y?}T"$6 *DW$ةlpEr>+N+]9]In>"qkW Fq5C\9cI2aAV|v;x<1-C{Y+Ͻ-{&l& q|WMA# Ь4+閘_w jBڜ29Yx\%yGjz&G*?֛c&<W ԪxJW3U ؙ|+k]."7TWNJr)jjыv\\ S;f0ZH\IC^JbN`E6"n!>X([V9-WO+U9uX_l_7iҰ/;gxeO9^<ILab]^Iƪ0Ƥ"& 6h.%1"W ha՟_y73c}AX1%_R[JcMmmٺ2?{ƭ!vC@qwo(MhHyj$;nHd("p89$Rd wV)п?'/GU,+dvWi8-*Yæ0z ٽ[M}M/5Ck?Q`<~JuËGpYmvѵ;n2Iq?&~7D޽ݻw@3 GPw >?s7{ϱi2F<]tOMuO-njj03|Au./ a3MI 7dhkh4.-ZӃ՗?~zq޵4 ,\'δYsvnlXF4ͽY-Ϻ6h/͙ufqϗr5ۀ*!^'g0Q -#JB;D)Q)F *ɭ5X6,Ngn$_p}loG(.&r:}ozqtj^"r;:Xv&.]"3 SY+>iYbSg[^ˏ?%?7WR4nMigǪƏ ק-o$VkЋR56%~1L-<xrIM1sˢw+urM./)d<O^_OT+ƴLKק$3 fG~ՙZmj{H}wl͛=[t.[xAA: B(lԅK!"pW㡋 p{A0z8{J?h}*[F45!/wN8ɣ7"K#^(*HQ,AnB\%\{*{$e <(%A}Lx/N ojS"|2,ᑚ@'Fk_=j6vxj [7ioн5mmޤ/3+OG̈Ů [`,mS wCII3)i8N :G~^B1RwIn{fuD%4YBfaS-N՚) ARq`U*kBL҃\.:Eɐ:UZkyWp-ܳ&A-;ʜPN(]ܪI22r\Yb7ߖ/ »߷nw MQ -,HYK,eZY?hPтpQ|0ؙsQGu{WON8q _@|=d91'Q(ʐ)%AgHGOzh6ȈՖnއn;C"a{[ω *l$`G`Vs NQB@s`@ \ZP(jyƕX ^S.UZ$"+ 4LaF&99SQ)b%*\FC^8A1B%<u1@^ߚ_~R >{WqdhQ"Em+m &#[KmxvYYq8)L8I9EJr "ʀmbQVi㛦B}:jlp.70z[O+nuzY;돺T&Od'8ve=7a4!?I륏L|'C?t8龝a, Tx? n6F<+ !w~^V2wSeinMJKnJe0;q쾽t|El:w2ӺB0@wRIInMqv wz9q]LyޤYbZ'՞mR1ޞM -̈́RݹcڑjtNZ$ /oY{ Վ0Mr}C4SYd>-7nXgq 0B6qCDžf579Qfnx2r{zR X7B|J„Cr&IG*ȁP/Iq|aK L~ٟ|| CI.n$1E% Z}PTq2=6J6VɝҗRݚt>&clhUo*[sq ;H= ?{<V[`+Bhz_A \UX7܅TkhW,$Eʀ)&80}K[V` #"h"bLL Z&2%j RnUx.oy [w.(S#>o,G\. :e<4=@؎eTa>rhx Ij(;(Q2jLя7 ;>z(c Z)WT4@H`x!,AAƼL$:s O $ġB> [Lb82/e~?{>ǻnq.kxHȇ߯zf4% %J/q=]W]],V7~㓳p&2tL<kID"329 F3FSNn.FZV٠PNUw{LXy}7󖞻>wUnl<*/j7׆$K/B"4TT>0]SGJR@<]nNy|RdIe̥>}v,Y,y N0-Sdg2Pdxp+Dp`_ֵo8aAB[mh+78zw}D Ǹ5QPj Wv%<%U TO)G)"_ s_ /_=x3bIi A!ߜ |9W T dxJ1+]DW697ZdW0o 8ASnGM%Tܴc뼽/:"H8"`"rA=SHP2{D08ߗ͉{_? 
L{P `}פ5|n>[)V;) L~Bc6~dj!!_wx> |滬?CR$ݽ^95ׇj~hO͋CF 𽎂ӛ< h|34`8}gƟkLLN?`wL51~ OA\ /_uÓS.9=+~e Dvjonf=}Cn/8h ZwkŒ4U|1Wip3:;?˄B߳'W^OdZ`y.0'ȎёށcgΒ%?8+{R8w>Mjln˜n|kCBì̀ARډ=t|hfkcWMx*m\&<*nsJ.x{gnsa6FZ3xfvl.^`pqkF"J`XaB vcLE,@)"'  *PmGab:Mx`$b˙&-Lha:8jaB Z„&<L}9>#r|t7B`BQ}mkNd:o㊆#z!L)CO$Z zE,xFE)EJ"cc0E@4eptR[Ϻɳ{ EYuYuY?g=ܙk}:mE2wU:8#S\d2I1ނcGǷ֩ S`ˍw]K?aD!ӟ$0,keɮ(tY<dˆ?HEبҊowiB:0M;J?VSId]2 |2k:D1-սFDJ,AX*PbsM*(IB+Gp<-)"ܰe- ^+@|l [@rg-Gegefw"0000ɊQ1' $.Ìhp&S9VHb)՘f6sm! cm'$SԦXd*Y"D֮ ϙ ݦ#-d*X-$ZH1P6̽$wBdrv9(3઒?zvYF>5+I/Wj}; NR+VIfJvR ۈ2"ñTǭڝr?J DId%I4&Q"GT[JykwȫaYy _+obL>/9bwfPtƻw6+;ZCTMΒeokϬV*meV*meJ[իMUE{((i8-qqK(A PYfq! DF5*,mmAa/P^mO45r^ U%x#ΗZRRjS7%p7v|g|iE?y)CTƄęf̋hb12cN HNA@1-q&YpLXus|ף^n/eʼpzu zs5OQ, 5e 9-6bY*6bY|N5˳ױO'?fH^ s;!Za dkyuװK6*d{[ "xJ-,P<86ʈ3]_aL-nw$K-M$K-MbKRhf:U\6Y*Пic@-i_1LC%98/l3#QZz4 ] JEhI!O=G SonN:p.ԡ.f5NzT{KͬH^7X0\l#\e.56x*u=佋Uȃ)"j7 Zd!2mGv(v58Æ#R`ׁe@fUXhgb7HDi%zfQ0B}iIn ]MU`T{:! ?6$bGö3L0A7{"ˌwOR˳iwMNۺtc6/MK*-U׹\eEH>q)Oh(M4+YS5KZBfI[eƺUGyIsȷUY{jV%g. '|ʜ,YUHE+{[u<[%JX AgVhN%bDڳUO $٪J":gWB #,0R-f"hn-%᪩*&)y{j RH%| nS>6%)39alSVeD6%U1B|zcMRvDҒn+"FjmT 5?Fe9VQIu8┠RmXXiS{5ܩ"`ՍG`0)!OK؈4`Z=yʑiMjw6ƳfBbcC@uY_9y{鎅 (Q> 1D0L M["v\K1JaCDD}hVo˯Uuyj˷muUOޢK-'ꃻZpM欌p426QrtA˅ᣖF_wF_8 ^7e=ςy@B6Sc%Dt@W n_΋Y^PN,w J1%jǯMxXJ_\jD_TnQbKnq${dQYB(VzG5w$ke|:A._(2caG|6HerEAb$iH%*"ֈ"PatP\u(+i6mP SPD_ni"mP <`Pd?3e/ l"d"O5oTe\SS*#ER5,S˳im+Z!@[Eg t73RiO^Eb:fiZ 9q )\ `eZXuGc-mam k[XprsAmO GXPDOkSTI.WCF{wf.VͤP䱕sG SE* %PČ"@i'AF#Rp%b%mCi!m i ҒSBҶ-}8HD۸;Amqja./pJ \O6 l7k)*Z ?%h;ox#D缤Z-'Zr2|()1'L%K\!"ضknܶ-mC]S2(9F@&ej13ϴXbw?br%瑬Em%p{ypF?m/nW !]hv|]]oG+nQ]-F\78{:f,KDv&-F֐vL؈#q3UuI#GW٘mciWdJ:*2Pʢz)øflwZ=u׉S;e)*ۇM>[ݳӷ]^'){sWgo:-sYEO86BޒԤ/oWPcNkavہb0cmV'} n>&D4dcId$&96+f MFPYFxv(N~I6ٗ/Nnv'lO.N&ܺ$j'N[J* 3Pb&121c/?]=:I:@}½~ ۡv=kHZggiq2}H݌&yq2$edITiZyFL)(dCP:M\ɷP-fvG^i.wFgQQޑnDbz Lܪv}~|ãO~}^_xOS{e>Lwǩqsk[0 #> y9d_w{:J sIDZ]/v}hנ9a_ [dD#~BqLKZMjڲ캵X^R>{=1䣊I"(B5ԏ(V!2q+yIuu^AwL7V χ|j0VC Rȑ,(CUYhDgqΎ:oȨMXt3݊ӗ:i[qڭ8Vv+N;s;|d(b o\hmIEK.X%dQy_nZ9WOZcH{^l|UOWܢ5;3[w?ZJcUi0CtzQ Vhr^ㅍuNϖ63~ֽ8V a<:AT%I?ۛpGrJ*a>^S} 0(sX!El]w5^8R)n=8r,%_"m-}<;7j(t1vUSul;)Q!Q55fK߄߼I( 9yoĮr 7%8>~y\K ~JF8(xρqNsHھyyv ^[:tV@BQd'`/Lfv=)И+0e˰5neTYxIٗ4[m'\(S6,[?~t:9D-1jZJ A TlԨL!묂W@aJXGn̗oQB6=4L_@Sk1y>_9GSW'cN'_舠Zk_ֶjn,O?j@1?у?fo^sɟ|S3"eNkeSI1HhztJFWB:sW;)qXW'{OaMoMhm`qy u S2W/QĐdbvF BN'φBh*ɗz܍>qjȯ> }RȦBz.вw'};^ ZZGCbtQV%8+zgLWs!$:ή yׅ%u!~|ׅBu!ﺐw]ׅ|tHf[K:κ4ՔdH2>^#J8׉U?cp/Ǡg'؁ÅMQcNyw}l”>FqpIgff c:J%x-љ1)7]IK#sImbMs9D,mnU FF{)N!(SYE֚AT4OR9rtMy 5!clpYr LPgJzv!|hDE{,L1lcUƃ7ãfgoU ;^!a4d"ϗ`ĚX<%+|5LbS*V1ZTKu2yAb0"?ld&R &S6An֬ Lb6 k=ڜ8|d9(mR,})_3 J0\єr+IG0L RbD=O@̪"2 @Qy͗}*ocvA+h7 /*4 >Aåǔ`)] $26Zxi(p.bLP6BUN5HkH qy'WtxKq#Æy0pZΰz6^I*[xؼlr Lļը J$UHM`-ȦܽYNm}W$ܫ|-USf1Ή!)C }PC5㑋% ,1@$F\@m#( 7A Y'!UȚltY>,F նaVANcRzHrʉ\ɴ dSDY7|99 xfe# -Ԩ5]ľ/LNɭ$Z%Ξ4e}BWL%v0 ꩕?70-ȧyD`Znڋa`zڃ'[gJ{8?z_ϟy`A/@1( o.=3p"gAP5_۫,ȵRKH;G{_?7ӳ{>;JS:%LgmYeJFd`cp1f䁮=VLʽ2Q(J 5޶.4xA:![!0fiӡxRpU 4ZLOy: rzqyAQ;ƊNrf[`7ÒMIQwLV<nM+8E7gh]J0>DO㯺ry(tIњdžQwMr >jck\a8Rf=NPMډhj#Z+:S >=3> -dЌ WɀtI21-`.S@* H~#M(655zRH '(L`SSfEuܶB1֋1>L|./AԾ)l`:g:()S +4 usb?VI>D%0lpUi٨ha\Z8Qt4gݗFLrZUƅS'WZGQ:Z#$*[P@u[Qe}j;.njt։^,e**OM8&N<XiLjEg ]T@.c^AYYC=$TZmt1nPl/7ށQ\04|Dža$ %1\l*5s`$C];4BbVW@S(]P4 78T'A]ō"R(b^˭墾~P9@^?fU@Uh]LjF0vT(>  aCr(i7^,(*(0!Vt6ߐRUYb*9biCC4KH#dS[Pb  /˴qW d*- MH V,QC,*DU2UQL6KʙS 5'xj$XD @7_kFC[dSޮNF8,72(Xytp4t ys\@IKg7D8Q\qVn4P$4jl.AQ`IjU݊\V^:ӊ.Ja1Fsa͚zp& $]Ub<`QٔtxZ+2)9x;ӫY~B C1jֻ8EO6oxF$tVmQ]kN{"ZYᚬ ͇Űfk\7XZ6e-+ Nn.rA%Gڎ.VFK\%틕 }1=nl K &Pk?ʴtݵֽ FZW4X je ԉc4iJ`YXiZV'-5[H5ڕ7X O1NUqpCqkإaڏ&c$MOk\k E+9 S^j^fJ)F( s,DZ7}`ܪ/U!Pk795>o@CoxΤ290{UJGFc&X%ׯtj={MCSf15q}6]W3SYR7Z[ ՝N족}za,1'hnty h$5S oC-څ"*f[6H%G? 
鐇i|NbIqNy GXrIgV,fƦO\# @jvijyMvڇҵCvײvc ܵӥ앹͔7oߴS9uQ~V-oe|o^=(/ ƚRhO!M3%ym,2QH$cj":qzLR`dR"9)q(VP˭y7jPn*<+#/?jƝ2Uԛ>Ph+Sזd4+W-[І]^WjsTX,Qc8~kO311IxdZjglk1 ڇZX &h͕vLty1 ճ_W6fIJ f6DJŽ۷(7wo/+]3=简pF}4R9}0!y?܌u+~D%#?_GNV5v[.A=d9wSI4MCAonȤ* Kyws#~+{W7}X׆\t>~Wm4 ~ȔLDZ!k{b䪅>nEL 7ܒ{? )ɤtl+%1Ui;1iEVhoA:0٣|f_B 8]&Տv>e ]+tc}w枠Ļ~)!^fpgR_iS\aC@(HnʛǏ]i :'ġXAJ _2,g<䈷1!$ݫm )dQ4S0F\]}ߦq %To+8Բ/9 WA,w\g*sL1+nUgdEwXa=}$qf;QJd+ ljcZ,9xg9b[qA,Ÿ@xݦ=i ?,%Xrlξ\:W,=q@lE1;6hgķ#.az4Y.Y'r~@g6aJ:K'Sk-4Û-dJ]뱬*I2ynYAc}OQ6vkz)?\814(PO6m ( hyp3K\"iD~U4/ ]#1FeF?z~hQ^^T4~}ћяUH15RA}$6_29~ /F./_(f,]Yv1^(&Aen8zBX5]^J ‰PEIw]`" X`Jr4e𨥎ҰQ!Q1wBJZhXʲ0D ´8>dnh,F?C 0t+c\^ϥ|Lj*fr]Fy(Wu* oʫv23~I+P|ċ asJ. 7%{V\Q &]+~(C-LyTi!N_8~%P1ÿT[Btҥ~$;Sd*Նv K]4Yq'u+[Rп?rIt~^W ]yDQdB-|0@RSu&.Z:31Mӧ_R_6ĕ8Hc dP)DݩQgp9% $4ʈ!'-dC>P]E;7]wk?M?Y+z,1vOj4{Fu}F#"}9/M{e\P lĮ{Jyܶ"Y.vOrK0fH{;BAUHoURSئ~_Ra̘R ݊m4Nb6ripO'鞎7t8 ] G56/5)ukvf ns$|L9`G/ńxu;-+.*㏉#H^lÏ CZp";FKO>&aMGf2sf_=ul^WLGѱcb/`}䕗S{suRI_?G(;o9{ԵHKݹ&9gWƗ1VAkwl w) cBm$ZM~sAw.h߹X8a%"U);/6p^9siDF,˜rFu ~$ RǪ4bKa0#lЈԏ!҈CyRG }D +׺+Ԑ-1G|o1Bj[멹N TL3o5_Jȫj< p7ڧN!{|1sZ0 wZBO/C P~~~Z;V3sf4cvx\29P)V:躱0\2y99qչN9|h@![~q&`Sz(s]i\~Ie*`ˏex n/&ś2-Y.r'{W۸ͬ0@tE7 H*v[8-T$R,G3ΐ3CI!lIN@3.9ǩT!Qʌ oPszmjf-f-)bNZ_~wkXUcrի?}_] =~+ϐE_B7‷ךe&I$ɻ$yw$@j'w>\j'.jM)STTHWxH\C`V»E"Jh5בSP=<_%,n緬̠H|l_iJQAVT:ypzmOʩ{`՜sڨRXrNW_P7_#23P% (9ǯӄ P4i=)H)gE`'nJ[ΆGq`^")O>nxnl?W66>!5qQD[[KΓŃ,R zE&!AS'患0.8 IxkrpF;߀"/;"*VbfWV쫁1`Ǎ>U|+q5U| !۸nJX))Uj,UӅ{O^s/}>#O141m{Ma$r!ްX X lR*!>ݬ0nsSV#S4} q?ȗl6 _0@X$K,a$ņ$ΤlJXy(S2D{3!qȠX1 @?FTw~$1h;ɹ WQګHc GhgùeLfԌI*@EI)m+$b +M-pqTug0Pr83D9$S%SF"oqVf-M%B_ɖӛ>ꅹ]39hKr/Ӊq_.*8lЮsvܠ] f6yH2byy=BdP9-n%E<D7OWMMI`⸶gxFZ@7B5{FTnqLTra;oo!L~T8:+IK ^I햰ŴUeLſk9ww73MWF>ג^2M5e(HMn\mB%[CjFKƞH3#T&x>F<@IS6DĂO8uFAa~lx-/<-^r]ƥO^ o7z&̌۠_|/y5nXk~!+If ]2! ?- Ц![5 cڄx RA&uGD9o%` ELgX:)!WDIa%K5Fb;߈~>'r3~u2:(u8cr7g P7~RNzt3t䇙l閕|*RC=eW7s3rt|&fjSq<'[A9Y`U-8t&zm5. BrqIkQ}TQQkn<x/n#JhX¼CKN7Lztf'R1!$ QbD Rd4%;f B\a1?Tbr/OOw.Ř,]|?rxhVMmj7.7͍smmv&s;ύW-7G$hǂzٷSc66&\\:2H55IyP?] o6+F! )}`y6 kȭotiR-;7's4tDQ!ENU X:vJJ#D(Q&Z!~ M0ER0*ƴC=?uvڥ|BLoYyjNƧ I3H~4|+on7?Bx6!qZqpuURtxV *moww?{{o7H;c:mq tV e,b/;^Ubo?:8WM0&Goq{hlmfv۹g\NNC™NA gU+(xNjzӖK; YQ֚LȺy0V!8+ 8PeW YZ=`MP$yhFƺpLo dԜ4p]•l5K*uH\swC0ۤqH!w#ĚZ`e4Z:K*2o.aBmtDHYImggvm“JlUoIs4] ~vwAT[_/U{\amm,$Hjwڣ6dճmcj{j?l@[[Sn7goɋڍ{ٛ~-{_|8GW;^:̴ z=TC?=Wŀ~w|+ u`Qxf0a+kj>3pAo/*`5=$*BEJv^AgS>XV8:N^L#t4dY\D8VH )\q~Q]X30`x!G<\E0wlۃƗ~,4ϴ = ݳ o0w+/rDEg2 -m1?M0Ljv/G= (S)y"ШThjg%[+H-+-@ª5C "ЃFs4AǶ5­"uHAP66:ˆ>OӇOrD˛*PQyޘV԰:74VGORĖ^3{4WjN󻅫4ϛ4W[W*[~jk n`j ᯍ 篍u/sukan?LB0@̍$=</b~ʊ`]߂}L7>e]AM{ZĺhF_=Y2W6p"C_0ޤWU;΍xS_Y'%eenc#45 mrcjy9}K7*L)g&8%I$A#N]V4odSVٛF`+<ה3LK^PNnzsΫF}q4Pw=2#$ WYc _҄+~%."<敷i-ۮCJ9g$&Xap%(K)IDũ1Jbɐ*N +zfw::q>lvi]*!5ka](7k?ڜ׊cx6~ub?oy|~Ebˊv2^yZENm;u73" {ok/\0~^Y枖͞NҊH buRc$*s~UƸ(,UI GR!/d2cDeA^P>/dJᔛOOd5|f>U)r3nkN8<lEtNjRŝP mG9nBV0Ӱm`PI)[R{QE}x\Fs~ V°U=9½ASҁTt٪cUcZ)c%uXgvjLgn~gSf$cDdLR3FU!R&d:tF /Q8V&pS`GڷGD{ pA.d 0$?dXR7WZPJ+tu;Nra[0v: pa g Zr@[197NyEp#nSaL)8n/BȂ'LU~p j.V ˅x wiB%ij1É, 4pHIIbSR# ;,B>. hmLHdBV2_~#`CzckZ\nI k_T|$~5V妡W[$˃s 2fMHǰaMR 97p䧥u2>`Ø٬,ң@I~]ZpLVuak7PH``fJ? rL1>SCzxƬ,і fK*mKĞ&ӳcYoQæH9]Hq%`2щĄ#hJ }RƄE&rBnd=xxsQp#nn~(j`C== 2a \`_it\(, 7yl1HafOtB(5f}_| aPO#HSM *Ƈ~?<Ti Lx2ߣ3DVC2#2*xƸ YppFHF X6\'IHp" |Z#ȝ0?q5>E-g {*O79"S8_#"cvR/ gC_Av=}xG wT=%u1)idU0M)ġHa"dbLMRH70z\ђ3@LCA3<b[n/.a6ԍl] ^b&7~6fӑʍ?g0Fr4R0BY9 X%GP$>Q“!ilyY ql$2qaedN0&&  (P)S-U`|#KvmD`P1ŸFᙍQf+ m[1Ok` 4IBgf w3ܗHiPyMy ?e࿓ KкP '05oP7`L*S5oP+ 8!=:D*C&̢TI$@G:X;$o8Xl\%o$V/EU. 
KjTQu '237C:3>2#.2&"aSˀp !=Ћ5AE=~0MtM(%PFF\1c[&"ȇfikSbk~{N5[5rf/ӒwQ/ԦjLwR.ݸL@So{c z@Fww&~*XbC)saש9B&bC}"[N]\mKnI#5qqu&=|& `%(p&iH "BXv+JŦeZrQ|88| aYۍn3j5hBIm,KBR"$" h$Gaga6fVhgpInY8a1sYRk:B\H+!|1$WBEw/{݋{hn7ثEIq kd]ēumNwy:q+Q[PZo QÌL)`ĭ[4]1륍VRo_N( o_ #n]?0! X0[JN1yazgaCU|-OwJڀ\ 󐣘頹Z|4-wKv^C.j%,^Z }XBRe֦ `5gJ+^EG5GvX<``ػHncWz89qY$؈sy zj7l Og$fz$zz$,]zů"YjXcږܳF_-;?~{o{:zbߜ~Wpq2IIf"PG?Pn*嫾y梾fRw}$< oXf"г;CH{GGb7oP9ŏ۸a^ˮN򡍘)1I+7Vsy}V{גqy5-ml@TY4amTMagi݃qb(=4rQqۓ.gH=㭾FLKî븭gDf%|nJD>nzrM#"Ԕz` 8:sj:vڰOڡ:Fja1Q'kCFól#9r{fm)cmcǜ$>͉P#ZoWMJ#?NJYS(Rf>YBtB 9 :y"0 '0l.Ɲ|qmF{.__6E}2@|[}oi Bm;u}w7=Rys1}JZK;˿S`ܽWgrG?RP숋VR -[h+yrƖ֧5Fv>E#-F)mGt Xt gI!l%™`Y y^g}]Gʯ3Rױ?jˀ {t2ʗGSdxfXiZIn!hD.c"$Z{3MBL:%A|1Jң0%O qBU.ir{Tg,Wr42CF-.X)+--'m*ؤxgKYz&8n :P@V*: ,+}Fy>MoA(ě.NS]H;f%Tx̐,հm;cF8R癟i@BG;z'=3Ah$hyuW'zޱ;Yt_ QYTX1YQ&HeY*Eȃ,#F:&R,ѹY`g,:g⑑LQJ^ Ux\yǫ b}#jjȮnO/; #W~}+ K_y|'ꫂ+oK6ʯ}#9!Vu?q?}joV>s3,XӋ~]k/vʝ,˫]۫;S 0.Q"HdWz?s[cU OFgOUpM7_}12=fا<. 1_Y WQW[-1E܅wU(Sֹ*gN`憸MNwG2sAO5d*Cӌ98VO5I?]|{[4/^Q5qf^*Aw4?e4@Fh陴i4܇; O(=̤O;{)}@H>)Vo՟? }O3Co9 <F] xRiHm@Dj&*91Ql3 *db$N)چ?J( Nj@<`?֩IeH-_ieWyi;+\EdME!e8+Ĉ2g ԛ)HjW hE,^2q"-4ΡNť5je'kRIpĀR8^j)(9v;( %zf2ϦRKMm,)S3 T\jBug9j:Z?o4r#xePF2(M+.4=|2(7%>%)t<5=IWpX7Q LH }\qy|GzgGzָG̿n7otr0zZj_Ol&fz*ymx= jo:8M={S7yه.Jw$ 2=:h &dDf gőv*~9]ёVYL}~{pE4 N_\\/oهl4cYwTb{he|bֶ(X c{RR(HX(^eE!AGsX29 ۖ :u- 1vm⋪<ůcl$!g7/!+J%)CBYy `I4}+OoY~\YdXkb9-LGn<* »l#@Qz Z\sq׻s k3iFHIfo0+ۡmW=u@jZHbǽiG P.Jc@!>hqᔏ6a, BIzY>XcʾhhǀIC)4D^xT䘌RŐvCгnqJ(yIi $c<yI{ E.k!+\5Yҕ7;!x`''Y'm̘YP8 g-#䴚6P^ .AJˢ3UdE)Z4>Fȥ5 kvKjfm1eVP ZY5wn4ë:6n;Lh(/JFi-pr&8娚0ܵW`hLj !\3:3֠Q/Ć!IS!M൳v!%"O褊aܢ;Ԩx&cqht[ 녖Aa}b} AYrnY?}aq9H؛lZJ3$hVݐm8iٶkp76רr6]:6WX-]%J1n]Z;>GL%EY/:cnl9W%x9Ė9WI+fEш@TW-u!k'g-l0#PcwOx=)w:&鯁<9b.O,/8RͼܽXǨJo0X9>w,0k/j5{J4a[xUJzaFN!#xdpqs:wIS1ʖ?/ t (^FϬH*g|9z\Slj\dju9ڣ7ػ(z؟`/ѼO#?ECnU3FsFk(5(z͙cZ㶑2/{u7ހUvSeKR*IS߯A4E&qɖ!nB =å; HġwKQ!UiRn/%M5h4?hJNsc7H];/}l(|_>{t> 3[W nn\G5zJl|UBo{8}95;#F(ɉ.<.9P23 @A ;7bsYksw|v>Kq~459PݮqLsih 8jۚk JpȂ3P__Q/K{eUS[N|iCKyD=lhm*]}ְKiI_.ܢǽၼ\^_.AeZgZxPT?!zmjR|?|k˙*B]$q'AC#b%Qi=]ZGpU0] z8@LE@bb8jjLtFVn=69^SD};E՚]ՊDMǥaE05Qji~S91.թ$\jh"aLUPR$ c<يMB> fo# =ejm&qnf'#E"~qE"&uύ ĎP`ja *;^l%p15@ ټ4ts8;a}0$"PR lI_x:/)M5Hk|.--! \CzvJ~SO 8A2 D岇jMj]>C =.nPQl&qڟK]NH?:$bvy*Yr,N%|(S"lԩ v,͝-hIe|)p(1X:!zd0_9ԌlsCq6y`TC7>c4FSSfJ偒oÓ]ϮeC~;.rgUUOɇmQ^A|xX4WϽYᄅ6=o/wr? r{JZ=/L٥bXmWUW6![!g{< \"y e4OxJL,K/j@i-] )dȗ4G%?\ Ľ1T j/?de!9Hyt.%"Dnf0$hմEU*ׇz R F/$F ΛU a '+ !?pQSj˦"i57i˚rc瞜WDwi4ޝn'.!el`T ݰ||zQValihIRoP5`VՆM2ÐiU=$%`sPR'ww1\!"M6UKtfDVMٮt5ƓBkM# xF}D1Qz 5v_$4g'tfR%1FΒ KR2g c֙\7;hTNsx_DX!idYYt+^ $ ]zU+kB4FVzTPW0~0+%w E$V24^9%,wfB6Ϗ%[U/c't1[p[ ^*Nێ_ރwa"}cHi"G$OުKﭺުKﭺ{2u4e6 0@;I2Nd< LZSNXQyK&XK]_rK01T qU8"gyOOL.Z.2U~H>?>A1YcmTC>ctqp>OX1TtW !"!Ͱ;E>b#n"Frk d[ZdύTЂX` \,WQkA~1aNn>Yo[7FSF,6-)@l\hw2fgv0kL`2Vvw"Fg`jץ=k!3V4>t3z)N_-(:QG‰6mPy4+\sV/v~˺v^.BiDD fxN]]PO93EZpm5B߁z.mQ$YM *ef IAI3Trd1g)ɐ׊͈M2~^II`Go Moö 0҃p@G b Z&!vux pnA]Y4/suhO*2eFhIQBiY.` 2޾7*$HB4y/ E5Ѻ\./5MV"X2g| $e|AŇ2h8OPx>yYؕ+s!C gt7z0wvfY&99"b(e(N:c H(3oЎgY!sh#eR˥VYe2E)/,eLw`UN>A){6錃dni<96 D5?N4q7:ػu w+@ btmr{P`c/HzQ/꫽%yq|d^nվ.M(~JzQdJ*G%I2+8~OL!+uH3  glPNKZɲ 5S$ʦ.7:Bu-MRq̙YASec!\]VnΪGzBFdٗCl8|OK}F%*}f$cZr}%Z#GlTQnT[+J49~w[:˽ +7r^[Af7 >-_ s2dhp r ]s$=A`N歐|i('xp7X ڥ%c)i (L@FhRNs텞l@/ESb214kE짷Y\Nq/&W%L&RI ʙ"c*^$ )~׆R(40-RE3.)0rBD&Q_"ŏ8l`c 52Z$PEóaL]}EΥ`tS)L *1;$ft >)(e;=T7s jbd4 G2Ѥ0|' !QRHFqn>Pw&ZҞRA0\bM7m  4vK Ǟ=nǷ4bЏrCk&GvoXa5#,\W.}G|˧'g~2@jlT/iCeK/_Y.6K^cUPy_\E6l@Pgry}uOeEv}EE,:0fNSKL=\Z3Ίgu,;3v}3_>!DlHxr~mT؄;S$ѹ\kg'/x>#WOC޿>Zf%6 4q122V(2p )ygy^#ulFYhMQHNאjQ6 Gk{'d"dMeN3KjLw^+n!Y_OSc% $S3ޅUҚO#+%z:dÏI{BiD! 
fxN] @¹tNE\[2Qz'/3 7d g}rD3vj.Pp0n EҞb Z&x"E& cl=mvC0 h<g(=TZU/ihUh,64(sWSC0q43khT.xzH'Dj&9<=6n{85tlS(]ED1|{!5t^B>^w/md;q&1r _q'r϶h0:xwJ k7GV[pu&oAGPSt'g~2Q٧z>"B2_/|ŘX%VK\>^/,xBf2}Da 8bȹ2a-."_%SXAlc^ ,I{ZB|I6C! ut=FDq"',( 6'1!@"&UFwm#GȇEYn &&'زG39bK[Rb-bY,UO ' Ȱ* BbH;x,*b!rNP9-R{h19zΝtJj-pԄB3)V9 tNDJF"Ts%R-݀';йw]:?Џq\J #`( oeEQRgPTD{L.cð`ν9]ۙy@b8'DyET~<`>nx=9obO2Y/]ka# ]nOtGDwOtGDwT?]{%OqO09cRKnҲ2P03Ψu.`a;XTz]^J>TT1جO噯#o;n?K;.;|,[xjsSXN^3o3߯JYUYfpjǨl#>;ԠZ jY!wulo!Z ̎vh}Ewg~;(A0=B>a;5%`.-1|+^Qe;xńe}ȳ #zJ5 \%uEY)=šwY˸ׅD"  /s7)vw'kv ؾvJ"QԻ"+7oE9bc#Sz=ӯdIVU}ib橗ѣքr/3hgVjGz!uZة3ѥϩ֏W7ZKغ$K\y%bF=i3ui~ v{>(@!FzRi1nL8EF}Eͨ3-3gƽ4'4KA|^rs'ktKR3Y/´D[p)r\pksB]vc빋}pW&$zNZKz*:߶_X+"DUPDuj)6/8`ZD !i\Dda*a-qZ9Js!r.}I}e AL1Iyu.G޵}מ1N(O/zS^/~/ŗ?ǜУ)ґ`%u"ث/{ڃ+u`B 1cSRUlx`ּɔz{3IE|C[O&o6Bx*bˡ _dF龂NF1$ea,`.5 dz2csd(NPNNC.u ?%WJȂ @Ɠ |1:yՆ9mAPЀLI {?C-B> ðNQh9 =Yo^ : VJe$ضu.)F 54.:aƑ)J!ǵ'̻QQ=FN4!3'Tp%m$yT!͆2 C#f \d¼/D+'b|&N n s^!~(nvGq;ovC,Yݵ$P5FpZ 81E!h|CKFyNCw+a-06'[Wx>5Ѐ*~ׇe@wUM1z-z =uŘxwy y1,z]WΥ~~zw <l-*^Fir>ycSrp%#KhH,wI1-g7,WdYI?!Kva`"=!ⶋ؂.|9Z.QdCy/A &Jvr)5wzi+.BE]7+( ְǸPЂZq\0s+qlSoLc@rvK~ 8xEˇSDz x\~C\!J90 2Fi!%ׁCʧ &y5Aȓ^i9L5m.)vF|LJ[3`>kkċ\@C!>|V +bY=<;FClJPL̙'p%}E>/LQv@(x0>ԢƟzX^Ʈ$M"5Vg@V6|PG1jO5Q#yEhoޚc{'V ҉ 5E IJBg6Ꮣua{ b{#I{Ojsmysk]OB{;r;Nz ; ll K-C^'ls?x3}wA;dw~:k.}Ә1I&U$8xyx2x y6o'syOvQR V*LK^0CV7VF=6?=2xJd֨X dΟ61F)Y6se:*9I)69Ii朤=99>MeڛCz;u ޢq-|xB% $;Wf; Ķ^^Tov YNsγ GZƎ22;a;үZxfqtܯ йU<G;O6{dtP3vm,I42+]x6SR@?#Σ܄G-%pzA 0ZiJ RYK )ﭶrNs']i7NnaGwګܻ;GQ=8(ܳOcܻÈ#1L&CIA,a)g߽QQ=NM :W[!#)Q >[6XW2g)!-JUC(A@6:Qv}Ö^pҰ])1I5!:bfY"Zc9?Uo_ў%QTo%H*ڷ}1ᵊB%cJ\^*fa fؐ=9!`Uk_ oxwK_s Bh4x{6^.?XҎ+SÛQ_BC w!I\bo:U~OymިWRɬ:Xm^WKQv`(?|aHWWptސ nZ++Kr_t]qʞ= kG9[SyQTٕV Rk ՟.87T!HH__եJX+]Vbr.Ĝ+89΀W5vV/S|<7\<)z|8@xPP0La@pLKJbQz95bBt\\~ml+ ӀQ=CDT20-=cR!O/\Avés1rCkINpsscp(O LyO{$`y3l6&{a0*QkAOBlz5N-[kD'=kPr~}f0u #  qt($7ZjS"%bzmS:$LYn<%7D0t8S멶#9q.{u?H*Լbx|PP'1A{g154s!S K03@i5 :* PNY#9u#\8Q*Z `WƗaX8l'8g)aV=>]ߠLUcT;LM9!W[]c=*k{uhsAu6:m!T\iqAd&u= ;X"KqsO_|QW 9Žc.R^}.sY/ %B|]OVc1-0tųi4oX]6 9a`w}ew.  0xg㛠MxA#eyT hՂ+ ϥϩ֏O`/]٢XV6#%M(#6U{%>̓t-P!{i7 {\c~_25OTӍ-'@W2l~m)R!);^!q8LWG}k+0Kc3{BNaчኪAiϘcT5:1N=Qn%9!acV⧃M>]qv{,Kj9c^o(!C[-_jH 3Զ:6̷>=-T䞷Wv_fa/󺥣~sޝ6O^JWR jO,owQ'D2GVY;m{fI'=$C 3u`|yUJӔӠ4 |](3a^uIz 'WpL5qW-8J !PWmNDe7cJ0t#D% ?m7H"uԍ/ݸG+^23kC[#AT{EEHѹ)=b$y뾨V/߸) %8"ڦziUuiV.ZƎ?_[ ٪HYUЅbxuPUEq/ٲ:d,5#tpD #v48. $x'lE |h[XNN0K?. r\g:K|oa4r+}ߺkm0/ ד0_*i\BiEiE~ EX"A7ȿ^ÓV{ӿuT9Mpg޹2FglUS֭truj|wh`!9rsυW0[#nqgC@cH1qBNr8Ȫ?W0I> ADϢ.|jcVŜ$"w*Q9?h9}bQoQTzF0DS RCtyN /52Fj5j$k'ßH|;vëS@p(49I=kFط CFXQOs2?E7q`Ł+l0A m<18-ZpkLcDE%Q⾺ ŬјX3:yquo.eor84|]nܤ6@9͒V'J|cNS%??ysm£ Yݧߞyjz hfoYD_;ng2L/w-,}Nj}lmzx8Iy<a >N(|5f٘z.O'S:pgVoX4t>X6L\:y 7G12gwgw^bp_]?z ٛ~b`ys}|:.# s6_ =CiqHHR&Ydh7͕~ok3 uPWgh18{\y~1G浻< HvUńߢh4tQpD}pdS]4.&Ɲ'Ty_^_$L;~n -/5pJusdШ% Ov}nvٶa< t1r)ݻw<$ OH)7:pǫhܫ̱&+(36˙/^5/̬>JQ9Y)HaR3"\Y$CLr¯Zs[٬ۡLK>j_Zs⒕L.HiqQE%> #c\H9U"h(18*X5uTc3h]  0ɼM;U+B,4 0( .L%`hJ >xc@$ fNX &C#GP0A 0Iޖ0A3 5cG%(4H"0;'ì s$ 9hdw0I 03j 4-Ghj\U]LF"KDFI4ȡ1lpX!)Rk#9kBʀ7b( HmD@K/`bň5K+i|1Q0n3)f2*%1 NsNvtꬓH H3ꊈlءŇҧuV%fdgl8OSy>R[uKOޜ!>AuN m>_B_9un'](PfjJ4M~/e:*8Vs%)G]xۇXjj݅PޟƼQ:gL$ݜ c;Q0 &b~Qޱ9>Ve7d̬@m*U`mĊ`z2ek%Om~EQNF ƢoX +Y|p?sO8܈*. 
1ø=3'W'Zg<&לNc:;(21RjщEjcY9(ԂX4TxI9΢*HF`rԲ+`ȗ43ICU֌9K,G=M0?f?]rX3`g&:ӂIC\wNI3}hF0MG3@i)Ӏf|` Eމ0]/̥[lT<֫{jW2/N-h,x#sMtZ<.K+)B43l~5*@XT~YjSݑpqD=/ϬR1v[mp+(Hp$8-]K 欣HKJ oN@N`b0io`68fmN=M-QJF[m%HGMN?HMF;x#tୁک)zK)6lI/=T"k VZ(ȷ0&?5Y+7ӻEQtKdTZ*C/Ѵm]NE)%>œ;9* N|15M֭Pb\."TǕSM5𵽼 rk.̿9bLHbUiJ|%OoD*Ux~ !fgξyOş_en{Z|YJv/bZJq s QJ>Kcpdv`"2EE$C0,qH1 BxF\T( X(D_@ej&)[M4S=컅YΞiQTrR ?i-|ZlBzϪ'Ak:w/Q}21KQ@!i@cm1TQVUL;Y)\[fpJٵZi)/ji 6E1 㧟Xw]3Xϐ up١ >?RHVKǑ$мNSE(5_q!88{bV:RO4T[;tfR]7 +8*$/4$!lO0({u~$fh&H %. &\ssc5@^W(_}XwK: M$tvu㷕 y`aO*y#rꈜ:":";󿭼cKcd</HUE)n"ƉD$aiAJ@=3WWwww:VyA&-m& 1 E{p]~O ♓P{/K_LWl,ԒɩPf6 )н\1A id"V(K !XGgakeN+ӌoq; f*/(D8, Gg:F vHɄ8:):r UF{ Xb -ȭN`5bX91)SWp-DZ}xh "*z?#37g5f|{uQ| BsefWrvw77p}pltJ'\?cTI!tQ -7U`0 7BP) [pk쀸Cyʾ8O)⼞}QE!dEiϣI+T{567X>. Mh gsq҇w7`Bak!ٟk?Ù")QRriua2,b@*0C 8VzGWzK FDe 8hp:h\NbR2k^̒F0 8.z9)MPQ!HBL|=kcY0?]:؍$=AZ ꈰ 8ɁXg`}E‘@1#OQD)d7R45/=]u)P8#ayƜ9[(%>ʨ;mJRxs$wu"EDնJ`N 9080Ah"8(0Ŝ8Ϭ!bm,!8DKukoפ,FĔ(hq|6+Z!{ Ir*e%EԆ`LdIDKĈVMíl9Y{vI?JrM.hxoagujO9妚hyXuR+@lsJ3)"+pAP002ci9^.}"Y@%%H&0)ǷW=lm'U*8X @EEXsܶ*8Q#swG"0By+5{bz@kL#" 9[u}0woo~L_on[W8g.ї(]nn6E"tk_/?=\YzցT؇iGWv1^#*SXY] 9ih-h}:o\9iva+&{"B?HV9=48Dh`$* Yuh{{3:w`߆O.|kG?òtHHF-,;Q ˼\Z$Z7ЅJ~XN\|*ԄN$ 9W*̺U%"mt';˫GWv1#qî;уv1QY:l睡l.=fnc|bLǃgZcBਛ堐Eq׍A!OQun0p(GR&S;Q?DY+Dg|L= N >7_{ry~O,FFpNHTxO^|lBCDJGrpmל/ngKŠ^<-mFoH NیE|IJP/ţRa6w,f04u0/7i#{:0Q^jJTc6y=ڷ(\"];H25^&B'mwb,3ڨs"SaZygW*̺Uȅb㽨_T/}>ne=hգObC\q,5`R;Rm`d NKD*LۡV;s0IdpbGC41NTvp{jǨ{F#Π Ti^!<6,R.( EŔIuvgQcSjZ&dz2jMHn/8,iC]pDz-nj3`",&0(ʨq:s"hkŁ=]CJ*RHe8%UE!wI1YkuꫯBR#T0(&Gf/l9D"mq0℃ٺ۷ƛ%;U&y_ N.-u(ZƲm ֡BT-^#>Y:Y< ~/N1tcq1N>鱄HG]p 'Q!"= B^YlwSxy]jIN}"?@,Qm-IX+Kұ,Cy5+}w9Dݦp7WB;nz(DQa18&g&n3 X8V3߻+K3"Տxj'A3_gʚƱ_QeL mK+ΝꪞdvHH"N_Pa@b;qh$\fhN!Fkyeb7}M45ZIV6'OV 4mM[mnU1:U(ݚg96VCm)zB3Z[6e4M%6VHP0.F6ٔ F_HxG]>d $ș|VݿŰJ ^Qݽ͠rkQ3 ajY ]l]ֹvmö1\Z`T0Zg 6׏c|ț5LެᩐݿNnx V=;^dU)8v>p 3kqPAItB5v<e<(gQP[ 1 TL Ĥ:Kn2_U*'&[q3=u1 qAd F,+a~Yi>z\Q<^M͜k򷫛pq3G7ۡocnc -?^S ux-"ӄ"( A)66ae&x~P&\Q&)QB' cIAH@ʘXD8L HR2(L/3p8'@],I104Uq :IKZ5oE<6/0jډ̑$RH!F-py64@NزO5қwOgr>8q4z rs7hJvѫC<DRIS \((Hbf z@)*4I葕^ 4|su>3A%oʙmO~q, &d~IFv2 l\Ti8 7 V$JEWW_܁VojO8԰€,=&M ϽG?)ѓcϦZ9r%]v .pl?QU$OɡȨ"FuPdx/O9꒢Qw7!*7uC)NTfH%>X}Fs0n;®`pmb/#ʡK6rƳ+5.rOޮS{mkkʡ ;ˮiw 33dsӖOFL0%9,6~AeZ}уyǺ4oWw*Nk,Q..yQƽ#ïj9!8N`v_{ )9Z+z|\-Wzy5y^qNǑqmܗ-!f7$jL:zZSVy3m TL"'^|\΃mB82 7%T~xCk"Ufb"Fke1=N1?KeDMɄEkcrN 2XT(c6enCBAWzqz+lMҹ|Ws( DneXfZgAeDn3`p)Y<ϯ8Ns`B1 cćP)k 38r`F ̃gn'^Ӗ 74kW8tXgagp\ 9 |w}\!QZJ|$OuIw^!""EbW#%IEʩb@vy)հńI@Y тs,0vA1 m#"TD 2‚!G9%/ Ҙ56ϝ5n3>Xo~I[R0F爑4G|2% qi%@T0MiP0Ǝf1rSLZjy(8n[5-VK0؝9-aK.1R=V)]N [WPeP )Kq|*;_9*Xѭ`o~㒎%Ѣ2#ŽQ^X 2ʎlw? g}լ# <ψ/Z/n} wnƓ_P%g6lK?-$ j[?-v:wwDG/x8լ/PE&W^.*/I^|'>RJR:Čs=@ gS`i fA ǵևZ<&*.Q1Z & 429#-!.>i\g?XF2 ̊AHiD>F>?bm3Ba wR+RNb"YSV JږѫAKRV`N*B| 6/|hO۹~db&qz0G|+I}6Gك;FϧU،i u- /vzlv(GףT:ڼp򤟬(j=QP 8arGA'!2ñdIw! +ڷjޖXxHl .Tõ4Ol@ScaŒ79u^<ͼ(㽭|՘?Z-1:2ib6K܂Z%c~Px|*&b oln=h7EnJ>ʹi4z+8ge^Q8nB=b mMՖ^\1Кh}SmEs)HvQ0??b3?>~7Jjf״n;۫wv%oD?~W=v_fi%4>2b1aHaPI $F!XWۗx3vIAzXmGYKZT[Ur *;ߐr"v-[j|SNmQos7wH}'7CW@npڳ`mYuk- gv"5[! 
T0CwiFn~.VƐ0|oiqݗۅsq:6O"NH3]HcǾL/=R2=?'gK!|5Z6-Y2N i[RhSL0k[s2$olYA]lF@߽1dQ滌ۦzl]<Գ΋10lEҷ3xLN_~Z5o?dHcz=yR a\\硈X"kmP"B&o8lQ+MC,lLd_(YTOU2)wY](dxD1LqmiB$$a2qPQ"*a4R)∌}ϧ5ߍ>,zZ>:лQi9"AIo(YҟV1Gx0Ǖ?z ەv)sԏՔ,*D,PIF$q?;r)Fۄʊ,HVZ N; DWj*hA)њ@1 pjiZ tt73(mq^NĺE(CjCMS\ Z}D$4WEj mW.ziޙuZʹ6) %PoH+W F0"X0&4"4EBI0봶UJk:S[rm}ZD PٷbQK5Rab_.!zv/d$5~p>ނJB }r!tmlxZ$9X i]1lEܫ;՗d`5d&xx{U Q£9`6X\lgu |6J0#ںJV`ždKrE;2 "}zHxΚ> aNۼ?לD4%4"ԍ.3x3z شF9IW6o7@&KJkʷ@ mm\mEQAsA;MLM#PA9 6~dgVy 13WGvS| |ّ@kn/k:vJRNv 4Nڽ+x9R^zkRŸ0ֶ?o~.G#cCWq9Խ\x [ؼl6]43|..!o>e}hb}3n03o* wİ͡Bƒ޷^d[J3-ݯ'D\pFpSHqCJ" %^کmK0b  (ٻFn$W 9ELF*eò_$ۭZ"dw+`-TXE c`-br>hNhC)>y$8)I6O7O' Ysm1,/V:G`ѓbsb SS ^1'txP2]NV^vO`K+l4̋aZ T0Řs.L0C ǻ^`<,[0bA/=1uk]Bt-xAzW`5iͫkuj5i͢ T֗ۗlEϿ#}z1`vя[FK0Ax:Qrgp!?yX>n秛.kc{7h&mpi5hu7Hn/\`JX`CTs)#QڛasaY]dV&ՅIf$>cl)V4 K)Z"P@p0Xifx1&TВAא2SΖQVB~vS(A)9D,:%RKǏ|s5$I=qJ%DslE')bQcgߘg(ԷY\!$ c h7kTR'KL:]x3g%A ہpxǹPKD%( 8y_#5[!yoޏiy~ZFaѤd5*aO`pC;Ska~b'4Hg;.%'d%+;ryhsB/ *F"ؼP6IXe>X6vBA&Ǣ  b KWAm,Nn4GMg[^hOwX73X O` ޖkv]6{.m:ȸ8|t=_كySq##8J$:~$~;h'?k/>ħ%i{/dWk\k;Ęɴǃ3k1$4fSԄa;l1/ev8B믲QZy5Fy4YYr2yXVp'Ukfh_1 N 1eڣ֞O(BM)0+6$P] 19S pdUU:-Ҭ=1TWj{E!=l Pq*,F`VX+4-KY稨Ņö5i%asV80yfZº4R3`o~/T.C5%Ӌi:} y2ЬwT3Ɔ۶?f;&vf }7Ap 'tWAO'_lUM' vAUo:2¸ma:x9i,|RӅTߙkoۚT!lCBvMḖ:-O 95>E ǵL!l!Sʉ1/(m8f%a ` qVbje? %G qe:I1AHG dq3c4hA=񈡇Yhx+ # >m_=j"" $ RIkJ?J+fqb.* La9cQ),qnG|O Vh6|>| JJ%,/VEJje8cUMc,&Xal0b̑EԁUee!)Z*jvJE!*Y*mKZ (49#[fVm7E8)t("' %)'pSq d$6Uu X%QEYjtSunVZ#0E]G9j ((~ljC !VK$6eT._=䴥OqVDqlЖH1Q5[*^Վ(+AHLtܘRJQ|OGؓ^l-Dm|e7g`e3x<g}SB7w-|!zey5Kb֜<ҫf@XGq:A u[>cbå:V&Dp|HjQIP%Z%TA [Y8+\pTU:% TaqJ)m)DyXp\PwN Ċ,O N`DZ19 8-xa=UT0v@QQ`(ټJҳ&>FsE4"JK3Gp ڏ{J`Bj&JffUI$hbg̚JVv;)ȄplY*"0c\)LIug4pB [1Ӽ])ȁjv[\GӏcWSDr+`F@h B }Z'A5Na)t+E[ih+@w I9- Žd<Ώ]3~\sN*csCX|7-<#=mzBhsH7`rl k ^Ian?z3K xǤ!ڦEzSƖ|º{}}Jٸ^ˠeI;ܤ z e&#I-@*B7]:38/?İr cA6:SϟΥ!M5ԑ1sEhRv97S^.w[ҿ-_/*m؇ug {iuRsD<`T]3bf8C3 JQxrWoǕ+*4k!"Iw.NeKLZ¨1Ɛ%ەq6'_p"x#~ Y Tf0t' @`َ\m1x!v:W,4鉜PF{x \|JJ\Y+Z~˧18gF tSNW;`,A7OKb G퉣KKɘ8\< 0L1V3 yU6 N4߂ZFfz$Mꤒpm۴=CS ,UZ5F}uf>>k~wmA+*wz!{Jz殄%?=ox,~e3?) 륿SHDRۀNQBQӫm uAMi`uw/߄xICNK)eDzm$W=PK;1a1fhZn{)BH=Jd7ljy =zjc/S(8.)b_ZOdsugs Ma=^ /X/lTI 4_gč&<9M*^,N|{}RT-nY-E$~Na86%m!*#r(pjo<I%rsإM';wBc]4Ls?KIO i8˿~Ry7ZH+4 Gߵxst=zp%9*dRvb-\X"rJd'B"Z 7I  JĂŭ!xܒ1f; ,fEAġ5-]1q{"V#^phwD8XM:?^`Ҝ/MVDbVL(Mox¬09p'xhI/A鲂Vfx[V34X)AaAn{}.M@Bk^^O=f.7ї^|;DBKAd f49Eɦ 2aI?INF'Tlx# |xS 4ӿk)Q#*A?GVuV_F4ur=̐Iwˑ1${TVu+0#0wl0f#X9際:[ol$TRQ*pzJi!9nID"}xp27r(fٻ޶lUљ1/LK Kbwc,$ ߹$%ӲZ,7tbvyj3pܤ2lP¤&4$PptB4r^ԩ/xQ=l뼘Ш%nNͫo-tv ˖~?0/싦+[beotźN$lka1;[ba)+Ep؞"(v- CurzPx65aoIæCj"ߡʘav%Qhs\Lt3hzRΔB0LVx0wܪ]DlCU {C8Yw.ؽkH"0i@CM+ad98/C-M٠{n:Ca6͏ÐW3Pٽ'_?QZ0=6Zb~C+!60^ YhfߘB"oBwQZ,OrƃV8) |.qv;iخQ7iu$b(À!< 7burۀ9[, 0 ;[> mlAR9Btz_ fG ҀU]@כc'NT[Em:WC58 =LBNk/Ӵ[x<ٳۭpzIr((^}ӣv=|(drNԣqQ`S ;­68UhQ3@sQҔ}[9\Yr98!{E Z>޹~߽gLV,dL3m:5p dܗ+&*f^A^yl5K'#1KZ^l=:qǣnFnЊbvso:jN |7:9g/V ww QypWg?+*nR)$Mw9$߸}~ .8f#.e . 
6)ZO(9vljT\Q?:sl|b`PH\^Nσ 7`m˵vNe `*Sq)%Z W:xz2}T+yCAԫb"㉾o{Li[gũƵ~vU}M[Po d8gJ~:~ϩN>;~<|8ݮeb7ֻÏ .O;pbo|v|ֻf/^_wc"4ݠ {!e9{ϐTF7j3jKdE\z^ĥեUU#DN<4+GR:M) =̪"+֓ڪRu?~g9HEr|Y m; @_53AWSǸ:q3245 ARȹ~29iSU A\-Bctpy/`-)Ȓ*8fKf N0rasV87c+#"eABVn0Žm%:0 גshyFrjajj#6DV恬&vucukYEna`p9@P&b0H]AEuPsDYRRC!+\yHK!s4T4$9g *lMH!9MQJ4'*p\R FQr rk*;ͬdG /ց )s_Z O"f`NFiB ubCFh `Ȁ=`0AlT8@$PqnJB A7֠ 97z=,g RLjH 1R ^c^8Y2:;,|'[ڢ2UHc)7ߥGYs,vfE׌B}#'3bv^v,:-?.׎lXwјOGD3)=ٛe@!@8)ۜ7O f/Ny a0 ' k @2rlS.\?N9qiKt_{=*k^uQ5aq>yHeKkW:Aw^I;{OΌ/mX;yENfJ&hEӚjUߙ+S+ީ/8=xL;/Ƃ3qNT3Tא)YeϽ-B?Ol ؖ) 1l tVH1NG7SC eIs/y #X&έs08RsS,J%N8ZG 0R.CVq+\b ^i#xc b҄`$Vr!(ieg[g[bV) \!$ނׄPͮd{Ly+Yygw֩CKvhiT;m|n9v-|c!Boo(16Cw %(%ݵ f cC 5ᨂP_m͕&cXLVn==Nw nݢa,d1E m C_Ӡgp6lSHq`[l().zgZJ.T +"vpRegČmY>7qtF?*r?D[NXR[@R:wjchCRi8t!%gW~4Y $f%*Z^ُ24`,;ܟIԸ7E.i8nw@ ŷZ2  =o;e?a:J.[,2rk*؛Zʅ)ò6^Eit^:P hw@.`YSuÅ8r$U\ÅF yJhh&VY F\pRPջ`L &t41 U[r.6dc컩7.8)a8Ͻ Po&iDBH+bٴ!'I,l y1Y(+:WS.h 2!\=KH(ȖZGpVg(4*#LIByWWUj+1 3,l\KČ(P%̏[XZ^$?k\|`g%,,$~~F͇6;1INcP*՜16u Kc >"G;OEd<<px6hT\KJ,yo>V7sG4-k|t雉=Z5/pN}-c<} ?Z5PB!2y"+`pP嘙^ daPExdxhsP񜪎"?I14e+h.Ug 8\#{asLJ HKE&g INiƋ~r:kJߊ;Rsֲ(ΐbYA*Y@GaeZ*5&cŚ]bbQ5X|'`lUN ) H F}m;-rd,vH챡 Y4'4@ryކ6la;-oePrˬ.4%0~fΉ  Q@F`?I8wY^1 t?23i}V(ü[+nnqr (*tu4)޾m֔ܐK+OHÌBp`db.z$C_Gjr76{RD$:2/$sfCCj܆Jb/Tq /TQWQQ/[=D>bt x99{~s%^bBtv =#R:J_?׆[|P8m>!TgWT~sʍw@ֶyGπݎ CU$UIIu zƃ?u.&/Eߪ\X8ӃcʦWe.)PԟbJG`t _nsLOWOf,~|qi([GO:JJ$mzP-vgMI|$LIuvřɊ2cVrCx)o.DjWmC1?]k *RJ^B!q>@5>,KGp"f{bHkA S#P.b'K 2vQ)Akԫ &hkp%π_M×2_߫oݱoܻ;x%ӈ3M3͉0Z\H@\p[Zh?dҎ#씲YevAKLSް?}!H,wqAL$x=ayH[l=b FRb.ٯa!pγz&v.-ݖ~hF_oKCa~[x geR' &=}z!}x%Ͽ}U>OC@pi e50n? <0AN`?HX\]$@sXHHA YSl"") eɈŌ%,걖P øz[Bb(@,yaQ戢p2oèPaX;GҪWJheo|!  Y' Lt8SꝬF!Y`0p;r^TwwuAPE_r.z ]Qp{ dyoH. YJ$MJKX?͒]dz@θjGc5 Nx.^e@hW96OQ/.q22㚲_՘%)]ZA0ApyFPuѦ? I$$ƈ~S@B3qsBI*:*Rfu#;PRv肈xh*icO^R L,%%.)BMs$Pgf+'T^׏;}ꮢ6/#8>_ GF=[QY*AI[Q g;rR>J0LUǵx~sb\4.V`Y~-dT5oj@X /#4%8 … ȅ2j֌k4W Ms@Hd1aH!Fq+;$R144b5Ҝ 2e)j 6!:9VҋHG,/)X"MpE) .@)vH7ɛ-JR԰lܻ;W+]*ޤ.dehk**\!jU&1r (+Z C:'4Ww1- ID /B+0\&Y U1vl,4)έyD@2Is@zA7S2"ظwM:'1 4ۈP;y[5#2cMF7_ȪMr(-n_YsNnukjC30rܟT NS*<Z#:!cZºi{cdZ`Y8 {O?v쨻iXX5]"ҭ{{c6\ #xRPre5^AJ xEQhd婳@{S(E?CP2mݐآ7ѸX6;(R*u(qVv#C`!҇^Ә*e7й'? P piy\v5"`Dh/3$z? M=d$)+A+Y}j*iJ}6MJ\>udL`ޕVL8<8i "L3]f m,v`LHg! [8!; ֝52k 08_ G 8`(7"wtZSm;o=TńՔ$f!2CL ԺL+?D\+"`Pl#F Ra]+QGhЪ+j -^鄦:941THA! lA2I5 dP voVڛk1o[O1q~>+(QT1dtQ(T^ PEArr39-+1#*zJuˡCDF>3$Gn+{2 }R0?NxE 1bp7嗧gjhb) ,#ߑw~~]z`|ޯ s`}Á%:eMfK淯u-Bw|d~M>G0%HN %o J(sI_c_㞗J%H,FwqAdClv̠7 dIנ^=^F<9(PGc8Rmc(h)q|`{i# ;L5Th|$\$w+dՒSrspWf9]h]} 2Z1Aۋ Ʋ<+#9y`/lܑP0EFt=BruV CLَG7±$Iɪs`!Va{=Y qITCw_*}cާd٩/'JY C$$U%s ˤ yq9W<#T ,Qbtf>/~df20SwipX/i}P"! 6,p1@tl %U\֎A6Ɓz;L )B}t:/@̄qL|rA8,/$>:ޓ;0ȳofWVm(Wr4&7N3Mu i }ÚOa5:D $[F)6Tn\qUau,˻;nuC>?3._Fwz3?&vML&FN |WO2兽LT|9(*"s,\e28#Z(i#;UKަeM7?Dq\`МX E!CQg.׌B{OE;FVy}mٵiGd[>.}Bgg?E]$H[VhlҸ"іb 3ƀ_|wJa FCi;!X !!"0ٻ߶$r7Ce Iv&Hff? hO=OZ @aGc瓮QX_Tž_S+`yUzJwc&->rrm!7^UqG{of$Fqۮ^Nr$g{d杇ۚf!(Bb6^m@I.ަM PoJ"[gLVw<i}9xڶWOmʚk߇,7BMh't^Y+OQeYi@}<'1YX;eQC`GQj-Eޓ" ŏO cY Ѿ|qFnC|k5p0pS80Tެ ?y%|qW0)TR;EPfmf/tPL6sw[+w?}ľw£AO!{߻iü$ĂmVGk2]+TހhGSUF? 
&@ҝmkVE$}y8\l_sTSXkhA>xբ#%Hך hFT2Wt*qRGX آ LsuPs--jV\vY/֎J<~BHaY2l0zPݛK:m*28xKʴ Edv0[7!/ lpuciGDr4Ff @ک$  s΃q].ت<agƱ;a0%Ee `yb2I4U [ӿ<\e4'v7R<эsp޺󂧚 88dJj ͗lͣ8P%n GNRV6S>e 9-#'->C>ß-Vi %P4KٞĬk/ J8IDI5vy|TTkrs폏W.s&RnLc~4R5ML 2y5~rb2xjdwܤ-昼b*W2Vz /%ue*#ޟo<<)׿b H&q,Lxk%-]Ehi8f Wξ,te DdOvhja'Ez<_ŪD6R>(׳_0G+niq)w\^tli^'LvvA[{1RoZ<qqے9؉k6"ܘ~SY~bj8@(;T}h8hj GK6ab=hzz;2+Lv=lյXJb]+uX2rƶn%hmŚ p-хW "`9 ?GD(]4OG||7Σ8WAF6us>R 6;S{R * .3Jh j!:4E7e=oÊ}7C]h SӏOIUɢZchpGNxNd*"_F&r yj S0$90']qH:SRf" e2 "4T| h&U{RL5j`ķ3m*3oN>ynPJqq%s>ZBtn>N{:DkPB4(GO,RNc$7ϧ0ϝBw[LO Q O!J s9F'5- )dFn:IV^`/3pDJNlN1i)&o9q#YȵTD0̦472+2X&rKCj!0 9&p%~dN%(- Z ˞ j3hĺGN<Ũ)R<ǜYnҦsf DuRj=uޟ"j&v2CA@_*s8*&_>d4;(X}1428.ofҸ+{~3|^;&.\ ^M'Fݣ_EX*{u/dx L$I΃LP9SZ'Yb%7J^"EUar?tx#ln@O|B^[$7fY,VH l.TL1oC$Apu;d u g %ok!CB O( h,Y4ӄ" ǟ^ѧ,Y\,~3JEes}Ǩ񘮏nm!Z/_~ `܏oM4b!q#J̗.Ȑvvae,h`9%E-?!!Yx1fS]@XTb|y.Z@̺֩OMJN8aMɒQ>5rZYxx ,9)HLgȉJMYJfkJ gU׈Fu:+!hj{PYjL0!r1I-Y@!ݙ SZR#u6)i&8v&DFhO)H&s.CN@{: Rr2f׾>DEi*̑19rgFcZ}baܷ|GDckD+n 5B'S$ bX+|Z]P!|b-ڒ)7"¤PP-|>n)E3ϗ8٤<)lH9,w ꂗ#<q@`p Ņ*<|rh6NeU UϦ&="eQJ׎N B[-Dq6!!%c K5ޔ4eBJ : +dt[7(, _ga2NG ZB B.RiRR쐔.RnD"n8a,76hY%MLf((L&L %s Y5ؘRgTf ^z^k6.QMڼ|f9 v z/k|m~PЧa功h>8 nRg[ []6p 4n A2f=s4̐DZ_s FmHw-4`G}[HsU %b"߭)8+jw3/jVIGYL@&R])dv<%@ZYhD2ݧ:vP1M8Bw>G#l%mtJBZu`oUaNBHX@.{ߠ .ZbC30xWE'8,ȶ́=a{nA֌?D 3^umQ.{: 3 3Z !8ڶ>]'T ֖| xq't kkxs+ebK#1%i7VԼtPL;e2 63h=rιGmG%ȗn4Y? {4z^ҹMl{ȶS(ɽfXȧ*Xr#ʉR9UNto(%ۭcAI+"B 2%bi|FvyiAp_zR*c+fS$*o ,QY;’6jILӝ WJgGO[ ѻ2Dy0E@ziζNi0 6HZ ! #Mxhu\~&us֤Q:.#GgONF\xMϨ*QMdhjeã IqD"T1m"ݨB\Z'!mW<:v0SN=jޑ²8`z"m*Dtf:U=(gVL8sR (f(,mJ@(A&dʗv GK*+ [\i5`))^; vx *xH6Y{g](S{ ;n \a'a,%גc>^5Zl9;e:gEdtF3Gi!p#eYP BPspB0恀 "^Wej9nUs <58 lVJy謱`JK]QzK5#D+_6Ұ/1. >M'c_FpNSrt:4f,q̘ W*PmqډͽMU OZXJKEd\ᒣ 4FMZE65ja+`)TzR])5- (%?6^?F2[]5fy]UByQR5cMc-n}/ XDBbI4bi_ljoSW򄉌fQhYL]_@@)k-8 u>[ЕRZ '! lAl4\ESt kVx&)>ۿ{Bfs9sJ]ˆ;@WZ˥Mi+LiE e@m =].sK 3M;!;svnxqQMD(Gi}szH=i>m>hPB@휆%<)?3N

F/ރEYu>ٙMAIfjן[:[b3E1B C-ص[4\hCQ܁sJ ^Sy'\匷܂tS&HjSގPvXodd1=tQP9Ny HsdW6gQ-wt-6E(PtbW]^l5TAzpԓ bDֶٜP}dK7@3u(qXY+-0REivQ+f<ƞkx-]j}D\A}ޞ~|Hq3~u6p.{{{B, 9ɝ8- U$L(l.WNf'w\Jjw2/-cPy}Wovv]O8fn_AO,19om7T=˵qn@?.dn(dx]ܾ1Ɵ@r SAzor|3uAN$& )J иյ)5I"sl=ẻ.⫀Gy-3Be|a.of" &'qEZv)Za^?JI䦨F`ڂ7QI3SѲx={ϭNح0vN;L֢ [ Cj|/7V#J[1i)-]I[dC^@gS4Tȼk~.]qg$:) Zו-i m5-Z5-Zmh[h݄G5ETCPvy2]ا:¡mvV84/eݳ։F}LvDz,Mzzp6w'.Ձ3d9w4íh;1d66aZeೱKј}lDkapi]uc p=|?Z^k`vce 8 ǧ!,t]D(3%ǧLAjaQ7n[%vt#Pɤnw3nPp;)%P=EѾm>ڑ 8"BU|UzzpY6weiMy;P&DjQ,~1hq8: s`/ !͊ &Y1a0mNbSb$Ö`:`-Vlв*qG+m@NqƪN@"&ˍ>u:)߅f}P]s4ΊTx'>#m!T)<~AjRUJXPOx쨉g}ˆЄ[(M_6kD HWZ)F|ĔEVj`KaTM-LRBiOiC ŽStO66k*F"A .C\y1g]h9w\VJ*l j[pk$oan߽_j>qq{~?Ktk-ev2Tm(JB|UPF)+ ]UZ I:XZ9?6oI&' 9qrIJb/hhY((|Qk To [Z:)'&AF|F*&߬N2 fhЄc"ybXRѣsZc..s9j(ׇk2`m@wp^o. o{p~]`Lt%ϨstsC w NϔPj&*t(*@8uU;G" jS;Q 1?/ן=2k ;B1)FmZl9L&A$L)x1OOCX4Ԫ(ú J6ӔdlygW ӫ+ Oq$JdT@(5VQmJ㗧 e6Ai9i|l8M͚ņjI.V2.3.E^D`* FԌ$F,QFKш- vEfKF8sF`jG鲙fzTbO}NWDRm$*I4kB% u3WpW=rRͯOe 6{雧)]KN c/_쭿RDum/!V/sIVJdxECSHO/~[98#+wLp\t7¯|WZ9C)b+$Tt!Lh+Ke[@*ѱX GL˂ݍY'1HÉ QrxQBף,@1.۷*xZq\BtHz=j(vn7 a1{JA'i0f_>r*B2pKVVJ3!L 7#Tv&<3!fu5 j뙫haW1hy4(Fh,jGɧV I})4 }Os6<@+cwE jn;pD/ǡ[)V79]TdBmxMk1oOmH۝oM6q#җ.i6WN*u{kWd?\R* P"]Ɛ C+!47 2S2wa 䮜 ʱe'3._<1 %B5~|)<#3.;<1+vgtғw9Y[/k8v5A|e3 ` !NY(DvuUvrP=4TX5>*O*P6em;4|Ic)`MKʑѤF.7CKDq(nZX).4{ P5~3=̱Ճه8%VLϺQe. )7, zzpH"5WƅNjJ5MSDٜuTT50NT[`*CP7Fq}泝 7W"-YJ9$b4:_::/fFWc%"-y&AR*7_[5k՚4.BdyMAPU,.3&K508Z]+eTү5paBπZ,#֯rMg &_jINUB9.''-UUQZFBZ-) '\B%%gASU7jj8sq1婊 `<Y)D'QZI;bS-z{KAVk;ס+O =nV&\R2ZR$A#0&(4&WRT}mګůnL;8<E :$KNQtiV#,&`&0DZ+.*)b*2oN(A#2*M{}" 伫bwUi 傭Bn[47zGl6neJ67\oqN{$/~Tl8s֜YxkNx<[4\&\&v[ݜ٘ǍX\V TM0bXCrSw?j0GgtO-%{A>=koC*R* mz~um=iZ[G8;}JZtМ֐14:<*t .>uPO }H92n=6ln挌SgٞdңaQdO|OcyG;Ï~{r[bh]h}3ID 9i(8:8́aۉ`k=$|s1*dz/G&CgǙvAF@B/9eT҂yshKZ^5P3{n=?s=mm)#l/ِl`" i (&JZW{=YHYdSqX*i0$LJ5Ϛ1e: 2Y Zgt ^TSI31Z*liP iH04D&TJS'08W!y?簂WPKz]~KIpxȐR0{&c`G§8༶tF5Uo'˦锒Ɠ0E4%rqTmQW.T5__G#N2*:`ޖԐ[ZTCCG@'/y-p)hY YgЋ\jiz*(Nj^W;uj2JƋj%ZF(s'EtII5kXҧW cj e@KRcb&A6JL>ܬ&$b_hm9᜹Zzᖝ(¥}X6}G5ʜ8*nG;?8os 8,P-'>IP ׂrjQ@u))3IZckNmyxZ'2{q֥T (D3 (eT_ :ׇV:wb%4}v?Ɵ{Soit2d3q~ *8G7h]&MɀQ5  ᔾ v4C ]UZ+hܘgw&A̅`D$L'~_L~,x Z ᾝɐ].( (҆P~\>NHc+Z Kp;.Zeodo&RI ^$JPER0pcFpV j7q.IW4N3tӊ6VUV%ƁJ}([0_Äн,&]u\{-.gkPkEd_ ꦨ:E~iWVnaǽU: }+uUъȓ3ɿ(VQHݬܗu|b`5M|~qP6ݎъc>+3\-Tk~ u=Ȓ&,WJBHiBsh%y8@$uI aD DWGKͥDlXp)>p&Wk-O Sڀo0Skui%)@5 i.e/fqTipM$\YCR'[J` z`v\[Qlɬ&ݗ_g?$ay-bU-9LᩅkIzd\=sĶoUdjxz&S&{?U/Ng!q&^_H6>ξ[d؏n-Cǘ}v/ShFޟv>nt6qA,AF멄.fR=vU Mp\P;T ]&cGxK { 4pH([,6hc4KW-eA<.~'hB#a7}￉~Mo-`$14a%] ุ9R% &HC4$8Gϧ o{I6kz w} |7C61ٛ݅yC_/+8l]| H50M'㾔Ot F:^hTl.xtB} Y*^1H*Xo MIvc !.LƂĨ$8eHpxkzLPJmoӆ(e_DH=:OۨD|njwMlH)pen|lqZJJN[!*}W"V\*q )5MpKR#л%C P #e(2=)xpl]# 1)oᅨ W8 4kVw2>MFZ/M[TM7͟]^ AZO 'zaHIfY PcLz`$ չ{-ckORE׊DJlq+b܌';ψ*L$Z +HV!"u8`\ l+#A>)bs΅o dUnj ~\+mܾ|+cܛ-2n|at9.z#$F%L+ WCwr&}c= ћg!*|`h_ιk掩c1uSWjq:.QH;;q R.SAd4t´pG!l}:AZf|ul4}JxQ3JT+)ujdݳՅ6HnY~*5(Yy8 8~H 1&ALv[z< _6@_U<zXzUi&nrM}ΟVݾdIt1~1Dzx ]m2 1=cxo8^"28&;~P7U%wq ) Q.^T q+5e.~i+v6֜w+J+e6R`)ڼ؝Țr#R~-45qivΈW^5E(f ey@ "&K!`vf'-0BdPJŝQ^_f,Bp* ࿏c+ 9< 5+{ cK@}rTTTJG--þ'K$.aa%׀?kƔJ@; i@T*Wd"Q<%)gU@!roM 'rVy8 l 5@탣%;koC* R(k;5(J Jܸ3KC`>Uzk&YFd?u-dBh! 
F=06R*η0zS4ƍ%= mm]Ҳ@ZѦNېuuXuV;)WP,ۇESkdO|#L]onPwn3P4#vv*hk4T֖G[V!P%UTr| 7= 7dh[\A DHؐJ2/ؐm'CZB`;ݡKE$,OnsH 9jc ۂFX1L+;'=$hmFp(Օ9NspR&27I pc:%qN0.utJ(PIDj˓‰:Qjo[I.IC'* 4侷bmYe9Y#uw1cUg7+#GElJ~^h==O;0xZ5JSdfVVe1_A2,n,v~8f?(}\ԡWq&/tӦNY9yOps9K{4̵)}cیoYsO5o>AT弌IGhMփ97nqxR rLMۀ:iSjh}{ޭ MĦ"֥N>2Ͽ#A];y~CjPUM/STU<@Iq`Ͽ}(S' ,; Vw2E0wVha` p3 X~R*%Yu^v]?y0rdr:LՃMrx)1zeIk{u]ۢ*fIGj$HggHn"R5[D3TIxCJo3>0ۺotX[-;=?MNB'&gW"^=N%I } )+ŔuPY!&at!r2B3O(iRugu`K_M7B+)YfANj4AGP)) }lş8fyEM}yxP2ݷNj.B_'f_e/\bg޻)lj=B IJb9?4@Y^VKԕ]t!}V=c~YL.~vv*f1ݛw~Y& ?蚐rT%i~T,R'^rN;/VJ58Εb9Bk%%g|,ׅJ}U*&)0&ڂj_x 3$e,>sqrNq*Z%u3\J\~rȦx9mg 1bK_Ao&cDa53YnQP^%4"{:BNmYdy n![oIY-wAF6](hT~yZN-Sd_ TQbqln:`ٟXOhx0>=lϯ.i)EwxwK+X44TSbB3ALt*8(F@tQ;IOx o!^ Z24ۍ ÈtKSUey_lē~bN_iyYj'50CMS}Tr]^GDRH(8Se aBׄSk$G(J !q'2b$^.71,.*#s)H5ж`.4*G%`aygW,( g+Xd>Gy|1j L ':e4c#O8L: -U^"5}[RrtnFcapdbUmT_TC9ܧC,PmDLZ k)P.b=&Z!"+ Ґ.&ƵPҐ~ %PzIuI l -ƣeȔFz@zR r Kфpiҩ|ɹم&ZbSv3 hݻI*w+ tJĻ.J6wqGs[hM/tMmҭz[誴=`a쒆LLf/cXSpwl Lؼ;\rŎ)=cѥB A0@nTJc$RAQChm7@ ]Uhe12 (e‥~fD0lcR9 e*o 1HZCl ZR.cD%u(,ǜ*msB"8HB`"ΒaF*ː#.(rW "H=U 3"1*b3O, D1=KF rZ9*gm1M_Z^~8>}L,7=1a$b2mSx?H_yk$>N>G޽}wO "BZv%ta4 H~} jW\I߹] <&aN'ׯ =չ:=L/.q֨Ͳ7"pęDEۯޗ®\QrT!N2WN3Qx"^ Gׂ{9䣥ٮ|+B*pQ$cB핬dz2Sl ]#͈d&|DadBaNZbfv$!`C6hQ&s[oǜRp*x@9x ќɉ8f~1Nj^Cd';J5J.Kp ߌGK)pd2$2~ֵFcX7Coqsׁn2޴H_!+m\J`{B&S!r1x!)LZS'pgNJ̚sQ[2N,O}xwb9W=dAXb=B:n+=pދN7I)&ZC4 ZJƞ9% !}mci #FvE\$L!0cv.-|xu,zv|&vd}1Rqcٞ8L1X\3NZ $D"6g,nQAC|+TxRj=֙R5e5} ;a s2HvJi Fa..4SNWx#JN57T@ŃAI~S*HGsڨAx)?Yֽs gp1&H%-Qyǻ.]ތ:s3{7M=Wzݴ0|vSc6Z3gc4`.㇆hG=>''6!h8& ),;vo- D;dΟ^P,-(%h3J:;FoWZ(/|w)od4Q%އ`!8zp-nCP l,bn69E3j'T΍dУv}eX',]ù¾XURIg,;ې`pQzW^j =;`GǺE;ո\{!'5Q#VMn(SИ̓ĝ1Ø+ݻr은zF|vc04E4rLb >zYgk_>Q {IC'8}xֆME qG,$ wRn H;& +?=W:8:DU9l(kE +Jt"|0A%;,A4DW8LܓАA8^Byϻl(#5Ex)zɭ+FJ@au`x99-Z H=sl09*:RIy X+(2$4z;h^c}$JRg%;@hmP0{Fx4/̨:KcrWHuKgR3$9˘.1V}5 uSCʑ@][o/|i۟:ur1Wi/jr*Ke#6$}8}=L_X.!D!_)XixS5iNdн nK>ćIóefu+{뿾.O{Ug&+}@AO55'M:qU:٨2:H?hJC87[[^k<\M@3C.(tGPOV"}\'wu l5B/ֻuX㵹1v0z>LU44'+I V'Y?^ L76b&O7AT@ Y~}y\ͮ)FVNv`:d+T֛z 2mRm )eh~p7AH Croϰc*CqP?Sn?it T3H{,2H"(QZܓ gɚ*aЈR:5'ldr7g닽jX=6^v*Oɛ4=$yП>x{}rk%j)ϏE\^EY&{?U/lI_›?|Ò[k1Yʹp+_̷^p*oҲU$ aVizF՛-p,:=FCtǢΆSǺ fGAᔰ^tO;_q Zgɛڪ4xqHk/g;ҁq5 /v5aRYv\ bX N~ųlEZtqg_< _^.Gć-׏ +)a2%\z=:P1\@/y2} jI$BR1_|V3'jT>t& b;Lr4g}x!۫eZrbݚV ?!tk6< _PNBN k)3E,y1~Zprflenx }YS osyM.5 K8$'dǔM,} >rlzΪ`jEV꣹[vmsj esJ٫Zd|1(vmU:Z/ΘE ro*͑?0; WBZ? x{ ҳg9dG M^U&`"`Fo38^>y&/ιò>Z 럣qr9Q)"6:BZR.%`8û "Vhf>NTO`LZ}NO @P; s+U%1Q5p֑*(%J%u101 fThDx&*Èʨ6B`Lp`N .8׿L0ad[ï?7N߿HeVj_\\OsZ~c#E7'=UYbQ1&Q?b7"ˮI{pkߘP1ux!q R˲61`SIftyLѽM-ơ)zKi;jru/<,UTw. ŏ_.VWOhLI*-w~^y}?Mt~zpu~BGYSNG?Lj~vЇO.  PB=ws-\&Bӓq>>ZfsF6I/Ux}+РFxtQ"x*mBT^Q!--j}_d5; nVdHb yZtFa4|&T[[ ^u2aj=xLgIt̐t0YlYx4t1dqՓgv߃gPv3eGetY2a3Q0jV+Y ٖBX *+uŕJ&̀;&ڕtY^k aTivBzZ;u*ڣٳ.%fN3Eؗ׺' ν:0@ );H4<:W\SZ[pZp!E7؛NEkj:xҎ๾ yrc -Jh{Ws(~Nj-C YgX!S ԆjϷq0G^U>S}Zgo`! 
:e"= JR{[zU\F:?9i3J8_9NK4#rzjgp8e DQ%ATZ?DUd mUa !J/EL% WߌN,Yp\&(!` ]T Z܎k_-/5b@{RDkS2.SIuneqIYWЬפkeOzTDϙP[?yH|0{7Gh1K]JNէ*3!oN!HrozO3$!}a|ucvJ|!|Az/fPG&A8Fwz-֧4WNb+k'Xb2ݔn!Ѫҍ&PUc*K-n ) (ws%A;vҽ9ٚiqeYQ !w/2Ko*SРB!'ZfR`2#,h_r:!}Io_h"8H!a}Zc,Gzq"ĀmŢ9q1bd|_Ӧ#w!o5l24gA:A&2VvO9Çhbt5s.4RݍݲfВ14=WJTl $(U3kQT9ղP~_cveg$z4q^( JQK_nsIɇhO/5D렁$Mw4Oyј-)#s,Q[7G~IwչH( ˜_!F#t\URZRf?ݎN%P羉k}79|W6M#N#f#M%JMtrtr8(1'@VKc8C"J8sUHR@ X*(y*Yc]ΠTwk`b!okl0G=JE).ժ0J8 %c̱q*o1g 2ZXD&c|3)k-/>צ Ιfpf!Ĉc B9ݞbX)&vA&G@o#G[Y ۿ׶𛪖D0>sH3CuCF BguFD+Q{G# c(5.P.H;G6Zrz4];Y@ dI V(kRzM5J ՘3y݂V/s0KKQ)ci&UuZ pJ5͡R(fvI^ͺL HK<]qVsȠA=Φx߸'dm'a(zϻ6W]jmñ66kmL)0Á;1c6UeZi/tN8Aٵ0SdBTZnrZyKYPrt+VZ8u#a1а@] 3VrTԔ٘```M%,ܬ'EN7{싓374r_PϺm&8ӝq,_wH B*s ^߬afd\u'4̂yA#}^ }n˾Le>bV!3)(Ob_%D$=>]c[n_y.APjuwئved}|q$չJ9Qk-e⫷d'Tds[Kgܕ۟s$lyFJT 7 ;[E$ƈtKpʨ qc dmSvk5[Wp@Jŭ6wx7GÖYclnvqҙJh&> C(|' qb?֠S붫˽@/TO؁t ~PNhd%kՎIpFz,xMr:>,\bsi_?\˔srv.9;;r[XM bjEeօ2#X: N8zXyֻt[>VA-=AjЋh6>^סKnR:H}JPa֭kcNgYK}zew]֫M)0SҒ0^#:/7Qv1'y8aH#CuCÁ醆o,ӔI^[;νf2Rns$pu1OU:X`J.c@i(37yT7\316v:hL S ʂ1``&řrIu%7COvIFQ!4-^pTcZZ"7h!x*"o箅\RyE|nħ#trsQN5b{GCɼ)%jZ(јGG $u"Tf|CO=^Szˇxs%>Q=jiJ)G)^a SBDU$;Ԕ!:$Kl;x!~޴9R F22)3'9*L ]:-4H4VQD'J&\D_|ւ%1ȕTk2$c  ([H#bTYj2`ů0\hh;$w8W:YJ*%^Ị%F#!J;Ũ1-c12Z ,.vIduV -$ >XAՈ)GI獌X%8! rK`ʜYGC? I*e5.Gdwn¢Ő ս0>ZC+8>PQ5W v'VG?]D!@â?W^GFW')~zU?p$+3;>< x}"pmLÈwS".m`O.O8.RLr !i3.vʍ1g'_,/W&\C`J<P29g@9_"Lde` Tm3yoV$H6w/|>yC9bA$PP h]dDE{M\v011v4LJp*jqNeMh98VQsAQ!%K n<1c9ᱮI> NI֗P1#ίV%D$rݩc\B )}>+swT~ڢJE=Sp-3Az&.k4+LkP2J ,*R$Zbv.50-iIx:2^m^K+uFp#b:xe+;~%үQ&qa;FψAlw( 0 (ȦiUƉ!'s DBvn)%PF˾zKR~Х f^o{ZQ=ٯnyt16!f^@2MXa8OF X*qqXO&Ȕ޾ւb"xIUX8+/W҅ 9`+vYLhl\#n(6(\ETpR'Dmkh\- Py,6/g{lˋ&%$'ئXiZŠS:cpXakmZLp5bX,$9(8wh(*F(%dVYE bMmj;J0e⌨Q! ˜ik֭TD @Dq-Ma4G$=E)&0aHQ̼ 9HJ;H1@720I%BlL AJ2)0FAyВYTcoLnm WXmIm1*Q,"L#6ؔl3dkI:\٦ci#aLLpW91, M53\*P :n 9+2pt6TR|:YBh·jck (&L;>->光;g3{s=o֯~fTy'/ -)zRsp T;H#T?#jj9Ob<V^{AotF8MK$K3 p/W})՞w[9vYfaq N5 2e,LӸfSt"G9"^>wvYbYV!?L)kXA%bx4H9X ljCU9ȓJ”TJĞ G~|ugs+cr*Աªi΀/q [+'49a1I\ #Z{`kS%hg39M"_0HGRQ!3YRc8, zTh-z&e)E7 SFkaԹ-I&h Uв$i0~6kXHL5hypx`XмN%ǀ'VQWS,M9yrFuK?8& + sR4zoHkF UX\jmmgD pfU#B˝_~cUHŢd<dWF1X\_OnڧNV\͌g.cُ Ŕ+GY[Y`xEQ9pK6̰M7o4H6\8ZPH4p5%BSS58F|5zKQʚ<cCy٥jq ZT#PKxަ!`3r hrl?b'Ly2.Ÿ_B;Og_7EQ;̜b-z)r;.ٍbpQz)a.=*oNÏ<<\L+Ӕ Λ\‌! };8 JnҸgIﹱT\Wtb:۝-᡼_iՎFk̢*w5#{Y>(ϣAvs١gATǨnI3qӯMbXs¢56 \2v(hڹ}^W_>W/#7ޒ xuHaȇ?K2%;_>]yD^rsCDTWq?!;q6_wKux?a҈b%)>?6_A4fH? !2n=jz18^}n(UA5QTdD,ߐs# Y[\j5zp-RupJd |V)z8^M,Q28|f?Fϵ-FQEV6?3ՌpՖjEuABw9AwۻXv |a8S4ֽ9]+&|]Aw{@~l qـcį(1O/Xs=gxF5G "NTa 8'] B֧8'] #$%! vwPTSTTP/Z2}NR *G-)yh-e!z-Ȓ\=u)߿q0zOqA֟߿i8/o̭Z5$b=j sX< ltS:t7fKQg#3OnoF16 )8Ӣ3+V+_YV`ݘ6zAdE&:E=ѦjlEU, Z];X&4,feILBhaZY\T(];LՀjDq.KQֆsW ^ :TvH⃫YrI͹F%Y "B!*XB}#4~0dwcA'.AWLܾ99 ;X*/nUE ֓"U! y\z$SH=r0K%.=1i96#:{n an/=QXAI,ܬFjì|^E+&ٳU s@.SVW6;s@TQ3q KeG?4_9lz _bVA"  mF"ؘQ݃.kWÖ]zZPC}`;۱qp?/2x^< =?#(R;VO\=_+ M1G 8 |Fiu{z%Q8WRsv]H#,'P d_R{_tʻ {6#=꽼.K?\Pk'MPӭK,/EDh^|vw:%VuTO*C09Pm<ٮb. )|S@lgp͹XN?kVu}ΟVsxFuK/252J'!a' %?\sUY2z#9@d0z$BhPqaDb#8PXyT[YPPU5e, cS޶S}4x%sG"vx!Ɵ%_ogO4MX֒x=L&%/{wZ8DEJk]8$c;=y1\䅼fKDCF^$c=%`BU#xt?ہ3Dq-6'Ht0,HmtJ/ ePݵ7^we=nI!;CX yYCȫf6ɳHVY̪"jvKbVqd0|=r7.`7^/^}ןTzm#*r *YpӚU^M??|o^ —p7]g_{^U8vo˂īf:|6pbHb'905ء$-'. 3$,3kAJDǂ &VHJᜇI < pZ ҜJN NK#e=€TG-Ӕ+%:IyAcZs -*615ʂ@ejIkWM69@/ղɗ_UaYʅ{6iJ0Ђ2V@ALKv\8֩B`faDA08`~ amSNs ڷc<ÓR*vDc݁Z*n<;6޾fuK05:j}va|%9ٮܭeG{g9x,gom۞DJ˓3oċұYl7SxGK <yӿy|4}|bAM ꯧpBSG!_9fVf1n.pP^{zbr00&'himc,)27M̂ CE)Ľk ;,hCr)8/uȩDNs5ƜyZk\۞@ϔ\3o{kޙ&BJ`vwڕBIk/?9~RRQw߿))Q<䢂PXj ) +#XU'8&PО+u"X1EihQ4b&. 
-8BP`À*R ZFFS`d0BOI"В gd-pl?3 j@WM͑jK mmd5Ϗ;x !IuW;ce(ʹZa^ pf 6%h51ma7_f>çX d6":xȈZ<=s/03-o&#AtjwW|6MO!ĞяۇW1DxX1wMW 2}~ufѲA aM/rdH6`!"%" ,9"@ǶXg -N,UqmmSAdoU^N\ !'uyh x[?uKsnIׅ_/mqnͨ5;3ju-ONOA-8) %ɴ|qq )$TW!ONiT&QbJ՞>P};Owʎl@?8fDVR`GMHUЛSH>'t wRRf{Z;U@pv0Xru^3'wSg<˙Ƅ\vEBUrI˙,>u| sTΎ^D:cu(@J2gvJC0aLpZ0jQ䰪Z#ZApkNRx6{L Qg)},hUV0 "#5K I9a$تHcwƃ1I 5KRktT8ؾ`#Ieǿf̚*i0-T樽붦Eq4=~Lhv$gf?k v n9'ߗ2B]OŃ)T-ۮΜʝF[sg>u)hL&So6+`mE/(ĵ+KcxtnׁZQ~OOOwLtb):\"ND8tA*ՙ<)Y) H_A il-Ȣ'4ǮeHeTDZȡI3e4i> Py6>_h)&N@P)~G"WPdMْPhTJv;ͲZ)*NhZZ2M|KENeڵ .fm- uAk)yia;nε3Ip+W*7 ^xGgrZ6" ׳ސge.CT%9˪Q+~xx?}:9m01TI7d6^ER+/ذt2#R5tؑ%/̣<[9!,b, 쩳EZ`G(&]xK&Gw%ʔ_aIW-= L}Q ƪ;+JT~,_ j|4/Rc j%/~f=dTFxyWLzV;w5B f%4GlS"h^EI2_`8E-mL@γ/2/ND-ڂڒ_5-G5/*c3>Rv$ÞiskZrp:FccՉioEn18o ';z]Xsh4@i'C>E9SOeZ3dSL2X) C8ΰ~|5x["o}}ѫy8CltdX4Yބ^Scٖs%:Eb_--   qIu\8Q( A@1KuR=j9, `"[(jeLؿpYTl"li\14[]ZAyǨAK(/wsƥc :( (j "A+o #ArW?O̝ C <]pCtG6D\U* DSz)et(AC&y NLbƍkʸOiegl6 - 46viPqcCx5$ 4#'CBlb `e%H&"Lom0 !VI\BEXJALZuCbT̠Z&M¢KSX K ]53UtjF Z\z@3 (2^$!5HvөQ9G;}R#,8 Tn4-A9wtvb:\6;km#u:a. XiY DNWuv\XKq1X/LܢG5.#…*kKW@#v֝Nn8<LΐK HzK{|!}3Н ]% g mN@Lvo":dI[W%m!ΰq}Ϸu58k K ,K5;nncnb*yw΍CN/-/'i⤣^nf'vӂvk~FیZ\3Vk 5wzh,K:"!iSK0;5v縏8LAWUhx[Kl67CtD6LZU?;+Tڥ(Ӟ=]eţb}oJ%;ڤ>m2vss}7#ـy?Xl\ +<䱷C`7#F4{8.19SP N/DH6tzMB/j΃k9X<ұjY{Zq#f,TrKM 3'D/-UeSP<' x^ nDfh}{K9=MID8!j-A٩a7'(KUF}3W(kwv&)wmqIW,vM%23Rl0`=yC[ɴy3I6oߨ&Ynfv]Z[#Ks"##oq^nٳRzLiH*\ n2u:]/CФDSڇO7FFg2*hܬ}{{ *HC N!;eL,4Jќ~Q]m8VȚu5|Ϸ"Ϊ[_{|m.f e05ؾTza§R_]4jsB4օ#~VW᫹y.,ihLgu78!Wv)&2YWg u~z_]:7 z_<ޒֿE܇,ל66jg>z[o1>sM4Ci|r}ki=ճ'/T.!G| 畫э"[W r\7J - O:uȌn}xȑ>R2V >fKFкPͬrfq-U{15(|Uw qɎw2\ Ro^葡7YsD O ^% \GUݓ,f֊2ƌ_Z?xCp#-W.:dJԈh\;Pٱ*pJ6nߜ%rʧxA9/iȄ, 26ĹOiQRPY&8C O+јl([l(<-nhy|NozV:M6m͌t&ՒigOQgTrG opJ=e~J9 iƀX'B Z {y4pHMJ ypw 27@,LUrwgw?]7fYPxP~8ݿ~WR*,no6)Z{d Y u*XWx];x*I])g%P/McJk bb-(pUVu,,?:`g8K%Ƀ!*/"ROUr: 8F&#"86n?ꆝ`EΔw5`\037o6y CT9s9E@ш Dfں2Ilx:_~{,7|cʐ7 LF`6ӫ7/Li:Bt< 鶷OiD:][$#˖l.BY\vgޒR ; 2Ll8U7}f8ń0SsL6mQ=^KI{2)m7/e"FW#VЛ$}La]zThL,J"Q$ieXYj䢹Xc9覱Q3sѭ+9%mZ[%Fn#w>߼mXfКuo5[u/z{CF=rpqQ=0A1uQhl`Z6VNrkSN.K;F~jW›Jo)mdcx֯! JaeT#׍e΢mZ4ҙmsoEN][ ޢ>Wb60)R{Efmdc3 eB~cV3/m loZx5陭:kS~I$4es (NuP?q~1oygѳ_UHdTPBOe1R35L-o*;$. deq0!RMY/sDypp"|:&Guj2ӏ51t3Nst3mN#I'f%Sp8ک5aP4.fb5iLei0IĀRP8 @ث1MSQtzǛ~*`қpJ;wZw#\3ZK6iu"CꙜGHkB9Hdҵ V V4ѥiQ4~ "j]HcYBJ`xqC­&y WFyL:5QlTߏﮨES=aviCh (,9L(f8F +7+ n 2edAaZ?.9T#l/M l-J?e/"uʂ-qΠ*Ai @ZhtޘJTE:٨S,*d4L <8ţtWAEC$VJ$Oʈ6] Y4}RĔaMWb54u_Y9J}@? $%W+reˬPtuhIZNb*r &0iT:q2*z cR}!/Ch2aύRN o.IieJ< Vg\)Oq&Y{y!Ԏ+H#}UQ|-itp&%06Jld(|r7ETPeBT+0ل~J!:IauiWp经}ě{bsqו|W_4ZΒOϋR7,~}z97Ͽ}Ҙ~IGw}G&Kkٟ8߿9Qq?|uuy9)9c߹r^ֹv |kl#&9/ß42JSg__cK녥X{(nÎ\™*my+T, ^U"hư yVQ`| ]VRD׌5 xϐe(ۂIݭNʯVsf^g_/018  B!2[g)#_ c쇳z)g?_O}~HUk?k?wK'XlwEfyy(im2!mYòtMzI%Cs՗AU1G@jo> .96Ѡ {`\M,fwR*3?YfJ6[Fy=Ju(Tc$N5@T\I><]OAD^VD7),ю(5OFp64[9rSM}-n-{n]1q(n#ruN-unhntCE;45AtFIt5OƋnݯgF><]twB7֊iijv>~UHBQsܓY䢊dw5Gz-²:v"|sG4 WSܨB4 HC-4틂Fߦ겺 r؍$Έՠ&@Wחܻ3TmpYH{Bjt7@!Ӷ>ơN~ZSbq`i¥ۊx\aϟ{ıy3[ ,U_͞2j%?|? 3 g33k ?3Mg{s^Z丟Vaʫo1YSVX»dGn*Za"mg co3]͈z$/l1MbnDt9B` $5z(SXZI3i@]܏|RY8Hx3H$$"X@z챠3G)&>%f++ ^xZ3!TTņe~0ڲ79Z6Ve%Eof:q&#聫+X_bB-v3z|*an܎ՎǣjElLLn% r;de 1y+.Ӱ(+sA[wQgLCeJ V; i.Hx2T= D.Q`UqX\UظpfyƗog7_c+G:Kă_3=5k5t}mUZX *;YTF@'-ZlRQ}si523 р`2tSjCiObb0cWa\ELc463g\̧\O=5B 3:h ѳ۰Vc4խ:a P! H|K ) 8 U&7\ozo(>M5 JԧȥIJ[&cK^enٻ6n%W:j/:+p 0ҒCr*}Ë)^$:"G@׍Fwѭa|&/">^{3(wT$jaH9w)ӡjp!W~enUQVU\RtG3~ɠ6w⁙ /ft@ EGZ?Lxiѳ"W fmoF-]#A&0%h#"L{:HrN)$0R ya|T)'SxY؊Q'Oݦ5+= )OubT.398%33yS)HQ Tq]i Of֓?<9a3&%}J.฻$_5ݤ{|2i'҇d0Y/L uv\!|Z':s&STfT'`)"ʴ G4 YuDP͓3f]a\f4mU14p!\S"NJ[xۗx HhhxLW;soF< kĔeet h˕߮LJlW YHWŤ5ntC߯goE921YŨ_%&&f&x1BNJ ˓!eբ"za3imQ"h u)bX}3vGD9t9 V)8Rh$ɴ¬Kf,.oId蓵LN;ʘµW2$sjc`Dಘ1_#)O:eyxBi$ul{Y%q,fcML6'E1h/U -Uk|bl](! 
:Bǻ Ą>_ yaذfaHhL[Hgj[3tX' өYtWP2!NHm)˔L&fR+df77-/ b~>T`@:3TT|)(j%DDZz**D񤩽 @7v-ˋф2<dfW} xjt4ZZ`#65UGw4 Շ }A֘DT җZ|MKn|a‘Kh8G:LE6vPT)kN:[(ִrŤgQFl$KHޓ n#zUQ%N VN3-ht^a1S)KX귙+y1CF#d03}SQX3lj |Y`Q1MDu  cN߈p®Xsy+/8o8o8Dir=I>ۿR 5ƌ/IK2~T 泴NBH1^ cqtA䄰=Ͼ%?洼8D YmYJq,N\GO;z/ԚВiu,mE(D%֧VDpm1WiGs7?}ܽD2eK 8ef^4a%@t>6 1cDXK~i7Q:AK~0ɚ`:ʻ6~?К9NQ=2*P?ϋt C2ᅴrgArL0i3C+dΆJe{gk+>VlbmդlOmIxڑ#Jɑy0j%nV]m yA2݀飬qIq-9GY$T|}-!ܸ3Ev+ۗl+aG~\W77&+=Xg/GJŮ:|T nG// qR$/sLF>w/뢸xEPR7 w.7'.C_@åػtQyjoe y  ,_vTe9ȞcM9eq4Yf)#6K 9b d,8eg3b81Lg : 3RM~egL;-aPW/w~fnRG|;qr7⩏% Tvv.:٨v~U3YN e$ EΑ{zP1\#dFA/;/ZMMb2XR@?&_A:6,xF]ɳk A.RkحFKwZbTcw L쁇tO:e`ί)>K\ɤ%,a*,,?߇ sҖ{& wǃ>XS Kb7)< ]{c/tuU:FMMNɴuj&%`I` A%wNY"sb t>K/_)®;E2UI8F_E9gR\ڤjO+4ܰ+nr)USm8w`|6B#ӛ/7ݗ! Q|<;H?_߷YO(\c޺7ET܀gmE3cyKOX#o@-F Atbi~A `\A#8DkljO1x} pH4n47m;`@EH&EU8U^89\ީοsؚ9^zh76CH.{ /vwyA3dWx5o"'81=% 79tXfXCV`_.l:ꬎה7ނB0n;lMMndcJtBWO|r^߱;B}^ثwOJkn͙%aMuՌ0^DϵfZ '5ɺ:;K͔P.ǭpb)>7/ %`zL/UKTW\b+ NArO55%SDSf e<~՝ҫ;Β%VZkyyq/$Bq/$U1"Hn%ː%Ҟe*+DB L&rfȒh1;Q-1;#b̻NYs(ҙ_L2f[Jt]9)qYy.:u7_u{3UvebVW]`&g2z1bЫJcl?+=,{P1uY)W'o* ɧr_ui^OV*9\ *ieЈjaWVrXjr,)"g\MP+0B7x/~Au&=K(r1LYL27s tI̘e.$~!JΦeS\U8+2 0  A^k `58S9[kMs*nfZYx# iHO 1Ub  +-VQ8HTg[brj&SST6|u3k' W W.#51`y@p30LUc0NF=l&3̨;P5#WYz~/5^1(҂=+f7$l+t_~qIYaoq{NM˷#_~{&\g)izI~q1ćIyW7A;3.~s;pW&?vrs7L5p ܙ ~tAGȀ(qشmIb?4-/9V:T\\hO2)`Z<'I+3Zk p0Z7a0&e6 PVAD}C8F0 WI%(^jMa'DNyh͘Rq La)a[.JOs Yf<&͂8cOԵ0~ yc1V9R\D^L)ස>œK#tZhucIxF8ɝϽIHh (/b%-UELr׍Tզ 16F) [J %`%0MSv&PW $ט>##EHqD#ǖ ^Hkm  D+s6Y kgjL5r#*` `9r0eio*BՕ$05M)/O lk(%ېq^qͶ[Ec3FO?$u %Z<7 >Xb~2QLQk0/~0qZI|p`^B7w_~uRtΏw>2e3d# )qM>>:u70\׼qd:}f0'kpKidjBid<*rT :X3 RmΡ$a>x֦]j+LC?֮}?&)#Hbym!޻-@Bt>6 L(Y#hnRO>b@"f\+;7KP )p FH޴H$~ <3n91{N['Jhݏu4Xwfh,3i_AUȩ"NQ4pc4E7%+ZQřhCP ړרr`f&1 3>Cd"jAZnac,.M1TSشab<%pO$KS,[(UYNlhB"T#!-+S )o*(sEYR,n*Vbgx =ݢȤ"`-Y̨ɔˈD,?J8"%8)(bQ>S( +CaAE !(iV++PٍV+aB#) V@&ݪAAa+de KSF1)pV̄fpPDj ,OӬR163ƄoX!f` ()$m"fD 8A+%pX̶V b Mi{1c>yOZ|L]^~瑉X2?:Uc|"Su.";d7-V`-}"Lz d=EǩFM#G&z(L0stZ#Oz K.o(;l[q჋"c+'A{sm/~*nyMi\uo֗?UryWA!wv֘.$pQ7*rcYѰ#5N0NȅSWX+Lbj-9b[̾ 15k րpByd2gIf_}!7G  ~$q*00z,X+5WR:0%#4tݕu֦_ttp U8{:Q*UBJ8fSi4R<{5|)AW>F2)Ę*QGt1fx?s*oXw] vމߍ8jh| kPG@ fzʎ 'LZ )z c%x)ySSCvmgȗJYaeq6*zKw擝|Ôf SU4X_ r$@yǔ!W?qy^6X0>dW}4D‘c 85xz2ؤ=_Qj|r.&7S}6,1^}z7+Aya9@Oʿ]=\_yr`K=m so-ү!_gře`_[Pxs}w|YsK]*Y4:ϐ&EHD%W|:xlpt#*%M*Ǽ0ݎ̼ۑ]Oy4g#0b.E|~Aͪc4@@v{ǃ3W8Jۻcdp0G01q+G1"=Oqw@DW l7>P$ZvP5 ` *AT5oS4 po]Sm0j;y/cXƆx֙J- Q"LX1PaK*E&ߵ4yqIڶd$[S¥0FTsN3X&Xb2`6%ঘeI` ˔IxJ"8$QTs.5) K%;IKLs'/w>\ŷhw˻&vOu`+< >-z׳[73pV)=\g`tlEcup;>C; ^;j=mu%))NYVL0LK{3rNy(1S7x>uEx]G'(*EOu;n5-щ}G6bO`-zڭ L6b4ե>OghFD > OC>=G} )ʦ,Cd{nBY {8JL# u S𰂝~ \[LCU4HLE)x\L)ʩxBj7HV+޴o+_~ WA@׼cʐU S3OtǣOYq(ItGzz2 #`6\C{P< KO=PUC3j2G[Y+dp4କ9J7Tma2pp⧘TL[aIJ5hPA([TcqJ._cƉHRsK11Ԓ[í1|51nO0xwD,iX8J 6ݯahp˒շKWi Yᗤ8DDc.zU>weğǏ|@ƁCP{W@o\3^|E ]=gzҞfIݕ 9jK ihK`c (MJ3h.1TT+p?4S;IDBMmFajfrkbq Q oGQ{cu,E_bR KWr;#Ǖ,(!3-seg[kA(34P-]r[ ܀%X'(dP`#pʜ>Ȓ,YPC ~50'Hv* xe2vջw=h3X[+) H55w$ KLM5ئTo9cN<:x"9'#M˜Q| $"b{>w_~vs<8ˮ;gEP1ͻOѩ.*E/ '/8z+ B?j D~+A`3 ^d= xOg6E+q:ͺ!/;k(< pzY&Z.(%vv J?/f^n ^Y. q *Z[_2߷(s3B,FPRFHH2K DI!vBII.5r$|mf|al?&Ef__6 ח tE<\>~(TI=u@c5btNE@dƲ(& nӻ1Jwf4չlѩhET_ebq0#8 iX[4M%U3v D:fRpAe8#{_io l@mj>ki S #'*^u7gj,{(`c9>Ϗ&9E?G޽~~Vz`@{xNseo}Ǜ3ez#0?=W0S<7CUwjF~n[xXEZc۳  ?F؏>x8Xx5l/ؼq9\~{.қU񏷿tǺ7`f[QDlOh۱V wyO, ?ͤxT0u0Vs8ҟZ{PNP?ٿ>QF^X1 VrV Ru˭ca^jC~g]s4+B(0iGFPvAjn?Jډ._fZ{b»ɗ y;.1JǏ6K6:A(W:-Uzzv! 
Ҝ05\a 0kV)phaւ Q@Dq1Tմ EմSմ&,_-ѤLdn)J4S Yf(0+5  kC7@k\n!=P ;n\] @ xDz0ړ NIL4pq\L'NZ͈ń>$f@%\:'23ΙJU\l() Rb=~О\I.T% #6C l;4Cqs.r h{M>l:`kF/`r uFc_BP'kۛyv CrҀ ^ad "Mzlq"% ⪎Ö`yXm[ Զ-OuOyةأxM4H۫?<(i`2 !vxzp&cUN$2-%]AA>ƚK )5c& ;]'1dǷ,< (9Ggf GeF,RwXruJuGJ,WdE5"G NZ;\6Vj̇ۨK`툱K*-崸Ey7v kvt&Xs(C2!sfĦs"LYfEaJ0e!#fДP1' Md+˔^&w~čw<(C2{e-O[ :J[77Tqʃv\a=Z k*I/Nt*Iq.EUe%F=W}Q+ ٓM% ʅx@W +I\_u+]^.t]x(KW*Y)+bqJI3'#FJ99$S4%Pc_U5EFh|R0&1sIn4S\;D RXu I`|L9<0IF]MtNfb4*JJƍ@.AH ,QaP[Fè˸x(]~hE. 5*:SDdA: %vl=rY`*Հm.K͉&t`+%0 YA.9Ho١R:ꄌSԉO)"\l;ry~fI\J ᴰpqZPӢ5zaaOM>dK.^\8pv *5ڤ> lB7ߒ*BLv|gQB1M")̅MQ4@UJ *Gbwu_1\yHY$؇`ח\[ t0(qLlan[ܔH~ioү" Ł(%,]EYte-KmoAi'NkaD9k@Q❫k0xZCØWdoqDYG -G L!)~\vܟCbZg](HM-M 1R#]S e16 ^mĔT-T u 0Z<ޛ[OJF "40Po5Gq [bc+kg]t<[\1 5y趺7g̻0uV$RZ׵8[ԲZjNb[qr߽sKE:eX D7Q.n0f3׀$:W՛}O_pAL1x'\jAUYϿ, YV|xU&A/*o9asAc;ث8YL%pE#Y{ac\>Gjd&d0x6snW9"" ι A8@$&& F `ԖĆ6̚#*v?"oLՂ)*,3oe1`Y+ $723=8xV"vQu " [`{օM "('vyc砜+9^ @ wXƿܦ:,XB}\ R1vSfo&/!!o\D˔w鮥B3Ƶ2Fn!!k&f(;ǏOk] k%=ZR59H˃̺XIGlQԈuJuvriܦMVjG3qe\체Kx!eov3Ic=TY A:-`.5^ PNpMDm*ϼ82֕7焣=m.8[VvT]eie҈XZ(QyU4qd?"4wOK`Hd (۵&: Q Lreleͮa,Ѱƣ/$)$u08+NtN\2^aDؗ58dO9**pN2R yDb[ Q󶑴(iWQҮ]%fZhǕm B ZFxΚZQB$ڡ%XD'kMr[DY9o_  PiK^Vjª x*^;UiJTʼnkQ5 Ta>0L,&!^ˉ4Σ䬎GuttFZq=%C`JTԯU>7=* a@ZPP׎0kz0i7 A8SYs= E4R^KD%wh' A FkFX[)p@geUcT ]ْō o-(ƸpQj@9IZQ݈}ig5^$YpRФB`Krۀk T ܱZ۰, ޵U; BM1y a7j=v:?[y os|?ضHV@zsTNAnԛ9 qf BR xPqIK^:߲GU˂y^WIN5UA;:b% "V|BVN:e/xӇIDk%C HNiY{؟XSדK]t7NHW-¸$z~bvn=O}niN b ,"0 lfӁ $r!cɅC}=wQpql3kLz|ѲRZeqhLϫ' 3q5]vϣ xtvv07 2.n{~L{|vnq)D6d&Z|{+z>)8Z7S"H>"@0s2.s>fGF @-* xBG9(j5sțq5 qD"VLZURk̡}Q !la9;$r<՚2+E/,$22yʪ VWTK6MAAJes+3u}6eIq(&33nj1_Ƒ;h#09 ,`W(vm2os6C0 Z刳KB-xAbUx+VP\eeL.B^Njr2dPde[^߆>^81_x*vrCdWJ*u Z`96;/j WxZR[YF|6U\ZV5E܊M^Y!UlF90N Z,j-kaş[FMB#e s Pdwd_,`Eχx$;r2 YCods@j8` >%g3BjSi䜋AJ9tr>;?忘s~9N%$䍋hLKn{@햋A1D.&l-Zv+!!o\DduYjԎv%n$ms &S3nQũ[ y"#ScM>v Etr G1 iGqxZ+'.5dV dqS g9pΙ "!|NE Fr%Oy"%# M (HsƿVFa 4=mk(ik\VhyXwK< tz濹/7AN_ӗ_nw\0*/E8WRw!YX@M>7aI_Fa/~bWE S\Vubx4[.<k* 6'hX3-zɽLI,`k5\A-A9Z8ũ$Sn~A$(.n'-;{Ϗ&^Utkz nt@,i:=>t#˕L7]hG}]WdRɄ_ }&~h]sp̿XKsS CB,X0^]`nß_b.o Z͕>ȃvcA L#яigx;*n{[o !VuɩiJc:8 j*NHhx [Q.Rctk Qm)Gb"ΥXwsC>VZ$N6EGJ̠EiӠX䔭uR^8˖-)U}-EZ.$)~ %4k#tg}ḏdF&qjRHqWmVlusK[IlB|tU$=s,XA>uVZo՗ViwVIj]Z֧Wߦu{4oT:%=WgZ2K,^Q~zv&5a~B03hr'x.O9vݴhé}$x8SF䐞-=pKj&><:a~pԂi[pJRܜ .Ъ:@1lG /'dߥd\O Ѭޓp=;jz <1 @^3xj3dZQ{^3^4T3eCxqMN|\Klb ;Pcp! iF8xA9 J7yHm( 5A!* #ZaJȸٱ4(RYeS?˿\ϟ?2(qxf:vю@x~9 + ;v3aB.wRh!숧|8A[=0o J.YXǪCSZESbWZ![P/!rMjQ<˒A#zR7B!En$syVXpJcXLSQ>! ^H:/N[8z:Th\c+:”t*Zd.loZ&-sX?7( ۤBRzz->^\\2=Yt@J)@?] c`~`-vkmVE"#H&M`2VD''{EI[RKbݭXH03db]W'@J^ AZ|M?oo™O5K*$A5Rcҏ tNhw -5(ST U!iOigG80-L=2 J1p"6``cO z i*8BQ6/|m|)7x'6s Qau ;Dž F!0Tjo\AW'7xϝ({yy5EnYwey;ZZ<4}O!i)X3es 'QJ21%s(& 2nl!3[]?kxS7lN11Lz0*V NJBhyf4)B` ƺ$IB^)T9 (׈A8Dx,T* TE&pR+pgYpf٭M[F6,Rb˕mdKPYz?;bNZ;؈=Q>[e)HqԄ SZCnSSl9R8"J8/5/(V)$m͏%Q:@gKg˘}sԒ+V/(,B5n{#d*":4Lz ADʺ齙>:7Tƿo!25>ҏ ?ps7fK4$]JW~z7O<.y !Ђ*YWrEp5˛=`B] <~1N993ܼ<23ݱcf) a>;̆ "!OTbĨ',7|N?Hώ9ΙMTktӱ9? 
f+&~:EZK+rަ\T K1^^>opAZ`hc k;be&*-9]'|&>cEnFw[ru(PcH(DTs[dsl> isT)c3r'ȝ|C>XkI /X5η q->XAdsQ)eoSG0j'vgTŅRQ; K h- Wkd ޷ct;|8ÿrTr[͹n <&)m3=X:d7/5htoW[7PJVyZ87D.*xajXʂ|Lx\mh(>NrQVs BPZLN!NŽ^z_P ό˫~놢%gXB+g $15-`_`d^=C_#卢JGxwٓہWN9ᵳ3aK KYB1 k&)ZHP: +=#"+R-<Kαה#4^6/X5(Bh6eG$ʎr3j"vdLby^:4]M{U\+  PaYk\D l6m9aAh$S yyCk>_=: A+*AElFE|*HQPF1 ΝTH̽PV?~EuWYT+|N ,;y s];W"=Oa/av)Cjn FCI(3P;A>sBG jO ~$g[yЊ(gh퉰=VCS֢ތ'ܥG'c^Cdg^Fd]E#bK>E!֠o&lRHIՏ?#bm9&֒ 2=bARdrs5y"31 gƠБS ?մ,@+k61w#3,^Ĥs,( zw~:s4&D xJz&Ê[nMU07_/t|-sV>YH$䍋hL1ztɓ?v Etr稣NwMyPg-?8Q5!!o\D3dJ3li7Mv Etr稣N Mqw-?S$Q5!!o\DdXۭCz[q)r1H9hSޑheg-IvkBB޸nȔJ)u[f0+.Bk˧1㹣 5HeZ?;#K1+[v#ciIGY|[0Zp3Id(M[El3戼U$ ᧏`t:p?yT0&UW'Tւ*U >^*g1;dL}ҷYK%HOłr6`LN( qg n2cW?Wy/J3'_V'TR4h1{VRuv:+Q쮅K_Gl ?=Ǻ{ 7?skqX0+uW$oJ_2$P9#TGg{<ku0ʌޟd7B1|+e-crj ?!z;t`犡e-؛2neϳfF6!i[< 1{(iEK }r2&c3BqDp5`z 8%;'aUH3CIpWB-thJ&v}FXU |i o5ۂbAE]uK]#z87oeᶄbRl@-Pk&"ply`_n*TrGGkg?=U' _sDUAbKvp3^B%pѯ@eSXR̛ۋF4-K 1H_h.4 2mڜ #*)⭸NSH{S#t'3zLލGŻbXRɻܛ׉׈(LuGAmdZN:M M.gSZtJV)YR3io*-#O8]'c>C^nכnSRۦ#Ҝ6[Mӯ1] c@%Hl_OUIig2m7F p*8#9#XMFSjpTK&EG5<(#*B_,oU%_Y6Y kz.h XH'F GDžxq#^%;č1<#Qjvz%P}\\ТBy_Sa @@ \AI7H/UI(Xb{Hs-(Fq ia#^aX,K**<u(29hQ$Kb-ա4(lDqo(!- H$CFJAL!d;2!ߖQ_;5rٓ9ĜU]m{܂ FK潰 1a8L- WL[/$3c[h)\z^YqUoaט#Ɍ3u5}s5?A7WwތgwVf}rաV"ΆZ5Y[hЛCfT)R4+59OYj@3*"ԾИ!8ZxThfo0J˜{59Xil , H$/bT^>=p k/z|oȜe?߼|Uz`:ٻ綍$e. jURsۋˎ.)ILR&[߯ m%*LOwO?^lяГ "7abP_\8)F]_57fnU !2oS<@6B! G/qc&V |T7cAAZJEc}1AL:zyR v[1/tDq2:C=G)I'-Kծ,?5‘ZW9vd=]L7'\tH$lXxyt/U MMe/.7'6)<6_J1Y $qR\im?<9XJ>Y$A8~v;ӱn:?_c^Y!Y|?>ڑ8-Z}}_cOpp(hZCfCΔV2^Y)Dp,gb-tJآV3zqtƇaеGN&0sN82 i!y!%@~JhJaIRd3C4*6â`i1j)ʍi!+ʯѭ*o_%S)B(kYOCU`.*OP lPbܶf A ΍s]/^Ptm*iI@()L\yd4KP/GO |s=>)&-_QI"_9?sni=>=nY8ګ|Y|mpj蜳m-U A{ULT|@fbͷǁoLjsk۵T~k:e<[,}T]Iv1/. 0ÂUtKLhHb~+fhu AKMt*Z4lvn点>"uіzh/vY}UͺDirr3}ӶPt:.sKL;ltƀUUҟΘy)+`CiOFd mH,VIm ars~sO@nP*#FrA!̕Q yNRtFNMcRE`jL۔iĕ_q;rt|h5=F.fkg3еe;lM1 ~u9XAf쪙0/\a)f6q,A+͋O9zW'E,yxua>L'3{l2sw?·9 ɼgV M3+ C_70= 2i܁\d땡:3cz6ch Pt 3ͶUI$,AgLZ{W[(Fm F,ˬ4&W8fՂX)uo39S2b)HHtVbb)07(,rg3#%kfM=:z=!jigMڒ~A8~J?#׉uw 'A򢞣),袇)BPӉ[J!jLGy,pFոPh.ypʄd,HA2f0-y5R2ya8|fXFRER&@11x_R-w;g)[|W.߇|MlëMWK;%C36W}`B y<Фd(JܪQn=źX ͥZT.U 헃 :rѺ(bx "m>b`~??!{*R}.@Ϻ*.uWiOm=u&J{Bbiy2x{Y@w0|a{:^ ErW~^#zzг9&q0ra`F^!1gV{A_0X|芴L?>[蹙(ڠ.>3{(Dj,IM :̀,Ax ?OwkĘ_'p,.,܌V"|PWAO*~(Cg7̮MR;_$c]rKvɱ~rlrDlIIl)qF`No05u>2F :Wp@]O}}Hu4&ɕco>jo'dv֖,o(ZC.mZJ^ˍy'׫ \gp "g r%r#~JC"O @zĖ#zs6ʀUiX*4hpUiԧBLׯeZ=~娜.sT;ga2QQ&(ʻ-Pe尸@ZEk'"CbayQǪ39*q-Z5뼺B$>_Ut5P=!'SkUp/%.mzS:łl9}/mXYWEhBkrhe"\031\T[2QJIN4J-`bKRT$ZJgLxTf27hBQ&;ѐEJ-dEj +[d vgzK~I|:ZC5Q"smK`dc˺TPo"7:6 DE&b Pz#z=㇪ޣZAȍ h3r<>M̐|Fͩ3'2gH j+G>VlD੧^;IhfW>Ec`ku待 puu2]$YLK_2LFPaA/}2^8^LkV@24x@ [%Zv-ƤA|CFR:қI4]-)<(g r@N" CbQ"+&LLMbL3Q.;Jfͪśؚ!W ŀq?ZL{%?Rb\;.*h0ݥoA{aAV̅K+#kpu3hTJШd|tOG^;^;,}=5:VgL0&5RfQ Six7]xCjrzk6wkoV; zwOr~pbl֊rxbt=|".}D:˜h)XoMYt/ci'Z~,W[SƼ.`CV]-{+ú4+hB!$}/,A5x_op}|N@"pztFȯop1HW/#2 ŒF~*x`|}&4Dz]bа-9}רYۨ-eQ&>QkڐUazWJ LudzT`Qe0Fn&a/e\ܿvT`Fg;MoCmi#g a<|d2"?j? 
xt8+ACvClUzoLY$bP#"k`r,fwoΌ.VQU)"ՙͿ pb#!K0/Q SoU>f|R 'SP߭6vU={s@])I:Fs_^`>yuOC#(*W)T# LJ4s"a0by,3T)'EKi8IRSfB{HL,x)3 FO 8VQ'R˝$6S]Ji QG\:O2O+(mT=$v\Ѫ I%Ӥ'd)7?8 9I%K.pP!@uz|.24ZjMZ` j_aMh7i:[ UGlDjͻoűD/G= rLԕ ͺˍz\?|fO )O'qvq~ш`9#M/dz ͽ1?N`=yO w 00$?\It1h!u>V`P5N 9=k8r T\j܌rX%3:U/R kyr~Rxvt;PRBja&(YyxbnXDV9yGs]{G4f.Q r׫b4c5XLLtS_N Ւhc޺KpawKǔ!,ek`gK4 yvW(lCM.·HEo'KTvQ(k{, |9˞~I6!!F}KUɬ-vη-4tB9 WϪyL[ 1$c[nwji[hݽM*&5ԆmUF]c[􈤆0ZQwP&d"v-C-T ZN{^sW.[&[G&Yժfƥ. %3w4e_:~̇ n|VaBkl6l5&O^pEǕd (0qxԛp$[,DqDľ-]ҿJѯeF,Ƨ@K-X#qԕ.AM]grX!ӥ}FX_(h1{Ҙn-~P? > |s/M WoHy;X Ǥ6BUޛW?ay'մV(&՚(ќ\%h󹨸3m&vա$3}.~(u<̿˥nfwt.-1x6R*__m?Ďw>{~ZF[U\o׺>ߟaEg]lo[ovʊt I;(M -щv;\t䜷$j&$䝋hL1,b4`HDf0r.cCB.Dr 5a8QGKo-Fl,7Gɬ s7rZcBȢC=$䝋hLBRyj{b11hOWKL;nMH;r-ϲ-o5{BUvb-t|َ$UƷN/>G6_6W8)'014a";!xQTH "mؚK/g ~RF9KA Hwլkq`k~Q]牆@4 y6&j+Ze̷9äƏ*I0}Ȥ#Cm1bMe5M!ϩ9]ٝmPZ pj;9mi瓝|m @߱ZM%A- !څF5<M1e`t3K=vA{!GJ=gp1+o ܓ j.%jt ~2^\8sk)TGnDZ h?"io"2ܹԝse>me`qtPmSp,)T/.[|Ll,/BMyH!MMz) cT*,? Y&؂0y|6s{S '^A.~POhv;n0qˀ+CY^^^e*`a 6QiFsNTX#J$KTTJeh)Ճ|s/+Ձt1(r}i/*r7n3wF|vN *݇D ^ȫ נӹP-HX[#D8X1a0N1ʀs06 *^T/s;"Xqĩpd8͔ Rbb2f?%\%'(f!gV "+kP2˗!pWyOM_l[f~syvNbsu,cL^F9"]M> U#9.;"Yf,T-W  *Y1ĭʘT,*2MBX ūR0;"`1I(KF %#4MX;y"a2fԀb$3.RFbE1alHSt84}͐% FB|R1OARLH$bRVõ{[8| =%?&k}- @z}F[_]lM/FT>ϫӟ,H3B*!tXīڃrVn}%My閿 JyAwoQ0J6c~ZuTv 0:`>̦ΰf&UDG*S" wJJ 7d[*S1s i%Aj jҸO^K4N3 sDBs9n*MꠀZW<  5&D!h p& ho2ʄJ%Ԙ Ky%-2k!Dj% uf{罟NNl[?FGmѳTӲ;4g=LRH>lۥHoJ /ZtϷPѣX>+Lۖ+{(Ae/#9jϵ̅LPI>'3mW QQJs+pp`Fpwl\hLiH/=;֤!rLTI8xi~,7GJt@]"Al؛ Ɵ T%^&T-ޤ:D]!uys%i b'Bȍ{]plYb5O߅Xh˧BWjm@JwېN+%}M7 b-%SfĔ!S=%2% ?l[0mdz8Z+ܲu=8`HK9j&R8[)pz<2ΖuWA}i\f pё߃ Fm 򣧳Fo *쳞 $6``-vοl96F[Q{m?xKN>A#.8ݺ|~QN`WzِˆW}}X5ZS޹ךc\ɀғӳ/֭3x2TzZ}ٻpuͯFIjy; TovcRa5Pe${E!\J2: dRKw\lVxB֗Y^w9=#޼g %ف7KWqzC4On:Ga8Yrbl{2/vٽg1ʼnFAL F?)sV\Rjjbpm㵢Z^yj: 3gkkиAM@@Gŝ, MRg蜠{UMVH$"$!)bj\{[[,8LjeזP;&-Rg>K,9a3rޮfAxA $l,"+@TK$kkf+WgRRU<9L(Uv~*," ~ÈUn!$i/ke1" W]⸫QdDybv[tp;5N|èiHFÒ%Il nmX}1zw^sw@SaUnn:+-?ŗЛ vlZIX^L @HxU 7% )I;O)cQ8tWk*ѐFJ7ˎ@c-W9H3vL";ƈ-9D[أ?Y}t; 9rRH!.B}~Qٻi&G h_Yn+f%p`֟xX,!="D{h8'dӃ;p=)dyMΡyc۶{&-9c{mJ`|.@iZvZ1LȐhdkJA\@y/ Ac;ժ>iHf)7MȑYn[f)[RI>.ok6e3^)a7$O) @d:fߢ( @{ t(8;8}z:2{}"I裷Vc` ]$qHY0-u^VbW6x{M˸ >iLBxߏ1^3~66tylOt;bD?j/XZ?]l>B:9&">%[k~1ZMk)uhHp7{8g 9"ZnӍ}t+e#=7E;)F?'n0=rvAR, [٭ƻ}J!F+(nb/(%} ~f[a" >Weu@C8y!ጆɝ]'g:\HџI S:zE*r+D1,+& kͳSJ\Njas}gvr(ab*yH4 &*9 G$q'W҆)e1mUQҼȜNP1tx33`Q;(?${> =zϪ8~Wƞn~Au}>h8Ѵ ḿRho0d x21^%ѱHyl-P!/adhtBn&c EDߜ@ Pc_05k[髿^B%*Zz҄Sd Dy^`gO4(: /B|+)}$*+m,r b"%NTv9 1罰#j_QaE>#U#;_jG YLxI4ՖRCj$sR􌰪c9B9"%J g\5SV#m5VfM<8+еC=@#bjt>JK޷Y[<{LzΠxyʗKw5JLjot;gTmgWVKE.gڞKU8vs׀_%az>.ՃW%Io_|ӛf<%Y{idgE3Xe3 !(&7 b$e",3e-{etXV!b!T*l4Bqs1ba) xO9XQ[a*ΰTFDR,-5ڙMKFJK]ڢN_J?/-~aZ0n|?_.Ӑ=>ϟ_;}57OW5YLDw ,nyg)9?,3U }@ƢOFZL# XgbA0{~u~~GzApt2Cy)Q@r!~73A04sSynٻ?FD'"q[Rf ~.,ΓXrV W[ԍ?X7 m^ȳu' QQV]**ϸCIKE|nB`)zY*Vx E&ɯ},dHFEH0S1[Z@D3ل" TSyéEr$RZa5Yp+33BM .u(Ď3!51Rarr Xg;QR0DZsq ۨFBc:jCr 5?XC&rFTҊpX Fwx/j/B{mP%-%zIҖ ֍>Aȸ/eK!" 
/mmmI8ݹJ5J@׮P2Sn } A G!XYɔ"LΆ?xNs.S[Ϻ+1XK?=3ӻy[,2vn'BY>[> *Af6[b)ݽRrv:پ}ͫx4rN+GuE6U Uʋxqv5FպmH5SIR(Ba@'$'[=_'YiuW<֭;cE,VJ_)W^iMIʼn؆̍ ]mŲpDJ)F'dA@#񠝍?S.I~8<|Eg,p1f'$/ 0 `5y\$W|#`xDg' n$HANX4>QM-8Q1Ē fR"o BZ Y78!^L d YHGH_R qBQh-!.Tu=| z!;2H_vdVcڷ%܍4a@_ll#QS~/g[7N4/TefKH4]DPǢV{_r@(-5^jL刡 }RѿPjn2őDf$w͖ڦ LzqBܣN7ivRk%FuZKGP5DA䄒`I0ֵuFBM(E2sl储b~鄛^NSLwqFdO>]p{|A-Xa{#Yf$thWvT^C.ȿ!^'%28]#B+ͳeU]Nro~~$*Ì]ys,i9:PNRhRzNyh:2./[%= uw| ER/(8_5r UPS쒏d< Y1ZbwZʉ,c}A^ EK[qr<]Gq&~y۾դ]%\C+eG)N9ysM3y>5:זf}0F( Zqx__?jg2lM wR[9p=?'߀ɛN~+ޤs+sDT3V'ufo]lJ'@Cu)j|]})?->[>F pS*)}Z{?FDr/ADF" ⌚Z9eK ]|4J .圚Jo.nSoΑی9ǰ*\2D䪍d\tu˶  -BC~`g|ӁFh^o 'gMS)ţ^>*)ںpTJ2wEɂU/Y(lArξQE"IOBNMIw%\%oZANʗO.uDwr䣵bzڿLD7XGˊ]l+)JZP@ y|1,|NGD,)'fOG[9†c9:#)KR(e_$:J"M e ll$QVi5ɹ[OOQq@UQ]K{ Vz תӻg#10 pL /Be0'%[Ͻo2\Iĕ wBVke7Xsа㦗cT~XPIuq+75*e–~s 3l8VɂVďK"$ytF헎&CöiQ-C m<@졨XU[]8/ـd3C 0BV?joAh5=j/u[ߘRQם a ѡ6r (0Q3 c]$]#^gքƺ3mB۝ jLm9#Y=twJ][uM|dwTB ?>F6]CNҋx!||W>gQ|{_>xjG1ûߟǏ&ٟ> 2OQrlnP=~C/lS\x|u@^Gە6G_~o: e2~eqmu)Oܻ7y"sGB*UúU@n-k)(:łzutH.?Teow ?XwoQ5zt; P{5硛탩f%ފWoۖ?>?ᤚ#4BN/ TJ}6𾳩A*h`о{z,D d^˰Ly8 }a0$Hԧ:J؋sdG7fpwGf(#,ç-&>g^ ׾Ɣ 7PM7g }΍>Ә>щbci `>5ISFeW]D4R6|gǞ=3iqc%cڢ-Ԇ$ЇGGxPCٳ8á<-rcٲn!ki#Z ZJ@s!ND DGZްր[Pڨc.ʉWT,j#oO8}6#kf8Dz/2g40qdžA@]س#tT֦.IXgc|Sͽ"^||bx|b+} 9Ǿ1B*V*1O^w8nzN !wF V@TSـq@tL_3!\ f OǤRNA5aGLPIlD%@&`.zi͒L4C~}43+MC$;vGխ & 0vO AIxdD(J0 Ϗ 11>E HF"+HLdD謞ވ^ K4qDG |w[d#o&J^$ ؝0Jj:{`!1%B?C[1K)+,ӵ _K; r|!3j\: R N: [}B&̈́VLVM5WfZlv;C\ wO]ŘaFI4{լѻa7JŎ{dRI GvfkR96IyoCP/ qy)Gki}l'"xRK:4ztA7O;G#i+j9mHI/k$b2*KrRa[惸=`yv˫EWmŅo֚NRHWK#p\tՏOl gOLfD^RNKmW=r:'ڛ])s\NR}8IS:Mo?~Ӹd>D)[O&~35b5wӧ= ;CM̂qMPJ6#N{2^&xge3#)(o_i=Y sN1y&F{&ozRٻm" #L>NV Gx51Rx¾ҾgFb̍bdZT&OV)8!h$zHIǂJbVLv2u}jwFh-g`0,Kv'+x3\188Ĭ oΙj]X:_ ϳa [QO@5&@:BR Ԅl6$!Ǥ QE5|2ݗ&I 3 B8&ZozLek>ٜwK>ŬTIG `%?PA :IJ [E.E 2ĞlzF/;o)Q"$j# N"[fd6@v7D{iS]#Q&߿9[zGSf/V<}?=,㒙C3nM4TYv$Ihe/=yYq`id])i,>y;W7!~կfq聯:şN=:hu*%Eds*?};[61RG%/r)8pD,R!(o@^(>Bjc W t,nqEC  BA" zGޕH6Ts ]j+Q(NX,(`J E$E$Prmal(%E#&uHdEf[Ij2laMTV% u"Rh!";!z92)N P(nvg-$M(8D>qe =RkU.)U[U92,~OWe$~#   c@.w%Fyk0ӎ-V^./}2) k'9vh}HkmVElkX=:բ1meC1xzGE*T%>r 7y= -<ǥ$L@ay>HYK)o,A{9.C#$HF "K1 ,p(4<̙bLDQ%U i#QXt 4#a,86rAplj+7#M? GSU?IKZ=u3:c '%/qm9Zd?/p=2vB3\М++!o<}cDITQKI;PC~፺6u㔔.},o}s"s>[I$V+棞ͳm^LiaA&LrskC W1o؞ޢѫUߎYqRi;^dtIa woznCyn "@4N%ϡvY;m{kK( g;@vv02eM4vt }^:>r^$ah:Bb N1HŀRqFMgƜgጷ :P`I>o}-A*Z; q":rsJ 1rE 6XRsi@ LP5O"^EʯƔ#d{deMVΪVʕpW]B]_>-]Q!JZЃz,w+pZѬ`y!U;<zl{*aꕈ 0/-sopz^D t:ЙAyzPo" 9~[i轻ْhjw>QS Y)AUEުMPclˣ-2mh:)!zG$NƩNԶ?9؇VEIĉ=4nmd jZZYmUV@{k)GaN0udĺ4գ約Kt4wYhr%aPwu:IS*5W39a4EV?J 9Ffr/:)6EOrTspYĥK|$՗Ial{9yL.o"#s?y&_oo|{^*-X@Qw/\ s;\$P9vodO8t7 eF*LSK2s Q-UWyK=]VY/~Ԏw&Ȕ0d7*XӠBkݧ $Lk3:%]?x$"h4 h$:`y>mN=R!1Z_'jj[)bo~=iyTd@g@z@w\2.0f)?S.>ne0e\L!-Ӂa~t7*|pd'"a{? }=/_>t?*{hț#<7I!A9s ˀjC3JjōDP%&raQz;Y[GX, 7\[ ;L4$@+c * -s* emliC5K)&"G\]rp{[} X0Nrqy|ݭ^0š ]S 7LQ66IV>0^Ż)&;zQwy? 3-!btnKֹ{?~$H}R0# }j 0$p#;ZY;[C%^NSR3ϤvT臌uI`eQ;VA"O $Ϛ|5;7~n1`^9Ofֆ,U{zVy[-PWVŅYFuȕ<7ffϩ#@vu5Vw9QP$&W]G_Mj滋s!'eF$xVcG^^M$ATW]zE v_JD1.>N#|I";G(.Ps$4sEƃ@Ms](x(W<}\*AfG.=e~P嶯pǞ]<0C2-__![J_)Ϯ'=n6w7O2_K ~,jtrQBd! (25P) bhMQ\$ B Rν._O]IfWEGu_t1R& >\6wW?&~!9 D^gHy ^0:^ToæI6VS, d!y! 3|w. lq Am J8ƽ/X+z9J;գSmȿfrw,sMn JhCvәN+Q`ڰ2.%C͵ LeT BA"MeW uHHwuD>e fM²cXy2 E!qvdG&sV~BuX6[?y|Ӹ :L<=h. 
r+Oa󊩕Ag[맍H] NĸI['YW +?)V"jׁ|*Z)ٖGgJ``$s=mAwsSz4m7;18ƝdK_}ss!Ikz 8ρR`J6 y.Z(!zuxKBNFqc4FsAh{R?vWE4xIMIY^I!BAH`DhaTK%p 0FyW ,¡0pFLu+tn:IJUڽ;smz%% uV7a.1Nx Fu0z|_)筩<d>q5WjhMG%!܇d׎ϝջ^XV슛-+vE=b˫Uz*]Ih)dwu׎uOwv1(a~v34[ _;\s#u:J~NA*`2a%t>aiM!aW ~n U1HKio2-^-^bK1c\ R+ Lj"G\ [ ֜J(撨Z\u_fi0FNaRP| ~%(c9VRy) /@\6I*ץjFl3)Æ5KD8CN2'( 4sۜHpRKg_7I2,n)'Ҽl k1Tmu@QOIuQn譡=@0\\]E"Iu yqx8.KO:uJH?|!ďrrBb\ kkS\`ŐZJgbD`uk .tMU>vpay2F~~I-c~c@4 䖓IZcg-K˙&~=IxCkէ,i 4˹e ҹ5.Syõ#ذK[piv_ǧ>R*&dϟŸ]YGOs}sFgjߑoOIt6_燒;)dBwlr?"[ϯ"ˣH!F{XZ4CdU}x(ŨF/~XǃF ̞cY4F-uW˻)*Il7k,XP1iJ+ҍ)> G|3r~Kg 1 ܼrZC݄VXCp7f1UA~/n>ҳMy['RP uZTJ5v]: #Q~Dxn at */p$}_?j,A6+[-sk(@A#|R7+2-BLI 4=VtT(iu>V($b.˞(qS8M (?:H(_:"JaF`Dk{L>Kԛ< "s(hPh粲B韗#LмRߋ8*X ,;ǩE4NiVCwagG"7InfLV C`;bo7FI>l $cmdI%X,ߏY=bOH8OŪbՄ3%o.~ZSc@)v< ySe"N1Vp Қx)  TFav{Jyo:@ zY~c9&'-haF)s"5 Ӛ(n\ 9JP;f{_ 3,KV!ʖAܟXI>V)OteЊD2q/bdG3ֿc&\cRd,#'ZjQnI2[qi53>427bI  $Q44(j(*l2S0<6PYuV+]#7e(VҘRH"jѢ!hg e*-)HmOj&:hOK3hjXܒ DՋi϶wcPa sʆ (,7$Bĕ`T0 (ͷn(NT-\Df"t EEUR"MO(t`f0n8D%zhm"kOol]@ Α}ILdfiP-W7̃> #we)ͮپ5@B7εvf)L }_*M't#&1 YxDL*>CږXÜZ0qk J>g\4@ۮb6 O-z9}w5Cy)E:hQ|cWWIKʘێVcƢ$>ў;V(Hֆ^ۋWU()c b8=(ng3/g=q 'xUF[Qqs}s ]J:x=|ʺ 잆 7c$6,ŸjڣG }Eѻ`6+R`rZr_XkBt\t)Q?I~VQ\Y i `o l0@BS[~:swձ/b۹@s<y{\R/)jn21)?OT"ru9fVb4l܋7Ko2]|;ERrDês7v٫ˋկzYnWwN>Г %^ wnob_H WJ`w=wԂ^%~أJI/7olvTCl]tG5FKXOuð9Pa em# "Nnx6Z۽rOʑ,r[d%kq;X]$eF]wd]h ֈCpzގg=-f SZY.ʴz;n9Y M As qLp'`N D ͤ`;QzFR/qx4:} F"3 nCu3Ծ Mmôc{q3zE&g-挰'2'&Qjh$ ]:L=J|Y3=(Q(2ڹP+X/}Yc؆BvDeE?|qN/~R59D6V/b֎5~ G6,׀%fQfQ8Ȝi 4]a,j chPzHs-"8ƎzܱC{kA{\yBIbW3i9 $15Y}0(æ3Aj3 "ΏApZ*2.}M2Fo/Km4^?'+'fs ?ZE1?^T>J1mB)L5${n&R/z=S]W-ܻٻW߭IdD)E3u9'glH8@k>*E[ #=Xg K!X팴ƅ FR M`ԨӀQPI 1D[2`@0r#eBc9ClORV:^<3wD3E?H ѸZ*q q٥;Ĺ!-X(X&j#BݳWFy]>ڴwB+RuE鑻~Uꍘ $DU?cD5 i5[__|͛3H`},~۳[nc>U6~'(ۣ/o8.tB_&H7|xQ=kZF ̿cm/1*%¬'XDvV~.{{x7 ʙ1W>*U(57'O}ry|_﫦^#Pn0&\q-!޷hCa?{rUr̳?W]mR?ufΐQ= #>N%0 1G-v6a~NN(dA/rs؛<< {кXOYXy"W g+L9rGtddAwi6? )^jmEE5dC_VХV_NxrۍdN>z9b&uVˋjȅtu[xv!t*\_͠Aﷁ-X-m˖ xPh`g1,wP*ZǹOoo.t:X%}+e fJx[g$~IΓů^^Y%2 %* X ˔^0ɂ5}#Qw_~EG+~Ilkљbٯܾp_:@MRrB2<9Q:ɨ 뻩(co@/Ny EU!mnIjB?zwd59u)GrN kԌEEtJ֔,VX{L <,dYҧ@r:)(' 9ܧ,~b5̤h͎|,!vȩ‰E=+Fz*򔞙E#z% ˠ8z kKqo+%{(~%WkPa'L1ua,pR hPgbr;7nXd|5[2L;x/~Ěo;<[eb{ Yq!wG9aTq= 04jO[ӵzW $~RIga'P'lG5S)tsc`J= ˝ 23\7ǫ|H% BR93O裥R.2mm(+s'J 0DI[\h*rët6jbLu6WuG$hzgv@3Ѡ 0y$QMu/~K 2'jbD)D'q`.D0$U!\;DPX!猋5kZ$EAg-@S`QUҲTeז/HQBPuBmGY3lIK^Fonuq6Yڧfw7gml5 %2 nlj'+@~7G;qgO;=n^kRM$lm9*#n3lUt2[M3l .*G%cd%& aRFG-J\ݪ@][o8+AJx)觙ރ9Ә=3OHdq2ӛSD(,ɖ:iG_Ub죑6'W;R@sx#qʅTS/SJ(.HBNYݜĨNEM bi K/}їoM'J}7(2|"sw6 +jGKI*T>)Dd\{Rih+(sQ^Vaᬳwq]Gc0dbdZ ~OvGL$QzPdJ@f<&=x㗧?mhtG%r jPF~/Mo4UW xp#2cVEa^?b'w92p-&x*0Yj?i怩8(%oO> 5g{=#:P=??;X&jBjiK- Lk {hmMGtm>n 1/}[O4nqmHS3t{" V4b/ Ac2BL33o B YՌZb%5e2K]S;+Mx~&M+o&ބYSy3E{x@@u/󴣴㌐}I?m͹aB' 캺IrAjЂ#RdPC.:}F qd-(4Yg"x|j+}'f7 ?mǟ*JU(WU)`>GqTඐKR0UDZ<Tmjvòtk`ϱtrR%NQKٌY Z˹F9^)SU0C,-L0eE]nvS8Q|:۔̿ZA(!\RqQTeP}\2ks :MwZA (Cyc \bk0H'BjC^u\n|\ al6" |P P! 
S{JnTbvA LH>MM%.sկ(W̘ZF%&EF$i6 űBIJسOh3f95-hG A}Z8fX$dk^^@(.7Cģ5uP3:)HnZ+6 *1 Z,H║%w!F*' MIKP+ura [8/uNu^$ٜhk#RkdLtƆYQNX獴7η19]*hW\BvM@bȓ TwlЌs;'F*zؼY ].=T6iQ?RkޞlCxS+紶"Mr iߕ~#uk{j6ō0{b1/>e/Ҍڐ,j#Jhr={LidǨ.}(hī!T`zmhb +vdE4E&zhxw>չhjI ) 31IMZXB$x+,|NR.@0uHZ#`){0 PM pńRp2RݶLnuC\UF3(b7LP!(w;W$߽-`x_Czb[j8\aGg#tkb_$֬{N9V 5J3cԻcZ2 $q`Ä_lD Ev F(#~ɦFkٛ{R!tԎҾ%_f@>w gvum[JfߞDu_O.2w>a1ixͧ2|(_#֍$L]GdͅxRk>J3r*ifCH)//IՌ̠e adPwFde} #5)E32$3zԔą>oqkj݇a沣b Oc^P@-s'ImǿZ`'Id5L+Zgv$@rMh9pAPx-/5gb_2;O7׬]09m]JcHm "jrp$ `TXnvLtᓙR5a)7X(XM C8D:=|#ZJc=Kb@r)Z͕$t wknr}Uw:Kl=C㱿 &"dC@ ֢e|;65i7hQJp rJ'A?H.m H8 ٘1,#g]0/]xy͌v6{_l-&'nuMj9sq=oqn9 9rsq=H=\i͍JQɛ]47 PN=UWѭnUtnU'!gNK/qp1񀙡((@ 34+-Dr{jovEXR.7˙y07|%H|L}b"b#fc2fv$ ٫;80Cxw9:Z1='%RYYi&j!RM 0Do 9N]$jcub!lY-緧2I{AVN IřbPnE&k<'H!s\s҆ے߭{mOu =n@$PiHB鮟?ӯ{'WF2ѺTk5\ԫ$?RZo>wf g1[?38Zzh?߻ߣCGjkMmǘQ'.߅##ٍ\,F|ǥ?C;X~U /OaZaCGr"J>8Q EtڎDٙvk(8Qu!!G.d?TsEIYIأvkAvۣ<[hvBB\De HLHw%SOԑuqq-T!֣&}XO?V=M6~p(*qx=K+[^6ܾ6؞i_><|ߖ${*[(?[oxa e{aSXY)ϹDXR9:b ܼ^A&_\p)备FzGw'VR*G%:4D Pcp7rA*NրBP[\{sjy.L=H T#\1!h)z ^34?Qk{4J$Ȏb~uWYȊ6c7;t@|,q3CE9|ĘH厂n>o> (vy>9qqaBxl\$Fd^aB{wdf($Ga2NzџM\`HaH6a1w3`̦>|~j<%ՌRQYitjm 9Ĝk$ڗCĖ2nt7%^}QVgqߟpCLM=&rSe wK7'Qֻ9DZ~]P)쮫t3=ZOy' y'餇:IQFRbcլͭVw5kMe^+w.娻QNT€ewΡblsiMPNv-?s'-v-GҚ[*kyѝ3r ^qP 6$)[iʃ; v-_&H͸H27H7B][o++_@2Y$#xdd ^mO|[I>3`en[ݭeld[-vbX,[z%dٕmLz{:@ouWpDd|xdv=|y/|zf 7l m1)Yd, Cb(C2czVlf%Y6NؽL}MWgwi`G2}I@ؑZ.l$>s-`3"ҩw\k; 8C\ e0;ju}k=-v| oֆx CbQ]d6Q[om&/wvU ͽAP hu7mf{6_Gm~o+p7wk%0vI-t9YξB%.Z+Yэp fZkmlrmmo^G@FIo%W0bKOL3.!i+uv^3k@,6a66mseMΘuZ.>i3Ig,:#1z] B; )L,ؤc6ANW)uBd:  ttevBd)+N u)H IIWzγBORwA9[1ˆ:ZOS2).9it|+m#xy]i_c>ZՕ5ćW40KZiS026<Yh!hOhx #7%vyٞ!/I>N_e 5и]s)SG-"̄ $` &L:9cTVE&̂aC.J3VUgNJ^ 4GO1f IT vEtQf?|wv~hJN#[Ut?oݓtVlQXu.wA\ wӋR_%DdGVqm(™_oߖ5J$2R3-^~?OUW ӽJ>CwS^&tT7i2Ȟ)9LO Bxs Yj+KϽ(=~VNů y"J۸& :MiQڭDj.$䝋hⰦLJۛ" rK ÃU0WېRb3\EkhCOIO!h#ބqgCIh%pM;u֙EmUOww`j`Wp#F_fnԭMyv0j;0CM:aD6[R-=4ȿb|sI_v)^eX u-iWoǀkZ!w"MR[V;+cxagc_%Fan. ;PcS _S;XRLAs wJ R+ajyJ2<Λڰ'aS='?@.X.O;jB{I:Ea@erf|{>wAa[7=?>8Ƈp%dw<[{muFX5ySsz$ jƶ>Tng+tJSJ;scl2\ ~qp>B'G}3%;BhB2=:i2Mt'CAM5yZiQU59WGB3) *D^P̤h 184d?GϗG'A/:̛K&_B/?Me#k˱Q*˕X:o~&BE IZ0Մ9JCYqR":ș$2MQm$d|XcY:F{#@tv!{9yV(}'233a)(r>Ow5߷]\\^>Wt3gcҀ^s/> [Y8:̭А$ZC|tv٧ҁ@-rITpb :aֽ AEњV Li (P•;\0Z. WĴueQpGQ.h4i.3 8gA&i-K0ȥ%r-rY&E:3 G:J( 3]{2J$7I5bV6cI|$ln[ڝ51՛N^!l]Uos'(>&2X.d-BA2Z_4w{\^b&Ev160܂d v*2Oa8BNUNav-Iȇҩ%@5%,6Ȭ*iH0ʥ6j:YFNI9kd}qK?=w7B_0ZA8g3loiHn@|8Qd`/Ɠ ?R KLֈ"#iK7~{q}19m<i)I|9#T 9ﮯi2 㩑D `ޥ$|6!pKm%Wh(޻pFFal"qD8j.` $2{J.mVۦA)[HMHbtBqF)XI֊,p}I>RR %x)pvD(c58I%⦅7dD]*V6p M3Fb#uѹ[.n@֔#Mzaq9 KLI̢jLrNQ_#Y2#e>l.p.T n ks& jd^BP֖|`ҕ$ERɐ@492  |u*"5#<Ȓ̺HA\*cxS b"DC񬲎]քώȻ=_ IS`1yO1Idr"wU@9l`ZGŋFI"Xv>Hp19N8%à8x t tF+W؈-T`lߐeȎ-niqS>7`h_;Kߩg9 '^0rz, X,={8.L/=dm*x/=/ )Jv<<)P"@2ĩAEG3 l$ ~EҤbdVv|PoCjzbDyr6@č [+yşX4~ܬO'ߝ]z㒅#](%P4UU_&$+'<9W^K ፪=P ClYf Bch'nn(}zХ:->JhIʻ\e#A-V<Մ28p(#!lhBpLj3ŠSM&/b2><%2ɚ6s>:#9y1b%3&Dp!__c͈bڍC$Z%o+.jiPN̨V ^:8_GW޹q6gӂ ##&)J|r^ݾacPjf }g֦{wJ-yF3% QOؗ$A Ҳu|mR^1jͮទ9MHʩ˻q:MoWCFI0ɹqx燗7gׇ&u\짃#riN`aBu \\)MaHiT;cҲ#j7 FP5f){INÉQW3x+Ϳ+aMpDWM!Bs_`(6zcHbLE)P(8/ O9sOO8XjJ&UϞڧ-@z1;QȌ*} [jXs)PdA5ZV*Jqf͐ +ыTf+60Ip:2RÐR~Dlt-P 8T,Tc,atޱc1^4,JIU`e?M ϓ3 Iy3IX΍MI06%HkRbLFLu3B`czk߽B'?`7=E.w/G1xuqE׺|偀؀.gM mw0уۺMƯm3›Ԏ{h93lX+2GG.:载}1ċ_ zgMYܲ ͹|LgtؘSV։kl]+hp8c3^ϴwQE)eivxgtY%q4z+ќ# bвj!1؀ӟIvV6=clD{è7z02af&[A*ץoGm.Gp1o0Wj'S[.m-2cq~Yd 1SAI[巋'Tb3Z~줬uUcRGd'&ĺIY{RίnnRLUӌ 5W~>Ξ3V.3 NI^XS{yRT<մu\(R=z4*u8ƋF1!SGo]FJpzTnl9(o8Z(&*)DMdK)RRs8ºXw=wI/?)=~Udb3?cw"4?ӟ\2$ٝәK^-K"/~n@N_?igwSQ:4: 5!LfRk1/oQc}d)܄ QHQK-)P,*J]Rsi"P |gTthˁRB_^MxO&p ҩR~RWM$u GuBQźNu{xu<[ y*SdSٔ:s&'*9? 
5L (;s|* mF}C%R-3k H= )KVTĦB{R`|la>'vx6(p?OkzOr^ߌ'Jer`ѓ3kjatYSxЊ -9)0fL@ditoGEߤT > ߤ^DbuTphQ-TZ#0J s4NQȼ%JxζLB1n%J-+Gr4>G("TÃyc.pᬪh_]kNš\qƷՎW5~8|y>|r|S0j) f$TycEQJݗwBW. (`*#Es\!ۇFR#p:stob V[iJgVyC|MX(79+_$ /po ]:c>\ʱ"52kFhdkC'R{۝ ych@5׌9H'o2z)`,OO9@OH_z .mQ &wCyc??.UƩS7e\ j\~QQb\dZ -8V2gY*U""C6U$Ugon4 .ߗkw835 }eoq&!v:)?Tvl?6K(CWMFjaጬ,0n! Zj˫UJ^'너D0pqK9g9B ?QS*KSSMx2XԶA t_ `P4Pӓr4=z޿ӂ;Y|XE5q$v{A0PEAFd2ia܏(☵ixA+^ l4Qm $j~xp]EKlX uyPVw<+ K$6L1IRȄԠB &3bĆ+ C?u=a$$+r6 ׾ovU ǰ6&JinM轰שH ֝v^o#$J7ӻI?ͻĴ),%;mw2 .W:?{#_zb$^do̫o_huq}C)m7P%ZJCWTZ֟ޭMWňڴ 3[7\3Jo="7\B!aZvMqOwƍK[p Skf`ַ j{|EnZ'u@7x jH.pO+=вm$1ӍNߘ @pwk5nת1`QLjiխfPєMF1iύaÈqXF(05&U85(dq+Ilp TL[bVNLcJRceIZ2FxaSY‰ 41):Z%dgO!&#57 JmAdIYz~aľz>wQy1kD)NQ?ɬ"\Gʼngg[Mo=EV({&p4@+UPL>-FsNsV]tx.=zqtIcR?v6k8̺"e/!d|oq^ڨsDk#nҿRg@NJEˢ9N.,Jgl[,AhU흝΍+~+b]Ϗt[ ՊW(x}f~u몼]Hj0ʩcFhmyxY\}F֪٘pd*Llth3]i `[;?"[HՏ]8">Dj|i- #Q],+<ԖTUh} -XVzZ\̨pK.:oK7ִC1i*>8NKM <{А:%B_7$- }FvDKIJ^HӺАA:U:UsӺ &`B1QgTnnj"hw ibh W5@HVn5wq{|4V7^9],q3nlvpO##GtRb}KJXĈ[(*d1ѡBBJ:Q8jN5k~)DxY\5T.s eoN!ġj.JG}>!^8'R5o-[ў+`T{j_ xɑY+ۜK^xƩİT%eOJd&fOiB$ngI4D9&|a 䡾)8X+3a.!Au6lj=ߑ,,ƒwvq{ٺob8Y"䑑>{g A%y/^5_*[#v+ 1.g^.T?(p눴M}3o$H6hm2YKED"(Wdf\8LR٣qL^6HAA3U *@!^7Zqv, -䭇Y?\,5cEtGBsit+:/MN沟0b[&i4xZi3/NiaC>[4&Ic n,ˌkT_,,tXn~ynZ@ #-XWYdFc^5lғ09@\T-9/@<)tK<զ XxKK5udEl<E:˰ScJv?;W?3%w!xZЖMܞ>y 0R#H!2bsn3.QjU.Έ1T 1Q)Z¤E+!)=YHA# OrHO6Tk>nWw㼺ݔ xzH+jISM]qn2Rݰ$-eeX+Ō+q x$\/{׮aQpi.{ zncQqzk׫MCyP%6V 0ǩE k9NB(MsA I[(%іD2fņSQ 5N5"W!iH$#Y2a4CZKmՂig U \YPxGo0>0 ‚fU,IMq;'R8#L43<\aPBQoP#dY#(`}WPBP :ņ^8ŌJū㖛=* ٻ8 pAzTeȇ]$-ݗ ꗥ]YH {XT ֨MX|Զs9(qR8 ή5@ky$Bp[}\U.r(w`Ztx`- =Qy;5X+iՊDۛZiEk4 TVFXY<\}n ].j#*˝^'UMvBv&pZڄNRG\I~:mFuE%5OpQ`Zh5 D68ݾӽ[d<''ї0}tF|?K,I] 3Gj9N5###ɡ_ժ{]\;הDuz@u?-˭qkrR`57(+͐hZC%\p*M\(Gm%N|ˑG({E$[X0_\z~ow~J '_R;nA\5o7|*(MqBjLH+/2W U&GDJXىpZD;|rhdls2‚@w2ڝeIs`2܄IJyj"=)Ê{b3?#D~3LMvަ]j#f%;|ySu0+ Y:O_g h}fWy{q_rmU\B.&oY~.=_.ߜYPe#{C<ˋ/swq~u_wo+?,Wvվ~t,bH@K VnGq_`Plr:YSy\X?bl[~Cd9tΧ ӮzYށktç׽qԺw ñd;PCNcu~EGm!rG{z]"gN sD(Q1Ts'<); rm1̈n]v q7EMsJ8hu擫Ha!،&O_ivh ,@ ]5ӒQNp~-+]KȟU`eC{^=|Vl6=bnס1s=M&`}G!LgԩOL&exY@ѓ9Ua(q߿a|:JJ;#wС~1Jq{ڌJΌ]~7cQ9ƘEv>Z];G9sqsA-,Ԝg.,QeijR.MWy9;V$aD5$DsBF-J<BO=ܓ=jjm:/x=W!v#D/G2#4^H dIa" f Zb0jpJN gZRGSCi:W`(ug)n8ica˫ˬxk˻dII7BC|foO7SYjܕ2s_jm3_Be*|UZTJiʑ}ޯxFߛٻfo̚.f~5`˼筞ϊj[9(*~]8)u)w+!X~VCՊVu8-@d2,P#2 R,AVخcl*2JJ4uBt/a0#i#S&F[wMc?fGSo:Zޡv{1Y(JioSyPe̵YfdF#]Er̨G;HT(Vȋ2”LLA)9ڞh r5g->n?ͅ0$VȩuJ: l*?wǝb9MCJ"ɂh6_oaZ.hDc p,oK )>R$Ak1MaP$m-K;t"+9ΜI6,:d2z:;="J&ݺoܑJiV9_̦@:%1J.Yrv5M޼.C3R2IWv%>{10iQV )QGVIM;eѱ #;Ј248+QF.\I3ZQ %pxbއPjhq֊-.4ٯtDVj:g#ssH0=:lj2 j r#BvЎ*{R{ aP[̄‡,&+b}иge<EA V.AIZ(3C?.ޞ|ypcp^kS_.^ϙSCYfeϿ||e ˔&xu1ȶs2MX˱U㫲tlM7WrdnS,ϓO|1hN r j`G6?h:_-HYPu|Z{r'f, zulCc6iaWJ'>K,6y^3Jɩ/Es6\g0kEIc6_, *XcOJ gܿQFqiѨ n2eRi&ow2\h kng{vFoЩQ'jDaew*_?]"s` : PonjXN@xepr4{pO:#/tד4ȱlf233OVb#-g060 *ӓZ Z!oϴM(@xO,~ZD"hf4[Ӳ(r(0MXc')DI0@#C%$|%ڷyK~;k}T:RH]@cAVAcj6U Jc;hm$v.<.ο_ֶDžϊ/sY;VӲ!F|KW~z2ys~~Y[#^PyOaXwqx*@m9 ͰE]>iC(o l0[,O~^|k~ o$'wo@Ңzͩ*ubK6z?%Sr{vC2>UvV.n>TDYV,G/ŵJԲ{joӫˬSP_켙:d#8Ce}{><9sh4#ZﮖIedE"Xr싘4I<5YIw26XY{0i9Ȅ(D>d84d{%z#LEHF >߲3ms`H:)HݓfIJܟ`Y%?ä%0ߓA3Q7-Ȧ=Ua  -9AMM7yrk?h>p`+ۄĚ6rL.v>V:yhDZ/haj} KjA۳,Es+y1IEhTꍽךqdyZ bB2' k\p#nzBaxӣaFu\PyEb< :tR&x]bE'D eHe|V q'W9d17qGwdx6ScFIhel1P!q* 9T5oHE7ԇ%v0? 
Ic;nM45.vZ5e?o78)F, K8:$i My-i5QKSP9݄Z o)l;004Cc2h<ʞ~Z!U| dQC~Hi.TK"r+mF/D2ÀMwf6däa5mKIc߷$KON=vm&*"{3<;{ *Oq^zs}ЅuК?RAa!5\i8&-"]ZKDKB[~c+^J|}$%N풀>2F!eyR~l!<77ϓVVzigˑ?zdՁ `tb%i0q2% d 8v1OЬ %%&QFn9/qkT(G[|jrֵC7uznU(7IRRxkTFp;smQMlqjmݱ:ce申b(l0ʭ&i&3d*YׄNnr/aɯnzԡZļ.oD%Uf@ 4XEͤ!ʥ41"OPXPrY+/;\5^Ah%mmSQr/ ]7  E7R[ w*֭n߈"^ *&H!ʪMvu(H^4ئ0*>ld(͂dCK/Y5YCTFTjL V6-tj^/ЩDGW1b痷*4 & 4qY<~?gwoJ6I?mbBmRNZ 1X΋'XonѬ"sZ]K'F>vΚmnFZF^Jt \/"w/fvɦh͂6tC ˂Ν[N 76]\,Q<ꎱbK_FoY2z4oQ_{G-0Kbz"]zu_ $5NNh* GA]TA[cd[٭%X|a;Kd)Lm{@kYŚo 5NbM ʶ-[ZkC[YFS5#ZڍxS@8"%G!Ɏm 3F<*52TsT(LBi HQv꽉STݣZ`͙93SBb؞^83%D \sftfJUޡ{" iij8pi]ޏЗQ]'C\]gD+)NM>&`ܐCU-Rh FA#Bl` g K@s&vhyupsm{λ ?aUYv^K!|(cr&C2!xD8̥*3SX/%] ̐5|Z- )PdD``Hf.5EuPVDf L3"hvs̅5x`x|Z\_QG01#|cU 76}lP&.ߌʙO󘐒/X,l^Y]kx1̻|zυf닟{fɜk Ҕ!Ct8%F{9p OL"?_,p Td7X.ܱ "ڏ:['IpVcvxDMNDh7*B?+M}^p OH(C]*(99VQQ``Q;ՂxYb >V޻8}(A<%zSC(Ŗ>YC(èIJC!Y/>}$%MmJez"+T⽲MC~5U8v}bui?qgʱ$f< Բu>Қ^E) s΂n4w2fBG1 ~:uUk L;3+*>6)qFFBm8ń6UArfٺs^& +ܯEHЦEy טr4i4*n<ө՚ DX3SgmҬ] g10z : Z`tR7"LfR<ŚBFVռ+ZC#.f&E0uVhtJJ&xxeeYh,H0k?c:tH+<וN B /Dˎcn<,Gx eB/eS3siqwy??&NdЗaU{=":bA^l\* w]D-zěg,>Bc\Os^{7uI뭹MLvC׭1Eo#6 G0g/]<|i>{^oz|g>uyfaa8M&vSl4ks8>. <4, ncl١'ҋGHcȹzj}ȉ׾ʺ޵!2l:!O >UN?t(E4Gx>Hp\|縢,}(h<}42H?%Ҭ=A~VvK&>{! ㋷<4x]P;q1oX{5 gB8jd’?z*ru!;G=uKGqأ]ՠԁ OS˲<R{: )֝0$(QݧEHBJwl%! ǪuZ*;ʺwίO1i=׆$=Sܡ.WZPKmlzv9 U`Vj mv>$zk_]q>=#@HP̕=hji~pBoJFE (s4 Pvοt!zlzqlITawV_T쓆U/ݯGUE+ye(T&qp8 68E] xH ?jl.Kb66IiWy+5"4$WֲU^) G`6%lSl3fT2z) \čgi]lCӶ,=>Ny[}.Άw@njJݳ98Hʵg$M8 BGA(9|$ɭ ],lX b3ϟ [cA1˭MdȝEdP\M''nܚBpv,P<.llb -\GGEd C5J!rR9@;>zu{[pmcIreNoSPbE6 tJ>K.z6/үw.܍.>;^Y GO{ݢvZ./$WfP(ڙ9-/<_\0_L|1o;N3Z>.::>X\>$z OGK}:}vљNjE9UڡT+& N 9}DՒ_zIzXD ,R>Bw=)4e#2&597kedy/ZAf#˯ݒU,DF^qYf5܋d:͊>kkhN.h<ƉWޑRc3ԥ#fX A}Y8$wX*5ٲ $#׻l}u1Zlq-(ef 4퇬yߦn_I#qInnNz9#g4 r}UXDgi^nZEr6% DIH;qrk_GkC>P9.,j`HrIkt-7s“T2E=a+Bk6Ia) pz@гJj結ޙ6%#Q(ƄP0;|=~S'Psc!QuЂ&`>9ǡ(ӏhq+%( &s5鏷 TRˊCpWޡgDHEaRkYK-qww)GHn>*1$#LdB]m(tPqBeJKHQE4TkCo&%KQ8tQ_&y(qO.[[l( =EAzd^B֐-d;z }b;nK6eK%5]M/O _)-kIXEN~7YK /^~ˊӛ6>[IoXf.\Kixi㲬 4݌g7_Cnk%8ȹW2QYC}% F=3Û|:qSg&p~?+"K|v$8Ѥ8WwFUt/a𐍋1a)@_n fKSFZLSQ;7lJbB!sj+\)`蔌It Wc)4f^U{jCN+^x ^ݹ+ kktjep-9`˨Qzó:z"8nz-PCV=wj'8hi "aZ1&2C#J n3$p+@eT8Yk1gCh z'I& ùj@"DӑJzEG2Qs.z'+a8k˥F2'\DA :m;NV{w܂M*o0hƝI!?`*(KFy E&:Ȅ&3LVцJ@Ot,YC8 g"s\1zF5.t(-szqp~o}@s}JQU|4mr] 7ȊЪx0pޢ>7wwfM#ܽR~{t6PKRt06Un/#D 1$y>Ԅk~6g_ﮮpLXxYHh(Ǔc Zxo}`oàJȴr_X$Ƃq\2&d՟HlS(gAJMT݄f'Mֲ݆1gH6#]d3k;W]8Zg1|5ɬ1?Yd~<#`Ƅ>a._l2kq_tZ*GF0K[L}WUh0ëfLgHBP۹^Sh7/ɬ _>8Z'Iڸ/+A°rO:*fz'^HVK.~|zUkNZ d ȇӊ@[ # 9d-HbJV+F_0[~1kid?Zx6^ MϲIPTkPHsjery&#U.t)`1O5K2cLgd:7 =k൚,Rĸd'+. 
2Ek6Q:"Im K'I9ZO,zC(!)P BJ@90ƥgX`ku&H3@7B%ID>-SDazPcP*3~UrhKWɽoBD1h ԉYr!73n%̃0܂]~JϩZi/Iq1]xhP7p^P!PJԦlXZ\/JEE(cע'i< cUBފ.`D#F򚴻wURlgcR)^ՇhtZoC\ I7]W~:AZ|eтuj?O[y-;z:+pɪo\[/c !A+ $U nV,9Y$Fm܇V< ?T_HFJ%1ZwmH_Px?TK*;~ɞ xH-r|[߯1CJC@ %tR)_75&!rϹ$aǘUC(GtFWQ{[VTm=l*dg.vzݤ`FY8&7+ٲ uJ+py+ۈ;X7?Yfu$g ?|sr+L)rFٶM=;fZ@Vm$wjuc]KQ\4 )7qhWZQMh$αxڎb'ZJ pruʜՆC&=6 oOUHS}SO~"=J  LX{_}̟-}| Fi<  LQ<>DiM9)E@FvKpW^G'vYѣOdg#&9WO*?:;n +]᧼v`;0G&%̡H]ȶ#Տm99FxcN[/!O Fc h-UMu1BہRѶ6k(D0ĨP1\RDPBmLT(RAG0{U)Uf'a( ŠPBYG:iDf4!7ureFi]fZGSAAӄX4=,mJ\ʕܺ!A 4'\/@HˀjtPLf5n*NA7L'oEcRi#9S#+$H V>f'Ng\+=dUN1O?%W}r6r,8zam ۶$֦NLsOit%~]_;5beW'jS263+1ʡUp5eJ&6/F(뇄欭Sί=z0݈b TFM+g1Gd镕gy9'/hķu(hW&* $)u@1zF{~\؋ghqq#'v+-zcz^yfmo;vYD{ lpTST1pJ[+d31GJ}gT_3or, wry= fwϔlxʫ>ߟ_~~y̍`%~ay{~d ?yacY:LٷQ,Rܚp`O'7hzN K.4*TNqg$(lXA`iV-plͰo'Ɏ~Ӊ, G/kY=')vJe ʚ?\pwY v)ꝳwtSOx!<}ẇ* ߼53B*_^X`)B!1(n\"I@+* PvJ)WZa_avwLV FU+omQ.ԁt熠"/0gF)I +.K ^rb0c15&!ƅU\̰cL OhF fo>a*ѧRrgԀ1#DK7f 2d:X:L+Dj(M3)u_fxEeZB#mSS4g3sW y%e(Y?u1 /r<2LqS\X4&q;Pdc&5h.xC{IA}1ImA:XhR #GR*{͇SiVݿ:G^ 54y5O3XlzAo 6+@ ku |b`GXc`\͊Rzd%5V"(r~6S `^BWa\d_>5Rz͝L{&@;AV[xɐS&wW'%.^20F,A;N'8&4Ք=y^BC)@*Py7I*VmVZ dwFtl#]g6SVlsxG9WO4hC<8P `hafa3=87+$Ze&pu, TQ6j]㗆n+ a T^iě& _4Y "r)>xy1;;8/:-34_6𼟏F|$m yQB+F/P8"*o]rw8nh$@7sJtdUBIԦDRrF9 jt*K:ȚoMxP\<H  *-3a{OWdohO1~ }YF" #" ,\McEa-eh%fLAƓQf~0s̮6C8Y>x8x'S]Bb%-cM%sJP%9/ <&(yaan&1 8>倫0q(3କR>!Z40CWJxΣcz?gEZxq4gjw5(ff7C;O&_Y^Ǔiԍ`>P3WFJs\3@jpLE`:|K5Y>LDt>;n泳?3o.F}W a\3)7N9B\`eo^1)4Q-;OЃ`ǘZuG['-LIiXa(JXz@<~%\T͵vr'b}=Dh&`k\S9g술)EF 4֚%%hTeRmr2.ZDRd 8!9j 3 q w ?eޏ c \"q jÿ9UNb&>ߏj 1:cAyĔq]T-kϙŨ4h{JTy TSNbCն 8CsHdd@X+x Ɓ"FO5Rk}wZQfA $<À F$XU#whzLVP1@@BC'L@)PHK-  "&,Z€(_BǨ>@1I偃WAS051҅g^\pe8+_zƁ 5=oxffbWю'wEm\EƯb2,:ClcӥM~u5{0+O-?x =GH-( [X{'PS a:]HRяYVvl%kV{c 5j‡Nck'4@tU|ǜ OqNȁF/COfއUYp+O*Of 8fx(myhu52e"8l2zOo8OX쏑u<ۼJ糡j}dFDr<4JG,plImxo?yԄh .'nDDU_(.9j9 ?.%3In^S!lqS)-lI4x\k AUjSܚL ΣE<2ΓGZ)+ !`,R:|,}ke2t\[lv';?#G>ILn4ЊHuMǒĤ}^z|p.|Ԫ$:VJנ\[a4٧ PA7MD)gs9LQG882C"ð*8D@dcjOQ^<[$BLct+Ǝh(l 6v Nb($9CrB(-b6kh=WԪ*-) FRhT8E+ fY>FDgv\ ):]4rͅ T#31\CHIОP!}FAx&ÚdrH,`gz{LD GRZS2gZǑbG0=,p}7\|3D;Nf{_v!(ɚ6f0$2UU$h/GyBO$R)*lb AsH`"a$%W_]ٷU(^UlB!8.mBegɘQ86!l"K, P3!r2\ vv}5 Qx2FFG0ANڼ45᳏YuG_2 ` ~eds^0Ira:tEס__}nwpHwwow6OwdDSSqpv"898סּ&nEBs($;bXV0IuFIX a(]uzPQ"ҧ3+ID)1 s)" )MY GK՜c/L+:׋hM.Lf&ZX~E'_]bNpݻQj5{UTJc_ ;yCF(J@s!_@r)J2DC41/EqFFJ/O%T%Fx ͤ2x:*i4ޑ LRr mWmn ,|,_\~Y,P`l#{6?Xalit*hzwHg9/}|W~b< ^FoT\wvNs0꾾Vq[Oj,__ʘ1cy1,yP6aB2$٢rzl_l9LZoĔ\CD 5?,B9;W'}9:רF{F}R1"[n`?~%)n]| Z_yso\քRh˔.bjABEuadn1˾yy0i8KX=fȸĊhADc%@t;dwCLF vr呖"xu}zL9ApƽxiYȘnFo?]n->ƸXoyMxG_.wܰ@;}P >7.)|Ͽ1ARfV',1Vu T=.#%t]إH;P*AkMZ^5xj ߩJW ]!:[!2 :6έ><\*TEdT;TȨ^յ#5ƑAi3ul_[Y)xmv5G:Fp LW8K'N(N\ez"+!8M]JRA M$ן)Kn!Hkm@Iu\@c'X*T}˷&yU(UǶ=COK`8lJpA`:1*uD[q)8e$*'ȧ"+Uװt05Q 󘭮;S$Q +|ÄDTATX&TZlJ!Xԙ鉔 IaOM(VrxLţN;6SWgr04MM(,M ƾ[*YJ[w\_Җ8 (e0;Yէ}yD]}\x MH7,QN!eՇ/R+u)01uDr{U;Yg* bH;Rq.Nt\I`5 &h[v9q݆R {i G!~;H~ĜhRw!U@\r{NYjB1-gt|a1W}+P+ w;sqS445YbR5dp l&i&0?*e=Xji fgV'&L (*ʙ]1yTAbQM"wS]!D`.dAW&(ixS*7X?"X6HԷ(&L!FBBp4RK+T<@\Юaah8=DQ[Vj 5kV v&Pmة >2}ZrT3]*f>rwwNRwE"`$kӗĠ)ŒK`{N&XeRѐF8pj[k _;r2 9 =~'" AP.\"и fD.(u1m0}/̼B8=w\.䜋\˔:,駷ݖlm堇-&\*=pTU;oyW_j[LA(}hxsRA"RtkQK,l{9xD9`Bd ԙ`6<;7_zQz{v\}*T ( \So\B!_͗dt* źi2xr[,<=hnM/էUx _4*y'K~s& գ@ϏzJN! r4E6Y^7_1yEs9a#P,IM)'5b?O<,WyJ"AttL$c&JNpTatWw3z ¾G%nA#Ĵe7ڮm\P~tUֺ`¯>6'_S.8}D[7Ǥw6IVѡMX]G88Ry4G)gHkms!stλܻaԑQ#]БAF/Sp/+>Ϥ(c`@DQvU 4-jp9M\:k4K縵tT1n]긭qzC3t:g,.)]Җ-kι{w[)D]p\b05 `9ĩ[ذE(Fe录8R u<EGm:MmSn=ۄ!ߞ-6cFu<* R9umb1$ d7wxB dd[Y}3ocF6yLl>[}9F-NFw/ ?LLD 4FI+vx)sͣك¢-! 
GhDl6a{;w@8zX<~Y76b@ȟo<'7y}_6(1'W ZZXbbrkcʸuQа~lESñUʦD?N$8RDibRX C)aXŒsT} rp/ik@ UԠ4k8Vij\jH5 xsR53P-@f8аb1[.GDreÖO%:A%j/`d8\̠/4b C}';mbAF%uȌx3x>G$h/=&&kr͠L-K#%ZU@9.7nou~% nJ|ƓgtU-Wbo)W"ط>yA^cg=NѮh7p;d&{Ki@"{Qht[XR6ZZmP]Lp`=xMtR7ZSKd$*?~ԧqLj2CR3IX#)%m2aTI3 :KN5ڀjIN?[̲o~N9|M44]e%>c1JgcZNNVY:#!ßl5ۊ"ew |4W+3 GL6b^1};ǩ=Œ40]#I:y|ߢ[BGϼd*vuL!Ҕַvfsp@Tw|?Lۂ=ӥZ jNê *6ɡúC}qVes4|6 ؈FҥU0UDO!2H@N2}Y۫cDU二:6Z-Ÿ&R..{xזxpgS_y3O7|9/{?0Fmh 5tD\ DhUOKw4ݙ;4ߝ="#Κv5ћp 뾄 )xy]aϵ m<YqR(I|h5DkDQj!H0@#u[ 5aچ]9hn:N-xsw35op?{WHƌTlյ΢ }ϲt7(vJJYL塔[݅:Tf`)&}}{&"Or۔N5y9re{Aֳ|6 ]\UiO1b"k"L\)" i^䡫VVB캎p>j\'LŞMdӤX}̻]P1Rs#1׵7%T2-dBcT+lxRKn#r.hq-T"ԡTv-4s= tW±ZI>=ɭ/`Bx6D p܆}T29X TAGsm3ŗ81R9~ NCAȍeD&i>14u M6lTY H 3/pŚ8)Ѡ%g #K9gEm*WCڳn%JQ;-I3 0>h/5kOˀ8E$#5H ·R40 ԗy0V0D0:eJvJP ǑdcI'b$! Q)ZB Q` Z DMr˩` R"%g>(H\$i%4N82zBtDԔ-%B|u㿝?ۜA=-@ ?xE4w.?Lttȗ+NS::!ٯw^\>\vOO77x}N6ԈTØ>>ZN0YN⛾XsS)EVw+Zajh/x?F8)^~ u759֝Ɇ^/lƳ8~ƙrZѤ:L:_>~ 񌴀0pkC#3*ϗͨuӉ) eB[ލТq} KNn5Tc?jZ岋ͪ5LOo'xfV*up*i4Vag¢ǬBC{RaF=_=x-ұF)r8nkt~ó^5 J #y.V27'7aFgx>_]-<3蚣Wp$g^!Qf7BBĀ{v{=n t|e4auVQn̼ĆJ $(JkG,q^ «`XґEeeJ[# TXvl. rAB{cN(,UԔJPvS^R3ҶsCuqS@*]*NY_&>LoKiUj#$;e:LG(RRe( |X cC|'PrV% '8Ot8WN q}d7 PYHJ5+!$C2(Uh4aBď /CWxP.` s]sFL=W ˄bScm\Aɫ#k>2)+1(yУ/M醮pRހcfSYm̔1+ğm·"BAmA {_طZJr+̠!7W h?!vךoƵV !6Lj1G{ׁDkW?>2!TCX}BQ,͋.To^Z^LCUƛ v7b€Ճ+^hV`̭uYsaA  _7=(kKsAfQۨ f/ y'] Rt`/ebRXDIvT9cLJMڼ\uv{mbKiD%nN6ޫsIZGIQx Rt9TF`wr03Ds(sRai5\T͙uʧ #.+ׇ/FSV$.A9u:/1/ .AY"('%]@0¬6!g1fA(T։X`0Պ`b Y H |*%C+3aoTSBɁP0r1Suh\hҠM%Q":8Lx{jKL*Zڷo{ɶmiz_[;eElsMpU+fiQ2i"z@T\t"Ć)(C3p;TV4OXx@㷧F"(YL h)gç~>GxyŌ53e _8 S3ϥ_ΌR8+L'_q|e[=֫a\Ldh9qW Jz.&$ N**pv4JX!iȍh)uù+/{|'mPT h\<>ޅ s 8a&ʔi7Hp#a$DSy0Vnusx.l3Y XGͬLԻ]R210LVр D;i^G9'=K `p՛0ܔ.:£+nXLbx X:jF*ێ5 Kc1שl 0{Oqf]+|)KɔQ)W|4Ey=yw'! W ope&ӛ374xŤ֛rFGD#qբ4)$ O\ L*0K۵'09*rZO/T v\KSѶ֠5OP1ߍmY! Rad$=O!D]̜b[θ1xiDKJ80N LZAU=T T2y ~ )o$iZ&<#d}kR(7R5JJ=Gl#60o ڪ(75u^Bqg]~ǃr'IPb}%JWɦ(|mBQ![3Fl G<30&U|8GBWj6(mWΈCȧzłM,Y]Moq鼼qN@uO7Lٹ0Jc@.4,y3F }{&SLj432/6z+އWR'Mě7$jAdU{HKv$-nvDCJ9bw9bWz1mQDQ5ѷvpMGdT{>w]zE2";gl?v\hLήB]_ Ty5[vw\Ne'!HM$x#ybNp d*O1\pTP%sEm=:agԃKu6]CID&J_ ۜ6lUL)T#Z+Yx Jci*(BXTV /+=kPl'Ӕ2A1f Ḱ뉱$(I<4 8O:r*LjRp9T?[P(38ݴ(;57 FvRP;Zjj\ 4֦ǚ׃cy.?-A@A#{WZn#Q$% } =HB~fCU Ud8/rX」VX%VGuT<,10tiMld"DG5@xR (N#g5E>zFQXCcЌ\lQ2@%74B*5&DAW ʓ`@`80u%1x4(0]"\1mсZG"b*L"ȼ`;"R#ևE+E,)o8yV>W;*۬^_m׿@fO0z4A 廔VpS^>N꼧hfPyY+@|S"WwMGGNX@;cR:La1|-݆ Vr)vy0l0]u6>nDuooIrmb`d`v)n[r|AXR„Hl +5778H%wP:25ea4pheH ak9"1ҩ㆝,Sr)$F83Fz2gh%6x ')y> GHHDŽ V[6]p'>b?f=0dkP-Ĵ{J?Z7!Q eAj C'"QH'55!3Q'D>M&D ՉԶU*fuhOER&mۧTdwb joAl]lfy2&w_?OdVFpiln3vK8Nqd”0D!A\Z) |z5!$?>u7ois?J| _JW-,@9vJ9p(Q6} px@C ˲-C2pz/ !`+.6 Fwq\'m5b whbvfpXml5 8&؝IAM1JchA= ʞ!hy4@ @!ǫ& 8—U%A%@oQ*1I*ZG"y%^xχFp,4RgQvy 20HBUfHNA ByQo^"Z<KEF _,f2aFRPZ< 7o%S)2FʴB1^DQ4ћ7_e6Oв;L0dLK這Ď*\j櫬plK 䚜{b 5)O eM2UM ;{l2Dh >:& SLTB3ReH=v]"荄+6u=.T<:/iբiɕ>|lZ㥟%VI0`'z-)9="cem!MPQw;Pi IР}wwz#9Ky Ѕ$R]γ @ňЗUUе7~}:Ղ390r*h'P }AcҴ$n]T턞=XGs8Rtc oԅwf} M.A /=BNwe\Mg+I/q@X*2,kqq33g8//ffq6N4 U~fI5D^cKyЫ_Uj=oxZ42R)eB+6IfsF>ӿ"y/w!Ʃ[يR } +]ZO\DTFzVv.[gnM1":MQGѲ ћvk^RQ!!6)ཅ}vkAiz: T޴[󂵊ڭ ELS[QC ]YΨݚbPEt^v;cH#IoڭyJCEֆ|"zL)5=N.-L=cO[dQMFB Y_yM{^3 frc5}?K//OsvՕtac"> 7A7~|ŬVӏ󛼑r$\t.&;6 |9Vbq_J7bN!sцJإu cYY¸m"(g%\xdB#u]trJB!yֹU˔3@E yg ,>wt_)*+^R?X9-8?c~W#9G1xF(ܣ99 2X_݇4CR)u>+xI"D jR~ TW`O))KemX'0ڬ]t?1kP4Lz`DfԁU!B:?QΎԘƤjЋDR@s>-#l%T-YV5%YC,y6yA0%9tM]-8׭nbX~9A()9*EgYF0ѷI OI0w6HI(C%j$j$j$jR|IR[Gm`\D' CuaI88@$if#`GKyc mʆsݟ^hW7۫*Huω*U[=6J&UtII['ѧ O2bPp#lދ :m\IJ\ZM%ZBJ-ib4e+"X F-y8J(I}k HTx_?G}dOj_r;TϰU"+vR"/G_jUΔՍqGûRfiVIillߠ蘇J|]t.}d\0q}jk,g !ͲTvRdf#1_|* s} 5 U.zuJD -;cܸ*+wFq1W\+yQ -F[GbL2%9GF\Q䰏ZLB8(4R-2Z]ZǨRpa^H$m- Eby+\ /SZ-:ueɖGj8 {88ᘔ^+  E(,JQ(,pOx3*-8 _\򟯯Rrju+}}Uǡ"lR&)<}^vJiT:ӕOf5cx3gXTI%~x-y+ITb/ &7`] 
"2bf_ox.WpfpĖ}+NŘ<~{t/$䘂fEJ3y5]`G̏HK4Oh|k'V$?X ZK?.e,*0 \v@ؓЍ+o;0deMʄϼ͒:rVlҸSbB9r!cTLl2Ś0zasY:YS{XT쌿ˤ֫ һ-.f֛eMWM0}qp|7}ի?\l@; GH!t#8&NHb[RiJM߂ |ਥ`:eYԖ!OSm$6h&lRD'R/0Rv4*w~VE vwhbvfpX]'`kA9v.l''ᬢ-@r4oVE۬f|$)N=Y^y/1=VBhZD Yg,z^ܸ:"ϰDir'ps@J%A1è(z6Q1BVey1_lC-:fJ4v1`>$Mac;+FI9L)E4  |V4p=ȁ$ o{E=5@5b!&znL%\cýQXyӻ 8̞J:JwKFKtpTbriKËcYkp(X9tuzchj`^ywhn'w=yLaq}򮓩uXS3;Lα盄  v J#̭QKb r"3` FN sꁤЉS G˚PM #,Kq=Ka1 q\seLZud_h1j0lkD͙@0 J8j/9ǕE gjii QPƶs«0* ͚56|Hgc6][]H9{q(aG;+;5gD=K„py򂚱B!' ٶ[n`7lheآ'#H_ #VθVa3 A0h%Ӎ$͋FZPSvz=tQ_ۍM]sͰT :p*܄l֋3p!T"fوp[O발YS3q-o_>n'b*2c8QVZI傣LuB #eQ)m0^|e굈FpLTK1gpg,lǂpQ*wfZ9 4SGpV}FÖOr^^GIPsPd=+} 5(t勏m;lOTy[#$^B{J?6 B;~ TE.XzO6`EXG:ٻ6rmko$8_lorK%_6aNt$e/u{CJ1̐\qKe{!GwoGn>6hWu/nI0$PR{k6e7*Mp^-jZG.2f&ǯ4|&`ٯZS¨fGM'RN9D c?000ϰ.?Rmc^ƔV*i$|[R;…Q!(.OmRjtK.;QVaW#''Bk%ubBvKKaK]/a%1nX:^-xvk#o]3hWX =|z3mܚ7\y10SID2`eekJ$JKWq98A7nU FUfx3'7G] %EB&Ъ;, otXl`=FyI1.[-,yY!Twń%3[8FIWtwgqtʹK]3={]%ƬXyyX[Ykiw{ TQ\_+]fPpP&+[bDZ%bcsѽ8P71}_`ޚb;ZZs<> c;xK).Eo˂b(G/EXbeb<# popS!6̓  KP@i y%VĂH nL 5{x|Uɺ Ǯz_nz:kwM: #E*hU QX9hՅF 1b5İ!ѪHE cjC$xKoJa@a\k cX3KFѪeEN~ H䘽?wQaj\Oc<ޟMTtS _! VXP\Fۗ"uf(v{P'-RAmwߎ^YF  (-Y^W/2BkIb{ wx&xCaZX v* FZʒN"杖ժgMV4Ai8J:ða7S+JXB1˥&F34 ;@i"[5v1Kz˵ p'(SB2'I +}p-Ѓ@ 3{XNn-FL>QqC.:橎κ(8GVy#fbYx7x%+OWW tzUe/=N^eb5YJ^g~|1ML, ;/)5vY<=pcԭk~]1?}|z-&(&ϟ ?}~5'K$մ@?<hCDDpQ/3d:[ɧXvwbڨ`J _={T_s 9e~YhkI`ع`eƈT?FX~ӌ6h$X/[HFPt1^XÈ\C7770±؍k癃] XKѰ,DbF J͉9w^_Sfo):ht!V@#¥s'u*sNh72YFu䝤? J.Ca4'ʢ46``adDk]z$s_Җj fژϭPҁ9AAe'PJ˒W%%"ͮƲeG;ᄃƑeA)]p6vM$zk ^3Pۓd!: &^Ht\H\o H^jフKIalep1ja;i48r9EO7Jfp$Zl/{%A\:fAO_yxuk!<0~|cx'Ii:sS)$;"=u/S)&zNHW(Y˧8nWhjLةK{{HT+4g0_fUZ^ k ND>i! .v w2]4XWe"j)%Qx/iX8S"DfK䯱B Ίx4tkQMgtU@c5n8 .z;!bɬ@,9J ws QTo[]lR)dϦQ3l܊K]$\SUP%$AwrÝ$pWwNT{q;>q}F0!ػ12`R`㼵N 'Dg'l2j HjyNc1v|%OO>1>J~\aMQq\3F?;~ۢm;'jn6x)V$ý$Ӌ$ᴃEpéIs2OVe^rz˧ɊPxwU.Ryw+CLռXY"&=^L\Dv:~'w_+]] RLƎO:UTl=H zbv`yG08gޥ9+dm{/zF(Z Xw;A crb csXyw2/^5bx8m錏tX:u zg~MGFeC,PC8x"3:N]q{A%TPܗa}ܗa}s_:n;l9YVrcYZi%&f)J4(6+X.;ngNq;såXVp'՜N2E^oJ onJVRwLnm-S)QdŦŪt`H"gplAˆ%ucRh8PȄ7]]9jrWX_W2%RCH%tҋR3-x)ipAs$D@:0QK0_Zq3Lok|H_Gs:ndj3od=?׍,+Y0+V"Ai@,fÞ1A #gV䙡FVDYFV+!W\S#^Ǥ*ҨIb_r/M5 g1B BHXcM3VZ&ߠjGSF0]6ֲ1<\x$!\USi5[o0<|-Ń7j{̾f&7f1Mܦ,xy0x/vSFhi]ޞ63Z褟όC1ia)[U) Ve!4 4]:2]¶014ٖfĕ.T_bn7 LA/TJaJpbx\iK fMf:=-:.G68QZ:A m, R"NQ6Ŕ /-cg(Q K%ăac|c"lhUq[Fad{ӈҒ=LIΑ6Z xRd1"BX1Uak\* Fb [) KI`T$ZB *xZ#$kT`R*|4R(JiBwYəg")`*Cqg`6%W֖80ZGbt` HGrBuH0BLa n]<=p5UO\k%lv?|xJ5ejw[30+(.? tO.$۟@Bp,f:e,O\V,@?Ra<ؙ`_e\_ q&A/XrPƯہ[#AkT=Com Z/!ȷZz4]͖ --KEDNw@%>w<9w<8=wábFCC#yVQD3%j+ç~~hN iqB6`n U͟h_*􉭿gGh+_s/Sax 5P[ylzx/.$X8jnړsm'(e7rD01at N ;"BT>"=^NA`k6зL ~Z,|>Sz2yܒT Z(ngPĄ v}LfH3'(!ԥg\+*1PkT<K]V3#A:R5gSP-9:u|+w׀,HY#gLJNIvkpݺKŵ~ɕ /%54HqBfRe[xF4d ]iQ}eL&D5[]Bke߁D@Ae;PToXu@E¡z/+KA_ 7<0; A=鑥J /@WJά7@ܓݬc$CT4}ʞR IagIOXHɈG٢:ST\dy3څ-$Ĉ=SRԒ=iECm阳Tq(Xfca0E.3 CX*]D\,!@#E(!qpx7;tG4edߕHϗ~4L-跼`b@"*R75Vors5{;to_:;Z9gGQ-7\99RRϫZfGRv:-lˎZ :' p s4-DO`3k7Uַ;^}U?mAe_v*՗"9=zڢ؊:gw\@Jkdר /hEF[65'G)Ċu i)#wi)@%t i 9Кҵ6-hkΕ35E5|FniMvzg/'aDǏ}wgQ\Mx^ޙ{-Cޗf{ߣ6"do/E޵<9,41{7vZyu=xa*l:FKJf&zyM"\>}*[B̀Ek}]{j4 EƸ"RG A](4DQX \Kb+ٖhW:lf}oa8utC-h֓VZ:\RtPt`;Nt/t8'Ӝ*z{cS3fJOJt{48!ʾN.e8*T@Yi\T]^UDtC8N\ hSl[is-4aS:!d5YCooH*=!u0Xd4{.#5'K)Db! 
>X8ԼPۿ)Y<ϪyV=ϳyuV]g@JP&hb,!/D{ÂqkpK0:u$^SI[8a&$ߨގr(WAvm8iQszZ|oM VӟNxu\:^[x'F UZ8&=N/|YC-@0c$H`i -Z-:&)]]?q)拺Jj9~.K(ݗB Ui8z:cBBL MHҘShSE c>ZyWt7rN DIwx2Et%.3oQ:bɅ2غ[UN:p2%eKz|^v\뿬ȗwcryt܏O3_3٠/`bpl@Ϩ0l)\HJ\$!e46]٤~捰{!d..ka%tq穄Du@ lٷeQ?\z%^n=ޜ'Sj4?yݭ Q]<e9fZIR>|Ң%f^NgT | EՋmQ拕x[ͦ1cAaL3ۣw)al!\gK;_u2/^j1y}KZoP|3Wv:Ŷr6Q:gJA]k7zFeYSj [6)SC*7skY_`ɣRroUE=ݨXo6lcYJyXΫbFycYS6>A7ȆʋH<~_-!g?ԎUrwkn ?G;tsjxX/zvy/wѿOrlcOZ|$l-cQlMkN^+.e4*Ǐvq#Mk}9j2W84k A= ׭yYcUuG߇m6ab nX#1yM1y.?/IaM|'nM4/wʰlcy*/;P[ 8\,v?{9Zp1So%hzCuj2GuztoN [M\PPY冮]Olqnl_st׵gϡ&P(&ϗ.5Ri|Փ'W5TyFlIu~A'(:RKDO>M ++ַ@Cp0K*/-P3mG+d+.s)6pHy+|TOgsI&}2Iޝt}y)5G)onsrQwtS %d؍7g^Nc^K2U߇̏`P sPO V"K ԑX墈ӻr+Ի_.ǙsE9ra(Dh N7±zYkmCPv6_:[V]uHo7;dFwƺ}+*e f]dY%4ʣ=x/^q@" P:MUQ5'_k vCxG0:FJV9?⣝;B*em :aj_Tu#Ju \D;:K8J%u"d:`qKسY`5c"5ˑO Nk=h hyH1x] ,'4^rqu ki=RÍP Fj9/C$65#Zv15,1ݠH` !&\jz\SY@/KMQnjaTMFIr^|T /IսSח*A%4?7B&GXq9IVX4/ 5/[_>k@lAf#vȲ?goΞhUMMGPJ{JG+(MV2 *;T1 meԹskmC 0}Ģ]wv?1BI\UZ _blvUd+d;*>PvGFUwhEIGn|Jyt]%6#%c~Kf'蘕NvaZs&qx!4Atiz-19w ¨iͰe}[P&X2[Ť[3Hwmm$zKz?e` "y.ښ-E&;T)U7ylSss 9øϤVhrCAIS!9Qiٺ6-黼i.j?vc򍟩|kKZX Y'f'})e{l>>@+uRYd,qսJ D3j 5mm-ǁ-Z=2Jk:שJ5 꼱}=BjG=F٣nnEN_V;D\81ŞװQs&msc?]rE㪇7u1^~(ӽ4g0`LV=hwpVz=Q9~].KO]}˯Qΐf!;f5ܙBn~KOIJIІi^aF_E%XN[^nsO|gOj){;e 7bVQɸ/9J-k͡QLZZWRk̩9G8m+L _p}V՟VFg+=i+UVҶXv@u)xr1܏>@N\Fma{ ȔyAjMqVF hkQ̾P3%b2QrXɜ;^:X5+{E cDRRn H(xr,P˹NeLPCbࢩ$sQb6!8 8dJ uQ*&ea MJ~gOl)q=[VH& Æ)!?FD<`2RDbRr@hZY)\}(Q֮ezW.Y*r_g?utÛ6udi:hiAK~OM3 _g8W֠5׿\^wq7pwD{{ \)?L6-< Rv$^rCd?3wx($kG^K>irYEݒ@ynBr7G}_G͔YN^*{*=|^xm:sX}ߔ>)5<{Gl0!—2o\SeM_vL<ΚQ{GW/R5zyVKӷz7,e`U5^%l9M g4޺%h(B2un 硡)?30S͋/7o\Bs ֶz۱=pe]Ϯtdt}J 7=ݪ,aj}g宴qv^u讄ﶏS PUCւf8ΞB 4lFIh-n[A.+ɕM1/8jM[m6AEmCY/HFK-Oqi: t)Xl%9Z65C-+ǪUj1X"BEb{LMAEEJV;=&̲/˓ހ,h%OBqD◔ ,o~.SWHT$kw1ep3S5G y2+wthk5BZ`p)kre,+t0F5 ŜS`m֕z"ޗf5QV鸛-an GP)Bܪw2 QD7r (dzka"Y% S6\C Kh:/Ofđׂ?eGOEQNoOHd: *̘ÕW i t}fBƔ6㩉A];&#PDQYԉRYKmOtҌz~eqrF Lɕ?̂6J 3 U=3`8̀ 1^OmV5De"Tfr[zn9oXVdu~ܭ=;.P4IcI}r=ѴNp$Շ#ϟ[6?WBOO]{0V 7-_-"r#v퐣-Asf,KIujѠM ڟ_Zs'S[JN%a޵QPiT)ixro}mpYynZ\%`3R%-k#;&R q4rL8 ..71ܴ7Sjˆe2@?ZO_c@)Bܲk`oOzNG7P]@ڃ\x&&- >H5ՖR SLSn v"~X,_UmʄnVBKJ?ngb24 ppPMsV2(KVv~Sx*4m?lh:;J:B g4{/=NZE.ѯݾ7ӆ~I_ҬH o<8һNz21{vڻ ?yVưD y f,1x21YE)v݆;ncXn+6UyVrlU:֌!^d0(^BV;7QfW}`x6]ZIeZŽL 6 8^c-N5ڑۡ8 ,7PbkL,R6S"5I'w,{F݊Ţ̓_[7Q!Qq&wW̷1v.q] +d @滾Os$^b%ƱtZB H)c`9=[b0I!,vD_FWR]( ="P`fcHQ:EBgЀ*@L6a!8USB>7w18{BJdkCbSyA0L L`mAQWbWj q+Qi% V7bhf*bETS2,^׌ V+;!CHLAMߴ/4V5KVz̞n ѝ\x;=F%C" t2(e&֨SJ-])!2& O =WOA .nVZQ~8o_nYi&P eUlDPe mw7s!=o6 ށyqYmxj}©QCT)gMSVmEVV_hi>2+ \k3 .Ў".d)a{9޺GZ8[C4mчfAeM!;5vst 9U Bquev>El QUeMiaC`#273yf6~yA*ҝXfW)g]dٌq0Ze ¥ls Hͤe /#6݌)6f[>_%7߇Ȱd1r3eT S|R$[Ǵ1[Ko%}i0CX(&(FH< qB%I$gMv{4SAq+yB7l$37;eKݛq4$er62I(e"R& 8#0҇G {iQde x|ǔw08K@'7of>p|^~KϾ- L m> rGb,^-*)ǵsx-FbSGTQ>汚]޴ XnwKXm{ćiQW*BBFY#F**1#BbX} $bua:^Q\ɦOFQ4#~UD?9M{w!/(З$/@fr*oKtMMޗ7zz|tĚk} C4S@?S?nM» :n\}I$ͻgcz>!)zWϼ\L :nEH,xkޭp60E1E\(.eŽwU yBk0)y9X* JzGdPfa)̜h8Vkk&b ]jzk cd$ka[\,N웸 h*t/GF9W[4[rwNbRhy5'Nae<'U% M 3UK$-TS9c˸jxsoex(k)x!l=)9¢cZKxϊe@y;k>5o w7-ySRQ!Է:'έ{h%o!Oнg~@ۏWj=9fw"}fr݊֞e- դΤ5R`S`̠ΌtJ9U:F# X5}wxg}gkb T/J|*&#ov2%ȳQ6"0ZբIBy|91a|k<΀N"F  šm80qFu3|޾c TwJgYj LN.Ǩ1º!_Xu,障yYjʜ{ 9(@.)@t8lZO#Q!QyMTm69$L4n]ΦI4nrkhi!)w 7Sn:N;bwΦ/*z>!)x91vּ9  bo T/4}~c>gF~>ii[w?y'ĀhQO~{9HEUgydg&XXf2d C3 ۆfD}7sTd(($˱F&`Ş!Oy Ԏ ON/D*Ӆ.ڂP%k!7 %iMN1k)"+q=p%M\Y]on/덃Y (qiqE5k &D L1FV~Y]Zv7G\n87\5JC_(hg {p[o/[Nڹ9f/>UC~>,/^[X>}pߺ+xĝ`'d,?~8;:=+o9JCdܷ:Ū@n wKq`퍜 M%Ip4ZKLQQ9œRa" 9VS nݾ2^}Enc]jt,)k5)N6<=x0ʃ#[\_g'1Q*Q.l}Z$YV 3gRIrQh5 [6ªvsSȵZRQjQ"en ` F}%5bsj#5Snj PL8L2JBSRK”8" ֥8::U eT06]59|,Pj6nVh.ϛR+i;xJ{ &Ra(ViǍҰk7f0 !5G LjRZgR$PK jeO(=>cΣ)KiJjkT'#J C)V~]RPZI19F)Aa(%R"$PKԊɗ7J) C)tp-(, ^jIcD) ;8-ySjT=Q#r]5)KwK}ޔZ!vǍҰ"rL'R`a(jƯW 
?Qvз[I4JRjHcD)a(ey)@)a(V@OQ_(n.@PsU("j3ݏăA,:FEH#L!&sN%+)3 815Z ޖwSY*Yd=f͟Zf^^ Fq.e'k? BNaCV&sϨ'1~ gM!Ϩ1χ5o0Ga6\Ms~meV0o}ŔuJ8}{}j `zm:?M &yV8ziTxۦk ls+{zGWazPm2X_jnzBWl\czt۶KQ82bRxTՠڏnm=FÔOɢOˊ6\jWl'?UPfNLXHHWaw&jkg=Ġj0 1/~Q^,)Lk*fԖȵ%ֲ$"[2U)pL'|O)Q8u8X.GɽKUs){5IƧZ-ySjENWD=R%= AÆ y_ܻRsTqt JbTӷu2r_ݔzAX %S|݄㕽Z-Yspŧ뫪L߁b$Qe"{jW?lb]l1DeI&:Geݻ$dߺnrQp\_? rn_Z a2kT>"GAC{ ?||_%􃊍ծpz%Nۇ1_uG~qS#l-2/In򾫎B kn2wk#lu ,#Ag1 {0e27knSV "+hFD7"߾~{(c륜O)k(mͥ_1yG{9|N- b9[ʈ斻_oyc@Zki!zs1&:&LYpo7hC l<,?Ȑ [Gdl*t-Ynߚ@P/l?b3lO3]L dӴ6P;aNMjӄ(C-Ե%iW5#-1:C!Z#PYhCaxφ:>Úsj1iE޻RVbb~6JNV*%WV<ջ쟶U!,jj'烌/˧B#oe`obk%QOʐaԒCnU)$/qLc,K"2/(l iIuXM32NLoG" rVG Ja Hc k2DUrM^ =~ѤkyO>$cQEkaVga93(}ͻv,~o~U("}=d.|bխkggh֟7{Yyq_YCUf3Ra? bP.?)9cL|p}<|?Sypwl/cCPHc@(fȭԨ:Oǘ[KHJ D.ϛR+,OǍRJPJ+ ,$PJiJnx*{(e< *e4PxJrNDmnjR1vpk 9b4J4tE#;e`4SY^c27PHr OA6r#"b2_ S29,v${_0`ţ$ nnYlݒ=`d]TXd=Dp (5d:ƈ0(WZ"8hi 'KCIcKĕRh:]j$=xڒXD5w ؃jk]I:N[R#'5B>"p&Eı~-`Jq^ya-ExKRۖZK䫃#1Xwp$_ɮ5{<8R_zihtJ5 e==#>;u ٰv5fNH)PR@%nsuOF/q|m<G>7\vs>v0Smj*W̺ck͆>i0t͕}9Bl. -6{^ݶԄQy@MaxN"H`:/QC踉PKP &5NfvBowamGpF7zf!m+,܌-EҶWBQ  rZ B TɅ FQ>{z R,^ yg.ik2HǦ;|.Q#@IzQ:7a,HzkLS3~IƚyWk#k\.qയsήV@>[3[b<㎫׈aEOXWA-5!).q! CVS[}9Hp ”8S05it AS^&iA8' R6KQkI$sWNRU h Ў`Ї["Gsz4 5"DzbMJ+~,EB #EFfZ F* H"Q$#)mtђ?ZAy-HH`?iob,hJ͍Zc.R Qh餱JRK&5L Jΐ b[g">!uZ|ȱ M~z)@˵KC*PRQ(œ9dE/5y$(R@wGF#%gRS}7%a Id&Nd1WsS6Apuê{qq7||`dKD+6-T>_+ѷpH*@(\" :Y9A#aUQIo]7SQ<!as?~݇WᯑUjp_(v3>5mݺs{?^,_* ns$k=ԼC[Għ5ٵ;8HFsi9 oןNX[BH0w%=Г-"5{qQIN<0'y}Bzg=Mԑ4焎@8GF{uJ)Sr UL.fw1})>Xze+RϨz>?%'3y viOѴս[qhX7?rޭ-F^qVRWª4?UšiU"X\S3>L硌an_w lj)*#Bl";.!b7RLynnvQ}:^,gOġUᠦ?|*6NgU룱"geÔu:|.ST\=>AY㜪3FT b\'m[6ێMe -r薾#2E|AzeR1qgn+r-|.SZowZEKpL*r!<H/+#EgT ~Ӎo 10ix_N}= Ī:{ هs+.^MQ3wÛf2.oV DϮEn*n -y'bučT z5u~. $Aϳjn[$pnH m ̓vN;#gek6l,%s%E=fb̲^I24JN9wvݘ9]16,Y9fnd=-#)g cy? ̖ H,qCYJRj&-#OnЄ!]xz0;ba[yO c/fqܺ۷h?㧊[ѵGmol"w]D`Wfl6b'kʑl<7;52ܧ\#^Hn.( WS\svEJHxFn%6c|<]0F !RRk 1H6 vD ìA+\c"E#gy} ĔJ!kd3a`BS2al0tt6fz2۾##o[c bF;;{aK!ꭷ EB~?B`cE>n\j|{.>1Am5 .>=FweWH>Mގ,zrrЌWnǿdʬ6㌈ !Tx`8^–HpO(~G)ӡ4~c.?n>ypdrF1z~DLs- 8ꍕt [+ 8޴'@5ǙKZ~HBi[2Ѿz{o2jqoqEP&E6n-}!i]=AΚyUU,&]v̝tv @fWog,?em&Eagaf8DJР|M? k.m19od0L']SW{4~ ?`Pq-&cm,JXZ-<|<}Ww` ԕ NYg:l8B;Kdi@ %6W []|7M}`ŊML{fotc8"LQ  TӂX IYhiBʭ5fFTiK'9% &8*fI@ VƕqpIk :j$/ `4>i;bv[@=~\s"K:)l|]-fЧC]|ظSV?qO!/?H<_,W:(n ߠZ,[Eߙ_'׈ ^>_8e{0@$:>O8ls8yq[;ڐx dPޚ:g\'MWxzs'ö#ݩŇPvȹ~qɘc;|^(7nzh۠vX_[֢.m;v΢FoRmA>_ѻٞjNR0tIv:V8(zmxH-J!2d)Wɶ D;kϚuNG438$iԅN3,4$ !Lski"LVkĬ%6aS$iWӮd, 5ܞtjB?;~%#$6t7d@VÛυ0֩ɲ?SmnqJذ[k6Q6$ ^f)e2Lg䱢5JV"vuO*9Ad(ks gRWxUxl ||%#Na8ݭ*u2D4*1߻ǐMQ}mgQ6ܳh5؍xXZ +ʊG^MIM` S[@/ ]5ue}BS5c~ܧ\c>ƹF Glv~tIsJIIIdڨ^)Sҵ߉@ XsJɻ+O>>Z|b(ˈsaǟlsIT_VhW^Pn4Ix S^)q1^d#QΎBa(טPjUBm0Pu 6aİ]~X8l~O&a#Dú;$fl͞ĭw+U#v},njN_LXy<ǃd稕p-^2W>IHY zpgE+0OJ.h v yBJ /ShӲ$!d@R*FgSyS.txc=%T0,HxPQHJ`WQTzJ5h5| .PX1%Y "QbPXbpo6Hvi\(a)? NSgH.M `{CPJ&gz%P[7  s2OAܛ Ň6&=xwS<}ן>Z޾6L?TEOvf͂u~Ŗ@#:BϼnU6>Z$! }/ɨ~:D; 5烠߯U G6ڛJy9BskdmWz?y3maGRS\lԋQqG_>wˇ4v#6 x6Ul{##v,F}6,+CdF Vc:]S'Hvn-݈"1 [ xTnISh/ ΘmM? 3K7G@($0^gwADjUo0Ŭ\)/JTjFFҸc*;ghid/jDmcOB`MDYv`4 C%=cΫe $m=L.2D◥2/~6#)yjUR Dz򧛒7+4)q=薻23kU20ڈZ)iu3lt aPd$':^jY g2a&:mɾNz8F/؇f-?1glg:#:QBK`]+gamK7JϽ{CHQФD[(I1hS\ʕ,w\{tJB( BpQVs҄=˧b2=PJCXbY iKZU 2m ?*KrZ bP $eIBKN*fȰ{ cQU실4=`{M6QmT;98)d[K^)ĩ i^[!@j4*:|AE WR;,()5ZԖjWUD>]hh9dv(R 0䃇'SFjb&F;ZX!;blH}WHw ـYȵ{6ѐKIrDlab4yy.1y3g˧|-ii[@W<}m9=}H`׾}o,d"Xk&wB5p?.WcBI'ӿ&8SSYrYٻ+ 9X~njauă`IaaH;,f,*4 ig]HXypjt(^`ČZxA1C";Qc{-«":{Z * %I6;pɰu=D_?}eJ(on!9>砬sObFvÀb1X4? 
,O F?,wWwWf/UPp-lf<} pi]nw!z钡-WK3[2ih34 Ʌ7;UVB؀1_K3sVQT|2j\<7N`|Ƕp~&6lZ-!G.M}y8J &뻬Vb 뫕/JYQ RCPAUH9ۑ*=Va<gL*EoڍۧuRxSx߹Fz1YB{ZY򠃱|-dJ&%APǂ% +'g@UyjxVi`f>49|RSƖx}QJ *\}ukdhe~L]ki/ww(@tOYJxDk3jw;b{Mr͕Ep܍pGjr}S֓|"I'ngn]i":]ȱngOx֭{Dև|"E8j֭+ RD9휻-ͺu^huCBq=Yؒ{Ycc8n}>mūDbuݻoyBI;SP(Bmt>{޵N>ԯݓJdmM>'b-Q+hdVg%|_YB؉%Z%/KZ֒ NLgՓ~![l\z¬lON_k!e/a|Q@i"' 9aAeA(r4XAdJ<\z\ƔwM-oXtd.+cɩ8I: YT8Ħ$p\ Y*+Ecz£8} `p O8ͥ;T#lP.~H sјJ@3/:χ0.tYQP*]9WyY=aik_[Au,l%/p\ (<|$̀B%o]/u^ƉB ;"V>-0M7h5^p6o Mn\[&;Ço_(4oz/~"'i6U*I/@Rxԅ0[ wrR{`+#Ơ(L Z(Z%I,4 (8 2:=N+K-+!hdy"~H"]Gt-u"K@ .9 "2cPO:_UҬƃAF5rmJ%ԩtw$JYbݪkޥG1xrX4+/N#lE,da ^X4#0Y ,HVa`뜍!`0Y'3(PL݃/5`Ia(|jZV.}uAx1Eut(kˏsW>on~#O>A(s'i4=6y_|~ϫϫBԹfz^^@W&7]rnYVl&Hkkn#7P5.J%u8c䊤wP&E2k]k4irGy^jfE"J+%Z'V|w*Ĕuu/A=7)~#F4U.ߤE |$.[bać5 *d2H3BkcGhHީ^M:u.M(483psV-^R9;Wt !R`H,g(_lA;&;qGpmʦ$!#^U?,Xw-Ӿ)ݜisfuqGM)В9)esmݔs)pyVZG!T&9"ޛc9"n* ^ zg$/8%鮛@a]uCJ{Hu "/72t em0,Ubs6Ol.P' a'ә\jkHj1 yUewd鯜N"*dJ<ݢB(ڏ9tU!bfsTrduuu(g?".*.|[շ#oɐi.*Xj=/pޕQ)W6$}񴿐ƴvSe򍛨M5nEtSV '[Qcp͛BoX7n^6V{ Dz_ؘ3\[ ukYz򍛨M՚:y7#R3DТ9dRtzZ(Gs|>ۦF=4ds=ꓓ%ݕ.|JG1dbU>7Ұk7^##47NkKun[]lKrDr!՗ #B0o=8 砪'p:2EcxLUaޕC؇!!$| i_P`PyFjg$ 6\Qƙjfm>zEMo?g3Z7-vbX Z&ط-֦x/x UĄy'/./x/,7e h5"!Tq<"^OWQ>%/UGq6_shs#, awM7i)J~yssM%h Fqq{G8Q$;$]m;`jJZDkxppiޑgn *A n[> [= kFKМ`[ezkΑ'^Gj(pa̦0p$zF Z-1F J~ PS~obs/s{;VdV(OIf$'giF N2T8e*c'hJcLq / BAe8)K2ڔcNBUi@"DXRHq8)Uy*LLD Q4c)3MP V^7&7KޘaF0Ǥ<ȝGٺ<\_|c./H9 mY@-G!w } "Ũ/W`ޭӡ$% ?}ytw!01=6\r 1Zsr~) b4ߨleu~ |~|lhH/FīG>hM{RHzv2:'U~4'^˻8կRTWŹoEF3X. YN޺;f׉~ZaTʳ)vpej!h׫zuu|%6z~yխ-Mh.FW^<-KC._h^DE C=ݮtW*MD%3T,0dSD/TeH(g! QدjnjS`h倾2N͎xj`|Gq+~LeRvKţOzAc&hKlfJ 8+~X\ocwK/{Ϩ Pd8(צhfn^G+xQnGZ>^?z+cQNRObwh`wF5{uؐE<Ic/;NjC- ѧ[Ry::*MT)4!N2p: H"a8˚QU!4҇a҉stV\"Dw%+ѺD_uNݔfumJ %XHGܽ}` Y')NKua{%\py:%K5g ^wqԦvKc˘Lj^7l*=n5 Z: Wa&$'nm|Cvb^) Z1WRB2\RAەHNdͥmp{W#rc7a&r 3ڮYR!˝ + Qc"(E2J&1`hqZmq֞S 媿!?^j3^\z+rgw  KGHQՏr CCZk4ξL{Ci]- $! 
x:z7Mu|q.l_[Nj9YI?%rwO"Ölo̖lo[Sjg6YLs;-"jUx==i뻎L'7rh/bA9{\ V ZSr4 Jj"I,'ca 9wxJF.B3唠\)XŴPyKL YLeA\dzO9gqwht!lLtf!I\g"aL7^(E)4 ZDNy@=釵YJTdQ3h+P#IWܶ"K+T Ũ/՘+ ەdz* `3,ӚڿKI8Ϻ$˃aT=xSIbdZ0OعORZLyj9?S:VDZ,\ӊwZ8JqRBJ viXxz`=3.%㶜v HtFafs߯a5POx u.eLAӭ$Bh>͝5ͧz57\mi>h>rxl74G_9tEi L6>.J_TߕM; pTVV@xpO ֔R/z_:=l=:7ŕ(#& E_Ϟ'Xes0jYTVd8*zQ4pP< =\ g\ ~m5`xϴ7yFn"}2a܄ݮ5Efp' &͹8" XZqPd9B ez aj-j(C9G3^'xT6؝<y=QP:~󞐚#^y"uQO>n*)SH ~d<ӎB7mHa0a91V`DY Ss&m Is4Hr}CФ"XJQ@8NSh )RJ)+ޑ¯D"Fzr*BLHB 'e $ɥ˄tORr0UFfC("Z&A {bkaHH6ZLl!~34գ&WB#z~ (DQD R*L9cof9 1^C !H1L yf$Ӝ c3el۹,{x<ЋfZ@NUn3դa()!`8`nvgz  &Hi:g2'RzZ PP(lLc%ZSDQXM(l.Tb j좫#q mt^ &}:C-lo(wq^rei@bw'Q!"81er)И(z Ad7%R)ZxB$a(Vh"y2w:is_ Pm ["!f`7&"B~Bu1B8A$/$@zq Ihcؕ#@OB 1\IIFQnU,#P҃k@6:Ձ8FBtPR(y:ujgVAAq#<$#6JE49Xb qtC1V C:9{7o PChe) b%]jaY菏Iچ1V c_A4*M߻؃C1zc n`fZJJfYc1d4iA ~B$H sN/ RG4y_ͨJ6 Fz32qr+Zd~iݪj9k-'wòӠI%X(Q"T#&*MSꟑd'e@V q"$(((@w,JB[6JJAҔ{]n#9^;$02lcӻ4F^v|@Or:>%q'kE~x|=@t%~Btp7.xf`F$+nwH}JSQNcJ^%TH k>ŀook<^S"8Nq$CO3ս1%WG`z#) g>L(#90y9c'l6[p` _9/|#i>ibN }0B)5n,1g@8n2oQt ۼPm#+_Z `s$%t4n]盒9)\AuNU98wo@甥@XrHH=O9ejAY{Cw Si#dPqP^n )펚6j\R"WT I)Ai )c|LA.JKB4ӤP$YZ*⼐0 JrҜ'E4yE `u"C|v.jA.ۺV@%w`ݭB8'@/$TsƩ] D+I L(5傠:|%KF&l@#NBV̡cg&ȪƋDBKC5p:=Բ,UR*.F'2mgLWVj#QXiWRk^؟[Z)KjѾ1ȭv>zڎ| $rǘm0h΃dƂ50ʍגhthl+jv'X$B|h8>}җ]xwx4x%Ro3F,wFAphojʺPr"ȢRJ{wqĈGL%,2ܺ{2IC18Ti#Q\[)6Q0W&JhT\e1jELٟ1b ?}DA1pŧsW^x?ErUs'm4ߏ~+ïwEŬsm)tzsyPi<~^mmӇ?_lu<6 `IYL7_&BxpP['Ib/p lC+`k!v[m {c?vm.R.#xѥTn㎹דwv2g%Txkq@}W)CUch2ZBOc!K_F>[Ӿ v&蜨ȭ!~;4 5 u}ίw |~e}+OY .#iEG&eZn-Jض=6AB#YC"܊Q{t`}=ntU͋m跫m^} |Kt$o1J5Z{'YVW}xk>ګpȔRMGtҜfdŐ}UG8Q {Ub_c'g;8rojK߿ǒ%UOrl|Vln!NZ| @Y2",<#%ttIa`K緼sE|zKtq!W Bѣ38eR#j޶Kz~7Kԫ7Z}vd|2s /LXU6){zfCWO 5j}/o+/Fƌ>ƈ'rCS5k3"Cހ+e?Jx&ٷk#Fpo7} FNJ|,?o~N.iXcc1[]Q5D}QkR/[b4J^FV]jywEUUV;brg@}|8ۊ?+.XY갓u@ZN;fpl,_{q]ee=gR`~D邓.Ro b2It=Fk6lթs#BCFE&`k<8p߳Lٟpkƙ N78ɝ|kș@nhP < RoՈ^م"ڛt,D #Ѽ9BҼ H!=2G0*>qRԙwLrkP2<71aAj5eeK`1 PժDFgIHec6tiNfin^)Zj(YeZUBQL%(R!(ӢL%3&=ʲ1%Hc(YV$ ceiATcIrƑg+ȩ(K5(H +TeJJD=(RkSgrXJRSmL^IO)S¾59aQ t^ `(q3f(=| K"ΕM^ME >iԛ|g6T!c~z0MoCsn܉;1KPvV RPvVZCm?4[)Z)vVJ7Ʌ(j;+H]Ng+=E+J9TrvI}u(FBg+=i+EK+5^ʼnW+ħM@U矟r|Q9NlI+ANv>0xQߵDƨ@S<[o=XXnp{C n7D՝R >C +kэop zFawC6BmҩcԫRW1\?&TPHAĭt3*ԷOx} ;^_mԷb8aP(W~t#ё)#08Jb1_z8@E 5Gb6хb܂*4=?n.Iᤔ`ӷςlJX^lI:#6cPѼp~TdrьSQMs5b8!zC8/ 4TC Цŋ KyŒK=\ @yS,Jn hP)I edPe6r4rCzP:c َ% dzG ~ϾxǮf o@QKe`Õ򹣦9C}Czږ#oFȉ.,g\%;*&OS-o@(|L2ϳr>{FsPT"i ^9=E|7߯S1p[ 䀞61۟E<QZ4<ƴQQQ'{ ؘf=Ǩ1PgƢpk$:i)\*J;$s,>ڐ9d(Cȉ&ExYPx)c!DI@jTTbpXRZ*{.R*H$9E"1$r- ,QdZՔj}Ҽq+[(%J6CA rnrn&4П?\ԄiA8_ (}df~iTܒu *P~>PK٨HMXVUH0i:Zn~w5"-J)5xKZݙJ:r"2ngo4J:ZK&g+=i++>0QX)\7Rk"Yrg|:.a PDOnҲR9H$0l\$7mPEÅw=]|wx8x^%S.cD@ )+u B0v cTyJ"c^Y|XhN)rVV,N/;,lϤ5u$sn̚9ttppp:Qia6vps-YQb\V-Ψ˫;&[fm n ky>Xcp(K8_~!C@"ȒO]ImԎ&hƧrt_o*G1"UpW=4 (u=PBy<7ztclE2 4p:}_wI_^+Imt3?0cK]nd$0t>!0w TaROq;z9 r~wj(4ovػt&r۪&+.X%2Gsrg#Y1yQ&7Ez%/^Tlѻ ;S7EU.JպQ9a=>؜fK4 N5,B`̥MYec|ػ6qy2|V~~ѱ+r(JQbb#1qŧ>2k\7>ĭ8c5Z`Tqax7M`)_S3g|h S WԞ 1OLw @0hԸ mgŒ  Sj~g鳁lRVPg.GD-9YWRk94ig RE-աJ.f+=E+eJY_@RFt+b0[I[)vVE啌IEa]R_JJOJ>TU+ Ni]R_JmVXi\wt:+5PK͊ϙdIx9V(Yrtb΅B Dc#I9(L"HU dQz{U6V&ó&-wo f!cA"G L# ~T2l  zP[]\oFQ4SVAﴮ} #TڡZK 1q^,–Dq!?6O.cx"Ù2MY:N|TLF>AX*ݰON ͦQbVЎxB;PxB;dM: ZZƽny F8HyNJbJq&Qiz}_ɃSc/" Eܶy1׸7|W \H[s=aE~|z7(}/ W7GDj٧yL!3? "vX`3"lm],㙻Hw&ArC97'u.q4&MoWgw݋R4RF9{Y6 #Jdqܛђ(zO%16X4%ee<-e8*]<#zetrх=>P v4wBoGZE"%ЋZ&xd]:v|lWAB[ѝ߸lΎ#㥯GڟgI_8 ph2e\1?UF M6Ia acMO٠a᎘L z4-uroИV!<bx#4B}y}q@{n94o}>>A,-ʝ=)Q6~Q>4EGj|4v ڲ42+Sέ; yݰ {Z0NoLrg4֚xtkNfFӡ37ξ1)͊>?V jsvC=olFFi`6zaD7y聲L Ͷ зWOg3뗯G N6ZXzkᯇ#W=(gİ[dj 1[9VX^@ }!tMC&QvRWd LE P# T4'N#IQ%rAsZEiNJ@L ?j? 
?R)i6~u$??mJ:kll̥ 7|T,#{B=Zh ıny77Ż2˔ʫuA1F'k &ۛ!҅#b=!]:+'y(ZWP\Y;.C!%{tfӝ)#扝[#t~uR:oCtQc4gbfy1ۧ#_Xv׳7[Ӈjo8>6WZ}x}˲On2!JPD082_{l?;m_JpO2pO2}*-xCq” oj fgp. 9\;lp2՜p-NtpX@mz~{~z%FnnM5sd0v+{q,k5fezola1TokV^Yתcznvᄂwinw8m{co ] DA{[i&l:> >M2dMיE UeBc%DbX]anZ(+b‚ڟgoѸ O7,!/PYAkH(9Ji+@b\s@B MTp*/L{Mƃ1z*ˤq ?ذO6-p2Y%=_Ŭ'v]bhox193s[7 E/WoT(qX~Ba>OB̋*M Feƅ}iVV4+ QTI/S׸tsA rv[7vˇ-ؘ<Ζ9Nƅ#)) v2LcMCb  5-`8[/<"4< bw-](jp82F&TfZԐLh&Uvj; SY\f4oAR4)u*,KD% cUJUUe+A",xJՔslHN3Q,ee 3 -u-t[pF+Wu"D^zjB[(:ρ 6&tM&*dըI'cah3 5VwӁGTxegt PiuV#RIr/zj]˹[εC\suCN8\ԧRQB [O6{uxuxtxd`Ҫ {q7A;!ʕm۔#+7+m{u G0!4~"~? )w`B 4\Y*WSi)D"E&V[UMqg%lϾ·9"#{8AJX#ύ3nPi'bV3>4dsk R*'ƐE.t<yշܐʾC9ɜ$=Ӎ{}[@Z6t<ssޗaypEIϠ 7*tj+U@I߆&'a2$;v u˵(?bR?"~T '}^Ǖ'^GUat a"|zj*s^ G 'zy}M8($Gz)9R*5Ί9atSP9#ȈT0I)e\#+<9 0'@RuBRT̨ ~4+mf|kP|&X};Yhg Y#Zy$!AX be%'Py=a,F[on2c|(:g4ك\Ou<( T ۢ}+B>^QeRU4{L/^"'1Dl{s(*A,E%vc@cA Ң3עDSۏ:*)AS U Oz &ǿ,F" }>E9r ̣9ЗPؓ{qJ6 Jz40%'d0Be*YvvoG$u8tݕtsh2w*;u]lU>Um<&`&g(i/ylx @8BAADp<ǝ esF0NC8Tt:@gh~@i`/tW:LnWELЃ| G"O\8iZdI]JB%8p%54oWԸl_E UU,yJ+^FYP΋ -DҔ(́ȬR*9*UTE&5'p9DUNJP)B<2Pi%sY*^H)Q]2e)^7w@ 0X[TGyHm*ߦ-ŗWun1 ״D?w<T&@ ]Wt|\l6cw_40TjWt?&\J#>2-$^_=yel,D"AIvOhԠ~pZ^jaLhmLYF9j]#jS(f2k(d&䕆T1Q2G)X)9Sf0U$y{bT3;X0G@0ugzt7,pb=f5g4֚xtkAZÕすـN61zmZ}ɜ]nOS^LwL#.b쁷rpf ,F/;0j[3}X4a~{m:gbeV'_k'leTmۯcZa.?Yơ!<̪w|;Kcɾ߭V#E wcmj 8JO~߀"OB{ $c 27FN?yXҼy! 54p@rY͓i)iU_qTG'}6Tchߢ6r5u7-%hG-}Z*}5TߴZ>glBKQ}ӦwTi)VK9CKQ}Ӧ藾q-;>{JBK_S$QKߢ 馥BZ[CTߴFr7;.ZjZ*cϧiSȨoZK)uRjN0 BK)uҚje> U% =*3G\Z}C)BTDzXWؓ^~:_% 2?8^f2\/l*aa2lV4ѧ,tN3S{p"X06MJu5:QiQ<0,4e*J=I>(Q;FǒzG6lBr)#'K6ٸ{D'ɥn蜾Q/Jvڥ]9etͨZ|e46fY(77Y2vOhןGD8R(ZC6^}9i'\Z']Vyث5"פ༳)-]Gȹ>9uxLblΒ)F46fUȾ=Z򭤊xn}W?}x>x_gߗ/l8|ŋ|W7Cvfi?)!MT 1\Y:?{{3çSjz;wf|k܀>&l{[R =B"o1GVc,913T8 +'o:]Gr~?Gޭoſ#?}-mkޘY{|Oxƛ %xu}8G}#2`Zw+nrV=N6~!Hkk&n=rTkimi.-d@sfjb7.w+Zg*.~W.J(3c@ZdN(ݒ#=.ySF%hs!]#/"vGeճ aqA4~W.pH= tf>g;8pl-{[gYaǡIa|A91ښg2*qNuE)EZtț#f~` R8Xю$Ӧ<6 c6ljB ˕ji/ŀ9pX@ J`ԗ])k3W;ܻYp!' zӗx[V@bok-ciMT',B`-4͐$G5i>~T|2o H\Q4̸ (:L)\DIj ſݬg9dYR>he8xzd.)>3B@TG>_vE>>]vA e?,m>#b8oN|Ap1JzCЧ?8s$p nf?{tRP[p;Z)ܬaf3w/?}sR/gW?|'xݔH 8ZgTzQ#5RlFHTPPOc-}]ꫢn:[K1R3-RôK-P>fsRBôPªpJl4eӱ\ZY|46^-ݍ\5Y QTCH6>q(.s~atա'{X66q;^9OYYX:[8&Qd&hӉSZå[֡JaUw[o~t05/#r|qDA4<’0Z,uWu6e 3Y 7R6+ bĭ7S)a)46an3-# E" E-S *kc\! Pg,* PN؛Xx$" :8a&ErI+g?T\3#x pDY`AQqV\JZ\e {OA,CS^̶%"X@'Ig1Ba)h)Q ,OL񾸛Za)hҹn){}16$)n Ihw#{JJl]v Rdp1J}rjMl&.?F͌J0lo)  as}'*Ih?iˎ$$]==zǫ\s"A D*Z>8nfuQiMSsҝݺŎcG/{j_V>qio+_=1ţx9]^^q~J : j^>50vD[D!^ӄ/z lR >CLsI(mGbӀ&l4 ѯy3$*܈1sH#jr+ ubQHEg"R "fRsB Y+h)PPK'[*[ kgU@jTnuEp # ?) 
UX9 Zj*¦S7AHr/|.HzX[csǣ>^j6q 2yIysǫ: TGDv+_4mvaꃐ|g4ĖF><Ͷia[.b+kz?|]RU~w&7v[Λ/cU8zߥ}ƶG;s3# ֽS@BmqiΏ6c2zv֮sTfކ4+W::P X.C֭.!SU|/ϩ`Y)֭ UN|a$F֭.!SU)wvm[o;Z&4+WѣuJbg!9Ý^{r9?\8͊{ Q//їD!drYp+p* 4JʡNyGP9 RN"< *k*AuݠFF8aRҶ5+WLF3awZÆ56+ h[VYVw# QpU\WlMp#84;lJ:1[ۢIK_/vNA^;ìQL>]Sr\v}Ԗ 9 ;}oSpɗO" B:Dl>NjWD'/q1ٟNe+8/ṋBb1k}Z;T'YT[jC`XO7>y;fj5ݧŸl{hZESR}/)7_9%[u($!졄7j*fod \^QUMz;4嶉y)g{Xܒѣ9%skIIZ.Y9~(;oj*+VNŃ}ētCev#64JqyaaqgoVv 3&9@^R[;/cy wtdISЪb/9xsVZGXfJqr;~2RCdDtiuTcL5ذq,#1"c3$`,1cNPZ'; |,-uh[uΨ> <@3Q1 JK։$H`neSy"c[+LW :ٍ5,ls)VGGD`;O@'!m9kRCb [Vi: 7 e4rhfaqtl.rfܑZPXp] 8N IHN-&H!h AbjHGck5 !m6RPMq%ҭa\ ʈvRiD)ǁ]#8L{do M'B,m̄2&R m$U`iM(a `܈]y`+.-[}%} *~?㜆v&rI$RhlzTAhhgxucĤkHCD{Bt8 ^܏O{ )Y>-IwSԐS(4t)%؀kR_VPE{(vV%r4hizҝ SiR;c%-=k-%$mH腖TjTjH}_D+64X;歪{_ll:@@;lG8i)=]ܗaVw%Q3oYǚ[3dbHb翿6?LK.[B՘P@79ƛ V)u` >X* pU8``1ٿ  v.P L{m*4rbIl:1Z.!)B523$XɈaeJT jo"_!*U#Z%V 2ey 8@>Oj@J+n,DHXY(dTĐ0 )P$(#-v&NUr.?Ѩ$"3Mt #ut3^qLXc8K1Q\}~{)\.>誴FS؟7E2%U6U@Mendίq)׻pp;99(/g_p[ߥZ~&O'?&yV a 2BG&JDyا+rfQX l 81ILfIȀL3h`c|[ \H%QB¿&)Un21 /`0L_&*B'eJ$IXpH JD %TlT"CD PFVEnq&N4&&OθXG kH@rabbqÎ?\XGrCojr4yPnr(K#Y}jcca Ɉ6 ܍y(`TIʱA$׉!F J0D28̩A;DIewTdکG x4FĽF9*xR,.u=9iӳ=Rj[`,Q|* pmV04:Ұv`C ^/}O#[Q Cz|C)C7)c> %$Wt1̪xcY~vo&/VYur~qdŧ/Q^cD.cfC ,֡l|ۗsw ,`m`y%R'(uA%$| ^28~TB=_9UF^fls[U.\]Nw6:,W̗׋7rf.cgj"ܚ|1G㺟=kmfh Hc (E\I"eW)wtb |u$Ki~7}pǙ#[0N`:z@ BL[G8qJ^QyA:C)^h?s6 z?iH95s}0a͈.kw@˥ ЎfEf4*-T?tYL raf3ߛgcV:N5n3z;n>z:y{?o'rU^/lI:vr19)؃6_`~;OA (zWU}_=*1^UF`>4ڛ+x-ת  o5T`aThDŽDNHK٩m<~0F'ΠP>ycPݿyzg ט`pÒU%Q?+ ݥ怆<醇BHZ߿uӇ.T&JPFoIլ=Lnf2m̃ԃ}^Y<ԋ-l"9;[^ྮ6` ~:{kz>$ v@͋;3>s,j,rג},_/M+GF1wF5Q*nfѠޥ!\EtqX=&EctB>ǺNecSo^Qhbh3W huSw EuBcNP|o޴ѺАguk(<3>!{%뽘>a_Mj߃nvjun%y&ݧOg翦|sq9_=%L%ބus^*f}4#bJ r}Ȱa$2 I5`pidn K#a0OF!Hƅåu7idՃa1by0xdF(0Sj8ʔrO1x%2z(D=/rP_bbjКFG_A#w<1+0vMO 5ok$Ї*qw)ipRsz2pecPpRZZd2:B7{R)1pڀ|ɬ*pn,r#!\Et -d3uÔºb:n,ܾ[x_GCC֣l1кb:n[XN[-56)>O/g0vX8wP򩒔Qp"9m "o<|NMڹ$N{Erc#\$_.DAwߙu}7Չgr)ˬn%7n:f\EØcDi6C3͒矒Gsju7ιyի.7xtkn^-FP2ҶBf?pruko|g)xtAt" )K%״ YV,s`Ԅ>&zXsVt0H&BLkƫokTdg)& UϋTIL’0H #C(7Joa3,Uz#H/a, < ')a*eTЋ==,L1(.X?Вkea @su^PG2sf,9>qw$Ll;Ƅ"KR!EBaK!úXYFM(F Br *tQRx.@(%i^R(i+:|AKw|~9kp$0sp \ffmt!ei['ӴH 98&e(yV0 n`~`єXl+̖Y`y "q!R8-T Lx@@3 LDm%X(oH€oc,Y²'!xhnV%e&R:Oe ZŠSGHqcAED9t!~gi`f*Ӣ,(6yRsD䰌)+0i2{EF`:X Ύ)M ,c;/vO]GH\nHfp|/i{D %.čێ[գ ?tㅜPp9Atͩ^m&-WNbDa|F~;Y8Sҍ2%Ezx5' mbv0a1 ±;lZ(5h.3x $ܲ;kֱ]`~uo.z6A`FC@h&qќfgKep:2ȅ 2y+;r1L >ؖ]$~J:|c(A*O|i0 vD%B9431t xcR@^O."$uG8P#jq}r蘎]9A]1d ]}B1pi >[p2ܩeᓱcWUNiɈڣ*Y= H--}JCoN`0 &VNDkpc7㍹&h$v|{0?|~uy{SE2 q.ON,P <x%JR2)(Yl˶2N$AL5jucj#;@`yKpf_1afPzٜDi.8@KZzDK㼜?qhv%XCP KV6TJG opvX78g6x'pÆ(E8B|;LymU ;>41C:8~̠d#y{N.3WTԍ (}xS41NnN.wh1xIǒ?r]_lL~vw _vQg?:ɵ0,ͭ#sZ&qII ȺwyHu(Ά#?҉'O}%WT:&;^lt8Q|{rw:8XSLAb} I#XR# />y xpB;/Fj?/3GHVk5sެ9I~SDJx{Hp@ϫ~2gȹ*'ozrWNB@V_L=-\E</s03sdzdfs5߽WӺx'Hɚ/|>}>i#6F6k$͛8{g.G {{YzOPBaP)#TK*wE5g<.hxnwo)=fDN(\[Zj9Bܾ!ҪBDMsUYqbDzy0*U366ǧpEo`p؊1jHN!cG84>g/pb47+ MU&1EbVyJ+ R,t( i)Ҝ MG:/۳U{Y)^ šc{t Oq4q& !U{+e2jB)kluۈd\picp=u#pIPm䖹!-@=;Ѭ&1ǂRf&kҧgx #"%O֏)Rm SoHuO|ˎoC9Q(yx cˤ}N hǟt}@Ǐ*O9a='/?Ùk"3(Bs,ycIW3[[{At>[n8tl/$B fۥBhkхTP"m ,d* e4c8+T4%" 7 @̤i3㸔.DQF󂥹(L^h$eƍ3[*6X'֝j/:&zHK"d ƶZWs _>KygU둺I +z{A*yATW76ױ:E;Sco0 I3qE?|wlX)eu`' B\şWwV`SѭdL.qoOn.`SLP(|6-]䟾w 7%!uwQ"7qjyq=Aa7"~ u ª_9ꗵc wG>">/|~E|rGR68qi[kuw@x#as Rw5,L4Ej*\+ރpyM5hZᇕ(|yX~ ,&?}Z,1,x+%%?Ld[+?^__\ɭ+ٗɽ QE9j1}e^A`AKZLvG#v~ {Tc39jFG7FF5{fn WeUnԪbjZXՌRլ> ѡ>n8"P3и(T:$i Rآ=^8_|X\'-"^?_ Xrn΁}&C.yEo!+ocK]`y JX^QtOL#&6QQ [lD:̐К6o˹'-$ÒG<.win>KJDo_]990o9f@ 1'[1f9 f9X0kHKNFsJl*eIcۺ$~Z"NE>YaEO3؛/ɰh|')v}{XX!lF >!ᨋ` \t.V !gB)cec&gZ"ޞlŧ8L"Af XYXL&Wlve=XnuWX\+>.ogAB7ߝ=Lu1H~]yw\܀2H8TPigӎ[!9Z[a- @$LK^:QW 7N+-଱D b.10A)+xጡJ<ʐ+!:kmlP>[]Z :cO &Qԡa,)hty#qXo|B1O $(ZF \R!F\ b(pL=v-- zR]isz1d $@^Y1"@/e 
var/home/core/zuul-output/logs/kubelet.log
Jan 22 11:54:51 crc systemd[1]: Starting Kubernetes Kubelet...
Jan 22 11:54:51 crc restorecon[4741]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 22 11:54:51 crc restorecon[4741]:
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 11:54:51 
crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 11:54:51 crc restorecon[4741]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 11:54:51 crc restorecon[4741]: 
/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 11:54:51 crc restorecon[4741]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 22 11:54:51 crc restorecon[4741]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 22 11:54:51 crc restorecon[4741]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:51 crc restorecon[4741]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 22 11:54:51 crc restorecon[4741]:
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 11:54:51 crc restorecon[4741]: 
/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:51 crc restorecon[4741]: 
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 22 11:54:51 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
[... Jan 22 11:54:51-11:54:52 crc restorecon[4741]: the identical "not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16" entry repeats for every remaining file under /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/: the extracted root CA certificates (AAA_Certificate_Services, Baltimore_CyberTrust_Root, BJCA_Global_Root_CA2, Buypass_Class_2_Root_CA, Certainly_Root_E1/R1, Certum_Trusted_Network_CA and CA_2, COMODO_Certification_Authority plus ECC/RSA variants, CommScope_Public_Trust_ECC/RSA_Root-01/-02, D-TRUST_BR/EV_Root_CA_1_2020 and Root_Class_3_CA_2_2009/_EV_2009, DigiCert_Assured_ID_Root_CA/G2/G3, DigiCert_Global_Root_CA/G2/G3, DigiCert_High_Assurance_EV_Root_CA, DigiCert_TLS_ECC_P384_Root_G5, DigiCert_TLS_RSA4096_Root_G5, DigiCert_Trusted_Root_G4, Entrust.net_Certification_Authority__2048_, Entrust_Root_Certification_Authority and -_EC1/-_G2/-_G4, FIRMAPROFESIONAL_CA_ROOT-A_WEB, GDCA_TrustAUTH_R5_ROOT, GLOBALTRUST_2020, GlobalSign_Root_CA/E46/R46 and GlobalSign.pem/.1/.2/.3, Go_Daddy_Class_2_Certification_Authority, Go_Daddy_Root_Certificate_Authority_-_G2, GTS_Root_R1/R2/R3/R4, HARICA_TLS_ECC/RSA_Root_CA_2021, Hellenic_Academic_and_Research_Institutions_RootCA_2015 and ECC_RootCA_2015, HiPKI_Root_CA_-_G1, Hongkong_Post_Root_CA_3, IdenTrust_Commercial/Public_Sector_Root_CA_1, ISRG_Root_X1/X2, Izenpe.com, Microsec_e-Szigno_Root_CA_2009, Microsoft_ECC/RSA_Root_Certificate_Authority_2017, NAVER_Global_Root_Certification_Authority, NetLock_Arany__Class_Gold__F__tan__s__tv__ny, OISTE_WISeKey_Global_Root_GB/GC_CA, QuoVadis_Root_CA_1_G3, QuoVadis_Root_CA_2/2_G3/3/3_G3, Sectigo_Public_Server_Authentication_Root_E46/R46, SecureSign_RootCA11, SecureTrust_CA, Security_Communication_RootCA2/RootCA3, SSL.com_EV_Root_Certification_Authority_ECC and RSA_R2, SSL.com_Root_Certification_Authority_ECC, SSL.com_TLS_RSA_Root_CA_2022, SwissSign_Gold_CA_-_G2, SZAFIR_ROOT_CA2) and their OpenSSL hash links (*.0) ...]
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 
11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 22 11:54:52 crc 
restorecon[4741]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/<name> not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/<name>/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
where the pair of records above repeats, with identical timestamp, host, PID, and SELinux context, for each <name> in order:
cert-manager, cert-utils-operator, cluster-aas-operator, cluster-impairment-operator, cluster-manager, cockroachdb, codeflare-operator,
community-kubevirt-hyperconverged, community-trivy-operator, community-windows-machine-config-operator, customized-user-remediation, cxl-operator, dapr-kubernetes-operator,
datadog-operator, datatrucker-operator, dbaas-operator, debezium-operator, dell-csm-operator, deployment-validation-operator, devopsinabox,
dns-operator, dynatrace-operator, eclipse-amlen-operator, eclipse-che, ecr-secret-operator, edp-keycloak-operator, eginnovations-operator,
egressip-ipam-operator, ember-csi-community-operator, etcd, eventing-kogito, external-secrets-operator, falcon-operator, fence-agents-remediation,
flink-kubernetes-operator, flux, k8gb, fossul-operator, github-arc-operator, gitops-primer, gitwebhook-operator,
global-load-balancer-operator, grafana-operator, group-sync-operator, hawtio-operator, hazelcast-platform-operator, hedvig-operator, hive-operator,
horreum-operator, hyperfoil-bundle, ibm-block-csi-operator-community, ibm-security-verify-access-operator, ibm-spectrum-scale-csi-operator, ibmcloud-operator, infinispan,
integrity-shield-operator, ipfs-operator, istio-workspace-operator, jaeger, kaoto-operator, keda, keepalived-operator,
keycloak-operator, keycloak-permissions-operator, klusterlet, kogito-operator, koku-metrics-operator, konveyor-operator, korrel8r,
kuadrant-operator, kube-green, kubecost, kubernetes-imagepuller-operator, kubeturbo, l5-operator, layer7-operator,
lbconfig-operator, lib-bucket-provisioner, limitador-operator, logging-operator, loki-helm-operator, loki-operator, machine-deletion-remediation,
mariadb-operator, marin3r, mercury-operator, microcks, mongodb-atlas-kubernetes, mongodb-operator, move2kube-operator,
multi-nic-cni-operator, multicluster-global-hub-operator, multicluster-operators-subscription, must-gather-operator, namespace-configuration-operator, ncn-operator, ndmspc-operator,
netobserv-operator, neuvector-community-operator, nexus-operator, nexus-operator-m88i, nfs-provisioner-operator, nlp-server, node-discovery-operator,
node-healthcheck-operator, node-maintenance-operator, nsm-operator, oadp-operator, observability-operator, oci-ccm-operator, ocm-operator,
odoo-operator, opendatahub-operator, openebs, openshift-nfd-operator, openshift-node-upgrade-mutex-operator, openshift-qiskit-operator, opentelemetry-operator,
patch-operator, patterns-operator, pcc-operator, pelorus-operator, percona-xtradb-cluster-operator, portworx-essentials, postgresql,
proactive-node-scaling-operator, project-quay, prometheus, prometheus-exporter-operator, prometurbo, pubsubplus-eventbroker-operator, pulp-operator,
rabbitmq-cluster-operator, rabbitmq-messaging-topology-operator, redis-operator, reportportal-operator, resource-locker-operator, rhoas-operator, ripsaw,
sailoperator, sap-commerce-operator
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to
system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc 
restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 22 11:54:52 crc restorecon[4741]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 22 11:54:52 crc restorecon[4741]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 22 11:54:52 crc restorecon[4741]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Jan 22 11:54:52 crc kubenswrapper[4773]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 22 11:54:52 crc kubenswrapper[4773]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Jan 22 11:54:52 crc kubenswrapper[4773]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 22 11:54:52 crc kubenswrapper[4773]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 22 11:54:52 crc kubenswrapper[4773]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Jan 22 11:54:52 crc kubenswrapper[4773]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.528618    4773 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531378    4773 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531397    4773 feature_gate.go:330] unrecognized feature gate: Example
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531402    4773 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531407    4773 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531411    4773 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531415    4773 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531419    4773 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531423    4773 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531427    4773 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531430    4773 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531434    4773 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531439    4773 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531445    4773 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531451    4773 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531456    4773 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531461    4773 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531466    4773 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531471    4773 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531476    4773 feature_gate.go:330] unrecognized feature gate: OVNObservability
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531480    4773 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531485    4773 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531502    4773 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531508    4773 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531512    4773 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531516    4773 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531520    4773 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531525    4773 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531530    4773 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531533    4773 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531537    4773 feature_gate.go:330] unrecognized feature gate: NewOLM
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531541    4773 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531545    4773 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531549    4773 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531553    4773 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531557    4773 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531562    4773 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531567    4773 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531571    4773 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531575    4773 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531578    4773 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531582    4773 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531586    4773 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531589    4773 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531593    4773 feature_gate.go:330] unrecognized feature gate: SignatureStores
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531597    4773 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531603    4773 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531607    4773 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531611    4773 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531615    4773 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531619    4773 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531623    4773 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531627    4773 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531631    4773 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531635    4773 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531639    4773 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531643    4773 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531646    4773 feature_gate.go:330] unrecognized feature gate: PinnedImages
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531651    4773 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531654    4773 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531658    4773 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531661    4773 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531664    4773 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531668    4773 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531671    4773 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531674    4773 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531678    4773 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531681    4773 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531684    4773 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531688    4773 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531691    4773 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.531695    4773 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.531919    4773 flags.go:64] FLAG: --address="0.0.0.0"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.531929    4773 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.531937    4773 flags.go:64] FLAG: --anonymous-auth="true"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.531943    4773 flags.go:64] FLAG: --application-metrics-count-limit="100"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.531949    4773 flags.go:64] FLAG: --authentication-token-webhook="false"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.531955    4773 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.531962    4773 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.531969    4773 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.531974    4773 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.531979    4773 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.531985    4773 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.531991    4773 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.531996    4773 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532001    4773 flags.go:64] FLAG: --cgroup-root=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532007    4773 flags.go:64] FLAG: --cgroups-per-qos="true"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532012    4773 flags.go:64] FLAG: --client-ca-file=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532018    4773 flags.go:64] FLAG: --cloud-config=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532023    4773 flags.go:64] FLAG: --cloud-provider=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532027    4773 flags.go:64] FLAG: --cluster-dns="[]"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532033    4773 flags.go:64] FLAG: --cluster-domain=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532037    4773 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532041    4773 flags.go:64] FLAG: --config-dir=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532045    4773 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532050    4773 flags.go:64] FLAG: --container-log-max-files="5"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532056    4773 flags.go:64] FLAG: --container-log-max-size="10Mi"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532060    4773 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532064    4773 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532068    4773 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532072    4773 flags.go:64] FLAG: --contention-profiling="false"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532076    4773 flags.go:64] FLAG: --cpu-cfs-quota="true"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532080    4773 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532085    4773 flags.go:64] FLAG: --cpu-manager-policy="none"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532088    4773 flags.go:64] FLAG: --cpu-manager-policy-options=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532094    4773 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532098    4773 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532102    4773 flags.go:64] FLAG: --enable-debugging-handlers="true"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532105    4773 flags.go:64] FLAG: --enable-load-reader="false"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532110    4773 flags.go:64] FLAG: --enable-server="true"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532114    4773 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532119    4773 flags.go:64] FLAG: --event-burst="100"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532123    4773 flags.go:64] FLAG: --event-qps="50"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532127    4773 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532131    4773 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532135    4773 flags.go:64] FLAG: --eviction-hard=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532140    4773 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532144    4773 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532148    4773 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532152    4773 flags.go:64] FLAG: --eviction-soft=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532156    4773 flags.go:64] FLAG: --eviction-soft-grace-period=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532161    4773 flags.go:64] FLAG: --exit-on-lock-contention="false"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532164    4773 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532168    4773 flags.go:64] FLAG: --experimental-mounter-path=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532172    4773 flags.go:64] FLAG: --fail-cgroupv1="false"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532176    4773 flags.go:64] FLAG: --fail-swap-on="true"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532180    4773 flags.go:64] FLAG: --feature-gates=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532185    4773 flags.go:64] FLAG: --file-check-frequency="20s"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532189    4773 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532193    4773 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532198    4773 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532202    4773 flags.go:64] FLAG: --healthz-port="10248"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532206    4773 flags.go:64] FLAG: --help="false"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532210    4773 flags.go:64] FLAG: --hostname-override=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532214    4773 flags.go:64] FLAG: --housekeeping-interval="10s"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532219    4773 flags.go:64] FLAG: --http-check-frequency="20s"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532223    4773 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532227    4773 flags.go:64] FLAG: --image-credential-provider-config=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532231    4773 flags.go:64] FLAG: --image-gc-high-threshold="85"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532235    4773 flags.go:64] FLAG: --image-gc-low-threshold="80"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532239    4773 flags.go:64] FLAG: --image-service-endpoint=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532243    4773 flags.go:64] FLAG: --kernel-memcg-notification="false"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532247    4773 flags.go:64] FLAG: --kube-api-burst="100"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532251    4773 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532255    4773 flags.go:64] FLAG: --kube-api-qps="50"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532259    4773 flags.go:64] FLAG: --kube-reserved=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532263    4773 flags.go:64] FLAG: --kube-reserved-cgroup=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532267    4773 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532271    4773 flags.go:64] FLAG: --kubelet-cgroups=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532275    4773 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532295    4773 flags.go:64] FLAG: --lock-file=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532300    4773 flags.go:64] FLAG: --log-cadvisor-usage="false"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532305    4773 flags.go:64] FLAG: --log-flush-frequency="5s"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532309    4773 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532315    4773 flags.go:64] FLAG: --log-json-split-stream="false"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532319    4773 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532323    4773 flags.go:64] FLAG: --log-text-split-stream="false"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532327    4773 flags.go:64] FLAG: --logging-format="text"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532331    4773 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532335    4773 flags.go:64] FLAG: --make-iptables-util-chains="true"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532339    4773 flags.go:64] FLAG: --manifest-url=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532343    4773 flags.go:64] FLAG: --manifest-url-header=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532349    4773 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532353    4773 flags.go:64] FLAG: --max-open-files="1000000"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532358    4773 flags.go:64] FLAG: --max-pods="110"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532362    4773 flags.go:64] FLAG: --maximum-dead-containers="-1"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532366    4773 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532371    4773 flags.go:64] FLAG: --memory-manager-policy="None"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532375    4773 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532379    4773 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532383    4773 flags.go:64] FLAG: --node-ip="192.168.126.11"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532387    4773 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532396    4773 flags.go:64] FLAG: --node-status-max-images="50"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532400    4773 flags.go:64] FLAG: --node-status-update-frequency="10s"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532404    4773 flags.go:64] FLAG: --oom-score-adj="-999"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532408    4773 flags.go:64] FLAG: --pod-cidr=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532412    4773 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532419    4773 flags.go:64] FLAG: --pod-manifest-path=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532423    4773 flags.go:64] FLAG: --pod-max-pids="-1"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532427    4773 flags.go:64] FLAG: --pods-per-core="0"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532430    4773 flags.go:64] FLAG: --port="10250"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532434    4773 flags.go:64] FLAG: --protect-kernel-defaults="false"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532438    4773 flags.go:64] FLAG: --provider-id=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532442    4773 flags.go:64] FLAG: --qos-reserved=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532447    4773 flags.go:64] FLAG: --read-only-port="10255"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532451    4773 flags.go:64] FLAG: --register-node="true"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532455    4773 flags.go:64] FLAG: --register-schedulable="true"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532459    4773 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532466    4773 flags.go:64] FLAG: --registry-burst="10"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532470    4773 flags.go:64] FLAG: --registry-qps="5"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532473    4773 flags.go:64] FLAG: --reserved-cpus=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532478    4773 flags.go:64] FLAG: --reserved-memory=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532483    4773 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532487    4773 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532491    4773 flags.go:64] FLAG: --rotate-certificates="false"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532495    4773 flags.go:64] FLAG: --rotate-server-certificates="false"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532499    4773 flags.go:64] FLAG: --runonce="false"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532503    4773 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532507    4773 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532511    4773 flags.go:64] FLAG: --seccomp-default="false"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532515    4773 flags.go:64] FLAG: --serialize-image-pulls="true"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532519    4773 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532525    4773 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532529    4773 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532533    4773 flags.go:64] FLAG: --storage-driver-password="root"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532537    4773 flags.go:64] FLAG: --storage-driver-secure="false"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532541    4773 flags.go:64] FLAG: --storage-driver-table="stats"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532545    4773 flags.go:64] FLAG: --storage-driver-user="root"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532549    4773 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532553    4773 flags.go:64] FLAG: --sync-frequency="1m0s"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532557    4773 flags.go:64] FLAG: --system-cgroups=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532561    4773 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532567    4773 flags.go:64] FLAG: --system-reserved-cgroup=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532571    4773 flags.go:64] FLAG: --tls-cert-file=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532575    4773 flags.go:64] FLAG: --tls-cipher-suites="[]"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532581    4773 flags.go:64] FLAG: --tls-min-version=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532584    4773 flags.go:64] FLAG: --tls-private-key-file=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532588    4773 flags.go:64] FLAG: --topology-manager-policy="none"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532592    4773 flags.go:64] FLAG: --topology-manager-policy-options=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532597    4773 flags.go:64] FLAG: --topology-manager-scope="container"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532601    4773 flags.go:64] FLAG: --v="2"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532607    4773 flags.go:64] FLAG: --version="false"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532612    4773 flags.go:64] FLAG: --vmodule=""
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532617    4773 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.532621    4773 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533036    4773 feature_gate.go:330] unrecognized feature gate: NewOLM
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533045    4773 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533050    4773 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533053    4773 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533057    4773 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533062    4773 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533066    4773 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533070    4773 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533075    4773 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533079    4773 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533083    4773 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533086    4773 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533090    4773 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533094    4773 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533098    4773 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533101    4773 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533105    4773 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533109    4773 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533112    4773 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533116    4773 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533121    4773 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533125    4773 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533129    4773 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533133    4773 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533136    4773 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533140    4773 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533145    4773 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533148    4773 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533152    4773 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533155    4773 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533159    4773 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533162    4773 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533166    4773 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533169    4773 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533173    4773 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533177    4773 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533182    4773 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533186    4773 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533190    4773 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533193    4773 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533198    4773 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533202    4773 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533206    4773 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533210    4773 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533213    4773 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533217    4773 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533220    4773 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533224    4773 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533227    4773 feature_gate.go:330] unrecognized feature gate: OVNObservability
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533232    4773 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533235    4773 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533239    4773 feature_gate.go:330] unrecognized feature gate: PinnedImages
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533242    4773 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533246    4773 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533249    4773 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533252    4773 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533256    4773 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533260    4773 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533263    4773 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533267    4773 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533270    4773 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533274    4773 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533313    4773 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533319    4773 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533323    4773 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533327    4773 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533331    4773 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533335    4773 feature_gate.go:330] unrecognized feature gate: Example
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533339    4773 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533343    4773 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.533349    4773 feature_gate.go:330] unrecognized feature gate: SignatureStores
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.533355    4773 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.542209    4773 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.542231    4773 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542875    4773 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542910    4773 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542914    4773 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542919    4773 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542925    4773 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542929    4773 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542933    4773 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542937    4773 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542940    4773 feature_gate.go:330] unrecognized feature gate: OVNObservability
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542944    4773 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542949    4773 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542952    4773 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542958    4773 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542965    4773 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542971    4773 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542975    4773 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542980    4773 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542985    4773 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542988    4773 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542992    4773 feature_gate.go:330] unrecognized feature gate: NewOLM
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.542997    4773 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543001    4773 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543005    4773 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543009    4773 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543013    4773 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543017    4773 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543022    4773 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543026    4773 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543030    4773 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543034    4773 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543037    4773 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543041    4773 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543045    4773 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543049    4773 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543053    4773 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543057    4773 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543060    4773 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543064    4773 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543068    4773 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543072    4773 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543075    4773 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543079    4773 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543083    4773 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543087    4773 feature_gate.go:330] unrecognized feature gate: Example
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543091    4773 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543095    4773 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543098    4773 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543102    4773 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543107    4773 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543112    4773 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543117    4773 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543122    4773 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543127    4773 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543132    4773 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543135    4773 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543139    4773 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543143    4773 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543147    4773 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543151    4773 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543154    4773 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543158    4773 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543162    4773 feature_gate.go:330] unrecognized feature gate: PinnedImages
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543165    4773 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543168    4773 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543173    4773 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543181    4773 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543191    4773 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543198    4773 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543204    4773 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543209    4773 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543214    4773 feature_gate.go:330] unrecognized feature gate: SignatureStores
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.543223    4773 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543375    4773 feature_gate.go:330] unrecognized feature gate: NewOLM
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543380    4773 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543386    4773 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543392    4773 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543397    4773 feature_gate.go:330] unrecognized feature gate: PinnedImages
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543401    4773 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543405    4773 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543409    4773 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543413    4773 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543417    4773 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543421    4773 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543425    4773 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543429    4773 feature_gate.go:330] unrecognized feature gate: Example
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543433    4773 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543436    4773 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543440    4773 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543443    4773 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543447    4773 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543450    4773 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543453    4773 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543457    4773 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543461    4773 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543464    4773 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543468    4773 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543471    4773 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543475    4773 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543478    4773 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543482    4773 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543485    4773 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543489    4773 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543492    4773 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543496    4773 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543500    4773 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543505    4773 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543510    4773 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543514    4773 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543518    4773 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543522    4773 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543526    4773 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543530    4773 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543533    4773 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543537    4773 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543540    4773 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543544    4773 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543548    4773 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543552    4773 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543556    4773 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543559    4773 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543563    4773 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543567    4773 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543570    4773 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543574    4773 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543578    4773 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543581
4773 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543585 4773 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543589 4773 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543594 4773 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543598 4773 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543602 4773 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543605 4773 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543609 4773 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543614 4773 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543618 4773 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543622 4773 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543627 4773 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543631 4773 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543635 4773 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543638 4773 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543642 4773 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543645 4773 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.543649 4773 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.543655 4773 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.544095 4773 server.go:940] "Client rotation is on, will bootstrap in background" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.546782 4773 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.546864 4773 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
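[editor annotation] The block above shows the kubelet resolving its feature-gate map twice: every gate name it does not recognize triggers a feature_gate.go:330 warning, explicitly-set GA or deprecated gates trigger the feature_gate.go:351/353 "will be removed" warnings, and the surviving known gates are printed as the resolved map at feature_gate.go:386. A minimal Go sketch of that parse step, assuming gates arrive as comma-separated Name=bool pairs; this is an illustration only, not the actual k8s.io/component-base/featuregate code:

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// Defaults for the two gates this sketch knows about (values chosen to
// match the resolved map in the log; everything else is "unrecognized").
var known = map[string]bool{
	"KMSv1":                     false,
	"ValidatingAdmissionPolicy": true,
}

// setGates applies a "Name=bool,Name=bool" spec on top of the defaults,
// warning about unknown names the way feature_gate.go:330 does.
func setGates(spec string) map[string]bool {
	out := map[string]bool{}
	for k, v := range known {
		out[k] = v
	}
	for _, kv := range strings.Split(spec, ",") {
		name, val, ok := strings.Cut(kv, "=")
		if !ok {
			continue // malformed entry, skip
		}
		if _, found := known[name]; !found {
			fmt.Printf("W unrecognized feature gate: %s\n", name)
			continue
		}
		b, err := strconv.ParseBool(val)
		if err != nil {
			continue // non-boolean value, skip
		}
		out[name] = b
	}
	return out
}

func main() {
	// Reproduces the shape of the log: one warning for the unknown
	// gate, then the resolved map.
	fmt.Println("feature gates:", setGates("KMSv1=true,OnClusterBuild=true"))
}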
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.547344 4773 server.go:997] "Starting client certificate rotation" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.547366 4773 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.547663 4773 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-28 09:17:58.379003566 +0000 UTC Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.547731 4773 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.551651 4773 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 22 11:54:52 crc kubenswrapper[4773]: E0122 11:54:52.552734 4773 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.97:6443: connect: connection refused" logger="UnhandledError" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.554401 4773 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.562673 4773 log.go:25] "Validated CRI v1 runtime API" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.581914 4773 log.go:25] "Validated CRI v1 image API" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.583645 4773 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.586239 4773 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-01-22-11-49-36-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.586264 4773 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.600102 4773 manager.go:217] Machine: {Timestamp:2026-01-22 11:54:52.598888257 +0000 UTC m=+0.177004102 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:52b2f704-3175-40e6-96da-8c8b45b50226 BootID:fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 
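[editor annotation] certificate_manager.go above reports a client certificate expiring 2026-02-24 but a rotation deadline of 2025-12-28: rotation is scheduled well before expiry, at a jittered point in the validity window so a fleet of kubelets does not file CSRs simultaneously (here the CSR POST fails anyway, since api-int.crc.testing:6443 refuses connections and the kubelet will retry). A hedged sketch of such a jittered deadline; the 70-90% band and the notBefore time below are assumptions for illustration, not necessarily the exact values client-go uses:

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a random point in the 70%-90% band of the
// certificate's validity window (assumed band, for illustration).
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	notBefore := time.Date(2025, 11, 26, 5, 52, 8, 0, time.UTC) // assumed issue time
	notAfter := time.Date(2026, 2, 24, 5, 52, 8, 0, time.UTC)   // expiry from the log
	fmt.Println("rotate at:", rotationDeadline(notBefore, notAfter))
}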
Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:cb:ca:73 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:cb:ca:73 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:ee:8d:ea Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:98:87:e6 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:fe:f9:a5 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:c7:ca:3e Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:66:52:20 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:76:9e:4e:6c:05:52 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:da:e3:94:57:43:cc Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified 
Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.600317 4773 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.600442 4773 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.600977 4773 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.601135 4773 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.601163 4773 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.601390 4773 topology_manager.go:138] "Creating topology manager with none policy" Jan 22 
11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.601401 4773 container_manager_linux.go:303] "Creating device plugin manager" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.601626 4773 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.601661 4773 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.601935 4773 state_mem.go:36] "Initialized new in-memory state store" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.602114 4773 server.go:1245] "Using root directory" path="/var/lib/kubelet" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.602640 4773 kubelet.go:418] "Attempting to sync node with API server" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.602661 4773 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.602684 4773 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.602698 4773 kubelet.go:324] "Adding apiserver pod source" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.602710 4773 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.604545 4773 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.604686 4773 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.97:6443: connect: connection refused Jan 22 11:54:52 crc kubenswrapper[4773]: E0122 11:54:52.604761 4773 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.97:6443: connect: connection refused" logger="UnhandledError" Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.604886 4773 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.97:6443: connect: connection refused Jan 22 11:54:52 crc kubenswrapper[4773]: E0122 11:54:52.604976 4773 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.97:6443: connect: connection refused" logger="UnhandledError" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.605004 4773 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
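[editor annotation] Both reflector failures above are ordinary List calls against the API server (nodes filtered to this node's name, services with a cluster IP), which the informer machinery retries until the endpoint comes up. The failing node List can be reproduced with client-go; the kubeconfig path below is an assumption:

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumed kubeconfig location; any admin kubeconfig for the cluster works.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/kubelet/kubeconfig")
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	nodes, err := cs.CoreV1().Nodes().List(context.TODO(), metav1.ListOptions{
		FieldSelector: "metadata.name=crc", // same selector as the failing reflector call
		Limit:         500,
	})
	if err != nil {
		// With the API server down this fails just like the log line:
		// dial tcp 38.102.83.97:6443: connect: connection refused
		fmt.Println("list failed:", err)
		return
	}
	fmt.Println("nodes:", len(nodes.Items))
}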
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.606106 4773 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.606718 4773 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.606753 4773 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.606765 4773 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.606778 4773 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.606798 4773 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.606811 4773 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.606826 4773 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.606841 4773 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.606851 4773 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.606861 4773 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.606890 4773 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.606899 4773 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.607085 4773 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.607574 4773 server.go:1280] "Started kubelet" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.607888 4773 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.97:6443: connect: connection refused Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.607970 4773 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.607977 4773 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.608958 4773 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Jan 22 11:54:52 crc systemd[1]: Started Kubernetes Kubelet. 
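[editor annotation] ratelimit.go:55 above configures the podresources endpoint with qps=100 and burstTokens=10: a steady-state rate plus a burst bucket. golang.org/x/time/rate expresses exactly those semantics; a small sketch of the behaviour, separate from the kubelet's actual wiring:

package main

import (
	"fmt"

	"golang.org/x/time/rate"
)

func main() {
	// 100 requests/s steady state, bursts of up to 10.
	lim := rate.NewLimiter(rate.Limit(100), 10)
	allowed := 0
	for i := 0; i < 1000; i++ {
		if lim.Allow() {
			allowed++
		}
	}
	// A tight loop drains only the burst bucket: roughly 10 calls
	// succeed before the 100/s refill rate takes over.
	fmt.Println("instantaneous burst allowed:", allowed)
}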
Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.609717 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.609739 4773 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.610171 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 23:56:32.263805738 +0000 UTC Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.614352 4773 server.go:460] "Adding debug handlers to kubelet server" Jan 22 11:54:52 crc kubenswrapper[4773]: E0122 11:54:52.614959 4773 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.615266 4773 volume_manager.go:287] "The desired_state_of_world populator starts" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.615307 4773 volume_manager.go:289] "Starting Kubelet Volume Manager" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.615466 4773 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Jan 22 11:54:52 crc kubenswrapper[4773]: E0122 11:54:52.615689 4773 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.97:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188d0b849017e87e default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-22 11:54:52.607547518 +0000 UTC m=+0.185663353,LastTimestamp:2026-01-22 11:54:52.607547518 +0000 UTC m=+0.185663353,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 22 11:54:52 crc kubenswrapper[4773]: E0122 11:54:52.616509 4773 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" interval="200ms" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.617212 4773 factory.go:55] Registering systemd factory Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.617414 4773 factory.go:221] Registration of the systemd container factory successfully Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.618061 4773 factory.go:153] Registering CRI-O factory Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.618084 4773 factory.go:221] Registration of the crio container factory successfully Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.618141 4773 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.618161 4773 factory.go:103] Registering Raw factory Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.618184 4773 manager.go:1196] Started watching for new ooms in manager Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.618694 4773 
manager.go:319] Starting recovery of all containers Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.619622 4773 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.97:6443: connect: connection refused Jan 22 11:54:52 crc kubenswrapper[4773]: E0122 11:54:52.619744 4773 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.97:6443: connect: connection refused" logger="UnhandledError" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623471 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623553 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623572 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623588 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623603 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623619 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623633 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623651 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623670 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623684 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623697 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623710 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623725 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623741 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623754 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623769 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623784 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623797 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623813 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623829 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" 
volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623845 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623862 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623876 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623890 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623903 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623918 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623937 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.623986 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624025 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624045 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624075 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624091 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624150 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624162 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624177 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624191 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624206 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624220 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624235 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624249 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624262 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624301 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624317 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624331 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624381 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624398 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624415 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624429 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624444 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624467 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624490 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624512 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624543 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624563 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624584 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624609 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624631 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624657 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624678 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624697 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624716 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624734 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624759 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624782 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" 
volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624801 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624826 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624846 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624870 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624892 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.624913 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.625061 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.625091 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.625112 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.625131 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.625149 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" 
volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.625164 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.625179 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.625198 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.625224 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.625275 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626456 4773 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626497 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626520 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626539 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626561 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626581 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626601 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626618 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626637 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626661 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626682 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626699 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626714 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626733 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626756 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626775 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626792 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626809 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626839 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626858 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626874 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626891 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626914 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626933 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626950 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626978 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.626997 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627019 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627039 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627056 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627074 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627089 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627105 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627118 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627132 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627146 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627161 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627175 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627193 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627208 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627225 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627240 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627254 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627269 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627308 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627324 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627341 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627358 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627372 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627386 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627402 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627419 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627431 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627448 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627467 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627486 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627501 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627516 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627532 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627548 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627564 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627578 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627597 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627613 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627629 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627644 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627658 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627672 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627687 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627704 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627721 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627735 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627751 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627767 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627782 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627799 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627814 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627831 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627849 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627868 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627886 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627906 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627921 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627937 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627949 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627965 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.627984 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628002 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628021 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628042 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628061 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628078 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628098 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628119 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" 
volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628141 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628161 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628181 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628200 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628218 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628237 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628256 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628270 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628306 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628321 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628335 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628351 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628363 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628377 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628389 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628403 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628419 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628431 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628454 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628471 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628490 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628506 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" 
volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628519 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628532 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628545 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628559 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628572 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628585 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628600 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628614 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628628 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628641 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628657 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628670 4773 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628682 4773 reconstruct.go:97] "Volume reconstruction finished" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.628691 4773 reconciler.go:26] "Reconciler: start to sync state" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.638639 4773 manager.go:324] Recovery completed Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.647834 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.650012 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.650051 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.650062 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.650892 4773 cpu_manager.go:225] "Starting CPU manager" policy="none" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.650907 4773 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.650924 4773 state_mem.go:36] "Initialized new in-memory state store" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.655380 4773 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.656724 4773 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.656761 4773 status_manager.go:217] "Starting to sync pod status with apiserver" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.656782 4773 kubelet.go:2335] "Starting kubelet main sync loop" Jan 22 11:54:52 crc kubenswrapper[4773]: E0122 11:54:52.656826 4773 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Jan 22 11:54:52 crc kubenswrapper[4773]: W0122 11:54:52.658909 4773 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.97:6443: connect: connection refused Jan 22 11:54:52 crc kubenswrapper[4773]: E0122 11:54:52.659018 4773 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.97:6443: connect: connection refused" logger="UnhandledError" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.661777 4773 policy_none.go:49] "None policy: Start" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.664322 4773 memory_manager.go:170] "Starting memorymanager" policy="None" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.664353 4773 state_mem.go:35] "Initializing new in-memory state store" Jan 22 11:54:52 crc kubenswrapper[4773]: E0122 11:54:52.715667 4773 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.720539 4773 manager.go:334] "Starting Device Plugin manager" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.720602 4773 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.720616 4773 server.go:79] "Starting device plugin registration server" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.721054 4773 eviction_manager.go:189] "Eviction manager: starting control loop" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.721071 4773 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.721261 4773 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.721359 4773 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.721368 4773 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Jan 22 11:54:52 crc kubenswrapper[4773]: E0122 11:54:52.727764 4773 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.756923 4773 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 11:54:52 crc kubenswrapper[4773]: 
I0122 11:54:52.757705 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.758653 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.758698 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.758709 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.758814 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.759216 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.759345 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.759419 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.759455 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.759468 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.759609 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.759755 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.759823 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.761019 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.761035 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.761057 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.761045 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.761092 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.761073 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.761346 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.761174 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.761844 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.761868 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.761867 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.762018 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.762208 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.762238 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.762252 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.762427 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.762513 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.762542 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.762658 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.762686 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.762706 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.763316 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.763345 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.763355 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.763579 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.763611 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.763693 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.763801 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.763938 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.764759 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.764793 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.764804 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:52 crc kubenswrapper[4773]: E0122 11:54:52.817192 4773 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" interval="400ms" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.821273 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.822582 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.822618 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.822631 4773 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.822655 4773 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 22 11:54:52 crc kubenswrapper[4773]: E0122 11:54:52.823185 4773 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.97:6443: connect: connection refused" node="crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.832777 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.832837 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.832869 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.832915 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.832985 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.833010 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.833042 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.833066 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.833090 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.833144 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.833177 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.833193 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.833212 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.833244 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.833412 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935056 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935111 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935129 4773 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935149 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935166 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935182 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935197 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935211 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935226 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935243 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935258 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935330 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") 
" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935358 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935375 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935391 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935759 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935813 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935813 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935889 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935760 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935899 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935911 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: 
\"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935952 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935935 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935975 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935942 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935998 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.935793 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.936047 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 11:54:52 crc kubenswrapper[4773]: I0122 11:54:52.936114 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.024107 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.025473 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.025503 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.025512 4773 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.025627 4773 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 22 11:54:53 crc kubenswrapper[4773]: E0122 11:54:53.026226 4773 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.97:6443: connect: connection refused" node="crc" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.096225 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.120164 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 11:54:53 crc kubenswrapper[4773]: W0122 11:54:53.127057 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-af31e854052fb4fca9a0f6270b8fba3f31ba7be65c009ffd342fb71550f11db1 WatchSource:0}: Error finding container af31e854052fb4fca9a0f6270b8fba3f31ba7be65c009ffd342fb71550f11db1: Status 404 returned error can't find the container with id af31e854052fb4fca9a0f6270b8fba3f31ba7be65c009ffd342fb71550f11db1 Jan 22 11:54:53 crc kubenswrapper[4773]: W0122 11:54:53.140580 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-3f5fe0fe644f3a89ad14f23c22a4a7a1c370e0868732463f9a7685a125521788 WatchSource:0}: Error finding container 3f5fe0fe644f3a89ad14f23c22a4a7a1c370e0868732463f9a7685a125521788: Status 404 returned error can't find the container with id 3f5fe0fe644f3a89ad14f23c22a4a7a1c370e0868732463f9a7685a125521788 Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.146673 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.173278 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.178151 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:54:53 crc kubenswrapper[4773]: W0122 11:54:53.187209 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-5efbd3122bc28bc4db0f18b5f228062efaf441052e76e449ed3eb6f7308182db WatchSource:0}: Error finding container 5efbd3122bc28bc4db0f18b5f228062efaf441052e76e449ed3eb6f7308182db: Status 404 returned error can't find the container with id 5efbd3122bc28bc4db0f18b5f228062efaf441052e76e449ed3eb6f7308182db Jan 22 11:54:53 crc kubenswrapper[4773]: W0122 11:54:53.198236 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-385ae2e604b7a2bbe39af53b9aff5d3d5ab43aaa13d41a60b5eaaded31d1bc28 WatchSource:0}: Error finding container 385ae2e604b7a2bbe39af53b9aff5d3d5ab43aaa13d41a60b5eaaded31d1bc28: Status 404 returned error can't find the container with id 385ae2e604b7a2bbe39af53b9aff5d3d5ab43aaa13d41a60b5eaaded31d1bc28 Jan 22 11:54:53 crc kubenswrapper[4773]: W0122 11:54:53.204006 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-945b7351ec6b788dedfe649f1f10bd1127ac564d90c8cdfa150a8a1e5167aee5 WatchSource:0}: Error finding container 945b7351ec6b788dedfe649f1f10bd1127ac564d90c8cdfa150a8a1e5167aee5: Status 404 returned error can't find the container with id 945b7351ec6b788dedfe649f1f10bd1127ac564d90c8cdfa150a8a1e5167aee5 Jan 22 11:54:53 crc kubenswrapper[4773]: E0122 11:54:53.218710 4773 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" interval="800ms" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.426743 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.428653 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.428706 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.428719 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.428752 4773 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 22 11:54:53 crc kubenswrapper[4773]: E0122 11:54:53.429410 4773 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.97:6443: connect: connection refused" node="crc" Jan 22 11:54:53 crc kubenswrapper[4773]: E0122 11:54:53.488933 4773 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.97:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188d0b849017e87e default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-22 11:54:52.607547518 +0000 UTC m=+0.185663353,LastTimestamp:2026-01-22 11:54:52.607547518 +0000 UTC m=+0.185663353,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.609243 4773 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.97:6443: connect: connection refused Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.611381 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 03:12:42.998165431 +0000 UTC Jan 22 11:54:53 crc kubenswrapper[4773]: W0122 11:54:53.645711 4773 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.97:6443: connect: connection refused Jan 22 11:54:53 crc kubenswrapper[4773]: E0122 11:54:53.645804 4773 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.97:6443: connect: connection refused" logger="UnhandledError" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.664366 4773 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b" exitCode=0 Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.664503 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b"} Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.664692 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"945b7351ec6b788dedfe649f1f10bd1127ac564d90c8cdfa150a8a1e5167aee5"} Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.664857 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.666019 4773 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="f39d4c4e45747f387a446ecd8aed128bdf2c0e89230279b1ff8dc30cf5be7376" exitCode=0 Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.666116 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"f39d4c4e45747f387a446ecd8aed128bdf2c0e89230279b1ff8dc30cf5be7376"} Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.666150 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"385ae2e604b7a2bbe39af53b9aff5d3d5ab43aaa13d41a60b5eaaded31d1bc28"} Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.666188 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.666220 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.666229 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.666306 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.668057 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.668697 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.668783 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.668794 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.668983 4773 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="1a2d37afbe5a7d81a05ce577e8a66d1de8b6b4d34328073e97fcfaec06f35297" exitCode=0 Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.669057 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"1a2d37afbe5a7d81a05ce577e8a66d1de8b6b4d34328073e97fcfaec06f35297"} Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.669105 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"5efbd3122bc28bc4db0f18b5f228062efaf441052e76e449ed3eb6f7308182db"} Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.669216 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.669707 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.669734 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.669742 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.670312 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.670338 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.670348 4773 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.672254 4773 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94" exitCode=0 Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.672531 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94"} Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.672559 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"3f5fe0fe644f3a89ad14f23c22a4a7a1c370e0868732463f9a7685a125521788"} Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.672661 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.674891 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.674940 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.674960 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.677986 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa"} Jan 22 11:54:53 crc kubenswrapper[4773]: I0122 11:54:53.678022 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"af31e854052fb4fca9a0f6270b8fba3f31ba7be65c009ffd342fb71550f11db1"} Jan 22 11:54:53 crc kubenswrapper[4773]: W0122 11:54:53.779347 4773 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.97:6443: connect: connection refused Jan 22 11:54:53 crc kubenswrapper[4773]: E0122 11:54:53.779471 4773 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.97:6443: connect: connection refused" logger="UnhandledError" Jan 22 11:54:53 crc kubenswrapper[4773]: W0122 11:54:53.890078 4773 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.97:6443: connect: connection refused Jan 22 11:54:53 crc kubenswrapper[4773]: E0122 11:54:53.890183 4773 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed 
to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.97:6443: connect: connection refused" logger="UnhandledError" Jan 22 11:54:54 crc kubenswrapper[4773]: W0122 11:54:54.009802 4773 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.97:6443: connect: connection refused Jan 22 11:54:54 crc kubenswrapper[4773]: E0122 11:54:54.009886 4773 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.97:6443: connect: connection refused" logger="UnhandledError" Jan 22 11:54:54 crc kubenswrapper[4773]: E0122 11:54:54.020347 4773 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" interval="1.6s" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.230053 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.231206 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.231246 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.231259 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.231302 4773 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 22 11:54:54 crc kubenswrapper[4773]: E0122 11:54:54.231724 4773 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.97:6443: connect: connection refused" node="crc" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.609189 4773 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.97:6443: connect: connection refused Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.612207 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 05:31:50.793548795 +0000 UTC Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.658253 4773 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 22 11:54:54 crc kubenswrapper[4773]: E0122 11:54:54.659139 4773 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.97:6443: connect: connection refused" logger="UnhandledError" Jan 22 11:54:54 crc 
kubenswrapper[4773]: I0122 11:54:54.684737 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"1c3bc0732e748f8a71ec695e3bf7a436347f7f2de2c14233295e157d42182295"} Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.684844 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.686110 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.686143 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.686151 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.688598 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ea5835dddfbe2535ef6ec62405f9f325318d3951aaec646c967edbe896e8f827"} Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.688636 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"4dbf974e110262739b5ac93b528d8acd1f01163d825c0351bf4882e36548efdb"} Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.688659 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"75f708c0004ed30f729db1f335241b3005caed61f0d6dab2ac61685f14c189c4"} Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.688743 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.689730 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.689759 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.689771 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.691912 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3"} Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.691937 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6"} Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.691950 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630"} Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.691952 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.692564 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.692591 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.692600 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.695033 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56"} Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.695057 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b"} Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.695070 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b"} Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.695083 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592"} Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.696768 4773 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="2e4cc13d3c6fda26bcee591bf368a92e4a48cbf7f07b28770096d6b6fd108e9d" exitCode=0 Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.696793 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"2e4cc13d3c6fda26bcee591bf368a92e4a48cbf7f07b28770096d6b6fd108e9d"} Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.696889 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.697626 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.697668 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:54 crc kubenswrapper[4773]: I0122 11:54:54.697681 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.612392 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 02:00:06.255949264 
+0000 UTC Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.704870 4773 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="08bf0d37717702797a841f7c4f8dc0fd718ff004f7bf20b8db6972a8f20bf6d2" exitCode=0 Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.704985 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"08bf0d37717702797a841f7c4f8dc0fd718ff004f7bf20b8db6972a8f20bf6d2"} Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.705212 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.707054 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.707100 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.707113 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.711984 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.712041 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.712024 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8"} Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.713241 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.713264 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.713278 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.713436 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.713499 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.713519 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.832210 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.834415 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.834489 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.834509 4773 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 22 11:54:55 crc kubenswrapper[4773]: I0122 11:54:55.834550 4773 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.091844 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.092031 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.093330 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.093445 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.093517 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.301153 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.307047 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.612600 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 19:07:31.104572665 +0000 UTC Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.719046 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ef1359a51644cf623e0757795c0f266e17e050d16458537b4fcb8aa6a6e5ca48"} Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.719102 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ef50e57c63675b69abaf30febe975419e6c7af8e5b4f8a4c3eecc217298ac89a"} Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.719117 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2451cd9c5a51ee6e54ab04b64027b14fcf66b4bba836cc0a1edacc528ff0b339"} Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.719130 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f0854934283dc0254d0ade1988f140ca2668ae867993362e49f401425d5c7b3e"} Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.719140 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"dd2112bf78965ec4cf037c4136f79954cc27c27a8897d7f949330d98a1e2ffca"} Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.719195 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.719222 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:56 crc kubenswrapper[4773]: 
I0122 11:54:56.719338 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.719424 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.720187 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.720217 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.720228 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.720239 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.720270 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.720314 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.720855 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.720900 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:56 crc kubenswrapper[4773]: I0122 11:54:56.720925 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:57 crc kubenswrapper[4773]: I0122 11:54:57.169707 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 11:54:57 crc kubenswrapper[4773]: I0122 11:54:57.479213 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Jan 22 11:54:57 crc kubenswrapper[4773]: I0122 11:54:57.613394 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 07:05:53.718635848 +0000 UTC Jan 22 11:54:57 crc kubenswrapper[4773]: I0122 11:54:57.721336 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:57 crc kubenswrapper[4773]: I0122 11:54:57.721394 4773 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 11:54:57 crc kubenswrapper[4773]: I0122 11:54:57.721441 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:57 crc kubenswrapper[4773]: I0122 11:54:57.721335 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:57 crc kubenswrapper[4773]: I0122 11:54:57.722678 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:57 crc kubenswrapper[4773]: I0122 11:54:57.722716 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:57 crc kubenswrapper[4773]: I0122 11:54:57.722728 4773 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:57 crc kubenswrapper[4773]: I0122 11:54:57.722666 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:57 crc kubenswrapper[4773]: I0122 11:54:57.722854 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:57 crc kubenswrapper[4773]: I0122 11:54:57.722868 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:57 crc kubenswrapper[4773]: I0122 11:54:57.722971 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:57 crc kubenswrapper[4773]: I0122 11:54:57.722984 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:57 crc kubenswrapper[4773]: I0122 11:54:57.722994 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:58 crc kubenswrapper[4773]: I0122 11:54:58.082791 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 11:54:58 crc kubenswrapper[4773]: I0122 11:54:58.159208 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:54:58 crc kubenswrapper[4773]: I0122 11:54:58.614435 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 06:53:31.118460409 +0000 UTC Jan 22 11:54:58 crc kubenswrapper[4773]: I0122 11:54:58.723526 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:58 crc kubenswrapper[4773]: I0122 11:54:58.723526 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:58 crc kubenswrapper[4773]: I0122 11:54:58.724642 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:58 crc kubenswrapper[4773]: I0122 11:54:58.724671 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:58 crc kubenswrapper[4773]: I0122 11:54:58.724681 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:58 crc kubenswrapper[4773]: I0122 11:54:58.724687 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:58 crc kubenswrapper[4773]: I0122 11:54:58.724727 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:58 crc kubenswrapper[4773]: I0122 11:54:58.724740 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:58 crc kubenswrapper[4773]: I0122 11:54:58.932185 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 11:54:58 crc kubenswrapper[4773]: I0122 11:54:58.951440 4773 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 22 11:54:59 crc kubenswrapper[4773]: I0122 11:54:59.615104 4773 
certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 17:39:47.445816174 +0000 UTC Jan 22 11:54:59 crc kubenswrapper[4773]: I0122 11:54:59.630710 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Jan 22 11:54:59 crc kubenswrapper[4773]: I0122 11:54:59.630979 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:59 crc kubenswrapper[4773]: I0122 11:54:59.632470 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:59 crc kubenswrapper[4773]: I0122 11:54:59.632523 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:59 crc kubenswrapper[4773]: I0122 11:54:59.632536 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:54:59 crc kubenswrapper[4773]: I0122 11:54:59.725958 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:54:59 crc kubenswrapper[4773]: I0122 11:54:59.726787 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:54:59 crc kubenswrapper[4773]: I0122 11:54:59.726834 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:54:59 crc kubenswrapper[4773]: I0122 11:54:59.726849 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:00 crc kubenswrapper[4773]: I0122 11:55:00.170687 4773 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 22 11:55:00 crc kubenswrapper[4773]: I0122 11:55:00.170766 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 22 11:55:00 crc kubenswrapper[4773]: I0122 11:55:00.615727 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 07:15:45.389689468 +0000 UTC Jan 22 11:55:00 crc kubenswrapper[4773]: I0122 11:55:00.643177 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:55:00 crc kubenswrapper[4773]: I0122 11:55:00.643373 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:55:00 crc kubenswrapper[4773]: I0122 11:55:00.644889 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:00 crc kubenswrapper[4773]: I0122 11:55:00.644942 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:00 crc 
kubenswrapper[4773]: I0122 11:55:00.644954 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:00 crc kubenswrapper[4773]: I0122 11:55:00.727880 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 11:55:00 crc kubenswrapper[4773]: I0122 11:55:00.728794 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:00 crc kubenswrapper[4773]: I0122 11:55:00.728842 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:00 crc kubenswrapper[4773]: I0122 11:55:00.728856 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:01 crc kubenswrapper[4773]: I0122 11:55:01.616374 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 12:22:17.521486865 +0000 UTC Jan 22 11:55:02 crc kubenswrapper[4773]: I0122 11:55:02.616763 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 00:40:06.961471776 +0000 UTC Jan 22 11:55:02 crc kubenswrapper[4773]: E0122 11:55:02.728129 4773 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 22 11:55:03 crc kubenswrapper[4773]: I0122 11:55:03.617677 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 10:32:50.929977607 +0000 UTC Jan 22 11:55:04 crc kubenswrapper[4773]: I0122 11:55:04.618763 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 16:55:46.778947957 +0000 UTC Jan 22 11:55:04 crc kubenswrapper[4773]: I0122 11:55:04.969320 4773 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Jan 22 11:55:04 crc kubenswrapper[4773]: I0122 11:55:04.969872 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Jan 22 11:55:05 crc kubenswrapper[4773]: I0122 11:55:05.501935 4773 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jan 22 11:55:05 crc kubenswrapper[4773]: I0122 11:55:05.502257 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 22 11:55:05 
Jan 22 11:55:05 crc kubenswrapper[4773]: I0122 11:55:05.619270 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 05:13:43.642265347 +0000 UTC
Jan 22 11:55:05 crc kubenswrapper[4773]: I0122 11:55:05.648612 4773 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]log ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]etcd ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/openshift.io-api-request-count-filter ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/openshift.io-startkubeinformers ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/start-apiserver-admission-initializer ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/generic-apiserver-start-informers ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/priority-and-fairness-config-consumer ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/priority-and-fairness-filter ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/storage-object-count-tracker-hook ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/start-apiextensions-informers ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [-]poststarthook/start-apiextensions-controllers failed: reason withheld
Jan 22 11:55:05 crc kubenswrapper[4773]: [-]poststarthook/crd-informer-synced failed: reason withheld
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/start-system-namespaces-controller ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/start-cluster-authentication-info-controller ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/start-legacy-token-tracking-controller ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/start-service-ip-repair-controllers ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld
Jan 22 11:55:05 crc kubenswrapper[4773]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/priority-and-fairness-config-producer ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/bootstrap-controller ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/aggregator-reload-proxy-client-cert ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/start-kube-aggregator-informers ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/apiservice-status-local-available-controller ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/apiservice-status-remote-available-controller ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/apiservice-registration-controller ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/apiservice-wait-for-first-sync ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/apiservice-discovery-controller ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/kube-apiserver-autoregistration ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]autoregister-completion ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/apiservice-openapi-controller ok
Jan 22 11:55:05 crc kubenswrapper[4773]: [+]poststarthook/apiservice-openapiv3-controller ok
Jan 22 11:55:05 crc kubenswrapper[4773]: livez check failed
Jan 22 11:55:05 crc kubenswrapper[4773]: I0122 11:55:05.650009 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 22 11:55:06 crc kubenswrapper[4773]: I0122 11:55:06.620821 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 16:58:22.931559687 +0000 UTC
Jan 22 11:55:07 crc kubenswrapper[4773]: I0122 11:55:07.621340 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 01:28:53.362667493 +0000 UTC
Jan 22 11:55:08 crc kubenswrapper[4773]: I0122 11:55:08.088448 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 22 11:55:08 crc kubenswrapper[4773]: I0122 11:55:08.088796 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 22 11:55:08 crc kubenswrapper[4773]: I0122 11:55:08.091402 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:08 crc kubenswrapper[4773]: I0122 11:55:08.091468 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:08 crc kubenswrapper[4773]: I0122 11:55:08.091484 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:08 crc kubenswrapper[4773]: I0122 11:55:08.622011 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 08:04:04.469679996 +0000 UTC
Jan 22 11:55:09 crc kubenswrapper[4773]: I0122 11:55:09.622967 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 18:44:35.064236177 +0000 UTC
Jan 22 11:55:09 crc kubenswrapper[4773]: I0122 11:55:09.655888 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Jan 22 11:55:09 crc kubenswrapper[4773]: I0122 11:55:09.656310 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 22 11:55:09 crc kubenswrapper[4773]: I0122 11:55:09.657560 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:09 crc kubenswrapper[4773]: I0122 11:55:09.657897 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:09 crc kubenswrapper[4773]: I0122 11:55:09.658021 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
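The multi-line probe body above is the apiserver's aggregated /livez report: one "[+]name ok" or "[-]name failed: reason withheld" line per registered check, followed by "livez check failed" and HTTP 500 when any check fails. A rough sketch of that reporting pattern in plain net/http (illustrative only; the real implementation lives in k8s.io/apiserver's healthz package, and the check names below are taken from the log):

    package main

    import (
    	"fmt"
    	"io"
    	"net/http"
    	"strings"
    )

    // check mirrors one named entry in the report above.
    type check struct {
    	name string
    	fn   func() error
    }

    func livez(checks []check) http.HandlerFunc {
    	return func(w http.ResponseWriter, r *http.Request) {
    		var body strings.Builder
    		failed := false
    		for _, c := range checks {
    			if err := c.fn(); err != nil {
    				failed = true
    				// Reasons are withheld unless verbose output is requested.
    				fmt.Fprintf(&body, "[-]%s failed: reason withheld\n", c.name)
    			} else {
    				fmt.Fprintf(&body, "[+]%s ok\n", c.name)
    			}
    		}
    		if failed {
    			body.WriteString("livez check failed\n")
    			w.WriteHeader(http.StatusInternalServerError) // the 500 seen above
    		}
    		io.WriteString(w, body.String())
    	}
    }

    func main() {
    	checks := []check{
    		{"ping", func() error { return nil }},
    		{"poststarthook/crd-informer-synced", func() error { return fmt.Errorf("not synced") }},
    	}
    	http.HandleFunc("/livez", livez(checks))
    	http.ListenAndServe(":8080", nil)
    }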
Jan 22 11:55:09 crc kubenswrapper[4773]: I0122 11:55:09.666521 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Jan 22 11:55:09 crc kubenswrapper[4773]: I0122 11:55:09.749441 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 22 11:55:09 crc kubenswrapper[4773]: I0122 11:55:09.751116 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:09 crc kubenswrapper[4773]: I0122 11:55:09.751153 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:09 crc kubenswrapper[4773]: I0122 11:55:09.751166 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.171157 4773 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.171219 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Jan 22 11:55:10 crc kubenswrapper[4773]: E0122 11:55:10.498572 4773 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s"
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.501115 4773 trace.go:236] Trace[185035141]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (22-Jan-2026 11:54:56.817) (total time: 13683ms):
Jan 22 11:55:10 crc kubenswrapper[4773]: Trace[185035141]: ---"Objects listed" error: 13683ms (11:55:10.501)
Jan 22 11:55:10 crc kubenswrapper[4773]: Trace[185035141]: [13.683892777s] [13.683892777s] END
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.501143 4773 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Jan 22 11:55:10 crc kubenswrapper[4773]: E0122 11:55:10.502123 4773 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.504672 4773 trace.go:236] Trace[141949731]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (22-Jan-2026 11:54:55.848) (total time: 14655ms):
Jan 22 11:55:10 crc kubenswrapper[4773]: Trace[141949731]: ---"Objects listed" error: 14655ms (11:55:10.504)
Jan 22 11:55:10 crc kubenswrapper[4773]: Trace[141949731]: [14.655800042s] [14.655800042s] END
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.504878 4773 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.504681 4773 trace.go:236] Trace[1720062069]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (22-Jan-2026 11:54:56.583) (total time: 13920ms):
Jan 22 11:55:10 crc kubenswrapper[4773]: Trace[1720062069]: ---"Objects listed" error: 13920ms (11:55:10.504)
Jan 22 11:55:10 crc kubenswrapper[4773]: Trace[1720062069]: [13.920665118s] [13.920665118s] END
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.504986 4773 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.505953 4773 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.506561 4773 trace.go:236] Trace[1126645387]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (22-Jan-2026 11:54:56.559) (total time: 13946ms):
Jan 22 11:55:10 crc kubenswrapper[4773]: Trace[1126645387]: ---"Objects listed" error: 13946ms (11:55:10.506)
Jan 22 11:55:10 crc kubenswrapper[4773]: Trace[1126645387]: [13.946989438s] [13.946989438s] END
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.506597 4773 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.538201 4773 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.567551 4773 csr.go:261] certificate signing request csr-lr856 is approved, waiting to be issued
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.582153 4773 csr.go:257] certificate signing request csr-lr856 is issued
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.615688 4773 apiserver.go:52] "Watching apiserver"
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.621125 4773 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.621407 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf"]
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.621776 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.621889 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.621965 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
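The csr.go lines above are the tail of kubelet serving-certificate rotation: the kubernetes.io/kubelet-serving CSR is approved and then issued, which is why the rotation-deadline messages stop churning shortly after. A sketch of inspecting that CSR with client-go (the CSR name comes from the log; in-cluster config is an assumption, and this is not the kubelet's own csr.go):

    package main

    import (
    	"context"
    	"fmt"

    	certificatesv1 "k8s.io/api/certificates/v1"
    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/rest"
    )

    func main() {
    	cfg, err := rest.InClusterConfig()
    	if err != nil {
    		panic(err)
    	}
    	cs := kubernetes.NewForConfigOrDie(cfg)

    	// CSR name taken from the csr.go entries above.
    	csr, err := cs.CertificatesV1().CertificateSigningRequests().
    		Get(context.TODO(), "csr-lr856", metav1.GetOptions{})
    	if err != nil {
    		panic(err)
    	}
    	for _, cond := range csr.Status.Conditions {
    		if cond.Type == certificatesv1.CertificateApproved {
    			fmt.Println("approved:", cond.Reason)
    		}
    	}
    	// "is issued" corresponds to the signer populating status.certificate.
    	fmt.Println("issued:", len(csr.Status.Certificate) > 0)
    }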
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:10 crc kubenswrapper[4773]: E0122 11:55:10.622059 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:10 crc kubenswrapper[4773]: E0122 11:55:10.622139 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.622844 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.622847 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:10 crc kubenswrapper[4773]: E0122 11:55:10.623441 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.623108 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 15:20:58.116341857 +0000 UTC Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.625308 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.625708 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.625990 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.626767 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.627317 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.627525 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.631486 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.632647 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.632669 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.648261 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.652823 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.668217 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.672202 4773 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40016->192.168.126.11:17697: read: connection reset by peer" start-of-body= Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.672316 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40016->192.168.126.11:17697: read: connection reset by peer" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.672775 4773 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.672832 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.682407 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.700953 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.712403 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.712403 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.717090 4773 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.726464 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.729496 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.741942 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.753637 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
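The "Finished parsing log file" path above follows the kubelet's on-disk layout for container logs: /var/log/pods/<namespace>_<pod-name>_<pod-uid>/<container-name>/<restart-count>.log. A small helper that rebuilds such a path from the names in that entry (the helper name podLogPath is hypothetical):

    package main

    import (
    	"fmt"
    	"path/filepath"
    )

    // podLogPath mirrors the layout seen in the log.go entry above.
    func podLogPath(namespace, podName, podUID, container string, restart int) string {
    	return filepath.Join("/var/log/pods",
    		fmt.Sprintf("%s_%s_%s", namespace, podName, podUID),
    		container,
    		fmt.Sprintf("%d.log", restart))
    }

    func main() {
    	fmt.Println(podLogPath("openshift-kube-apiserver", "kube-apiserver-crc",
    		"f4b27818a5e8e43d0dc095d08835c792", "kube-apiserver-check-endpoints", 0))
    }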
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.753965 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.755529 4773 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8" exitCode=255
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.755593 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8"}
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.756229 4773 scope.go:117] "RemoveContainer" containerID="40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8"
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.767326 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.785124 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.794829 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.801917 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-shp5z"]
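The PLEG records above show the kube-apiserver-check-endpoints container finishing with exitCode=255, while the stale container statuses in the patches carry exitCode 137. By the usual 128+N convention, 137 means the container was killed with SIGKILL (signal 9); 255 is an ordinary, unspecific error exit. A small helper applying that convention (a sketch of the convention, not kubelet code; the helper name is hypothetical):

    package main

    import (
    	"fmt"
    	"syscall"
    )

    // describeExit applies the 128+N "killed by signal" convention used by
    // shells and container runtimes for exit statuses above 128.
    func describeExit(code int) string {
    	if code > 128 && code <= 128+31 {
    		sig := syscall.Signal(code - 128)
    		return fmt.Sprintf("killed by signal %d (%s)", code-128, sig)
    	}
    	return fmt.Sprintf("exited with status %d", code)
    }

    func main() {
    	fmt.Println(137, "->", describeExit(137)) // 128+9: SIGKILL
    	fmt.Println(255, "->", describeExit(255)) // plain error exit
    }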
Need to start a new one" pod="openshift-dns/node-resolver-shp5z" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.803881 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.804187 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.804232 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809152 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809196 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809222 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809243 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809264 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809322 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809346 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809365 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809387 4773 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809412 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809433 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809452 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809474 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809498 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809524 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809546 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809567 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809593 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 22 11:55:10 
crc kubenswrapper[4773]: I0122 11:55:10.809615 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809636 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809658 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809681 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809702 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809736 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809760 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809782 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809804 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809824 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: 
\"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809845 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809865 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809896 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809919 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809939 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809958 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809979 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.809997 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810017 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810038 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810056 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810074 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810094 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810115 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810135 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810235 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810269 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810339 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810363 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810384 4773 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810415 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810444 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810466 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810488 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810509 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810530 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810551 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810571 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810592 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810615 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810637 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810661 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810685 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810707 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810729 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810752 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810776 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810799 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810821 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810840 4773 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810866 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810886 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810908 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810931 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810953 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810975 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.810994 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811013 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811033 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: 
\"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811054 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811073 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811092 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811112 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811134 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811174 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811200 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811220 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811243 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811269 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" 
(UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811309 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811334 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811357 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811380 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811403 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811427 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811450 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811474 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811499 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811520 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811543 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811563 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811584 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811607 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811630 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811653 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811674 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811696 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811719 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811741 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811762 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811785 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811807 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811830 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811852 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811873 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811898 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811920 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811943 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811968 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: 
\"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.811990 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812014 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812038 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812062 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812088 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812113 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812136 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812162 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812183 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812206 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812228 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812251 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812276 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812323 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812348 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812371 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812395 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812418 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812440 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812470 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812493 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812516 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812540 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812565 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812589 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812635 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812661 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812685 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812712 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812737 4773 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812739 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812780 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812809 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812837 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812860 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812882 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812907 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812904 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812934 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812961 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.812984 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.813009 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.813034 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.813060 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.813084 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.813090 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.813107 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.813136 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.813161 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.813185 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.813213 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.813237 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.813263 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.818855 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.818904 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.818932 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.818963 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.818988 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819012 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819039 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819063 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819088 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819115 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819139 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819162 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819182 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" 
(UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819206 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819234 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819257 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819280 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819320 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819344 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819369 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819391 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819414 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819438 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819463 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819486 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819518 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819541 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819566 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819620 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819648 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819673 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819705 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: 
\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819730 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819758 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819781 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819808 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819832 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819857 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819884 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819907 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 
11:55:10.819929 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.819952 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.821753 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.830818 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.813261 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.813448 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.813608 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.813637 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.813883 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.814128 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.814164 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.814602 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.814608 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.814817 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.814899 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.814887 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.815070 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.815148 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.815211 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.815415 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.815465 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.815625 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.815682 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.815847 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.815872 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.815989 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.815988 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.816086 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.816244 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.816428 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.816462 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.816458 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.816488 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.816501 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.816620 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.816756 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.816804 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.816885 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.817491 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.818333 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.818568 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.820037 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.821225 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.821247 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.821343 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.821363 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.821447 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.821460 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.821632 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.821733 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.821916 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.822176 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.822989 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.823110 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.823799 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.824237 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.824907 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.825135 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.825338 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.828675 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.829252 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.829365 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.829398 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.829581 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.829866 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.830201 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.830238 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.830271 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.830398 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.832452 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.832534 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.832715 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.832914 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.833396 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.834066 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.842577 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.844352 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.844742 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.844996 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.845337 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.845353 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.845573 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.845779 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.846325 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.847678 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.848001 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.848063 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.848457 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.849132 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.849410 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.849808 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.850517 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.850710 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.852143 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.852139 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.852347 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.852480 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.852710 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.852934 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.852971 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.853601 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.854842 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: E0122 11:55:10.854884 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:55:11.354860738 +0000 UTC m=+18.932976563 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.855137 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.855393 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.857735 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.858036 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.858716 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.858967 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.859056 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.859241 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.860275 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.860555 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.861217 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.861652 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.861923 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.862114 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.863108 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.863235 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.863259 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: E0122 11:55:10.863372 4773 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.864444 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: E0122 11:55:10.864484 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:11.364464343 +0000 UTC m=+18.942580168 (durationBeforeRetry 500ms). 
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.863617 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.863674 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.864032 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.864075 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.864163 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.864255 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.864551 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.864987 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.865115 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.865502 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.866407 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.866549 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.866747 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.866990 4773 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.868545 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.868688 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.868711 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.868772 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.868921 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.869133 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.869576 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.869730 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: E0122 11:55:10.869891 4773 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 22 11:55:10 crc kubenswrapper[4773]: E0122 11:55:10.869944 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:11.369927988 +0000 UTC m=+18.948043813 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.870716 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.871519 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.871684 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.871822 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.875675 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.876008 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.876080 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.876187 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.876896 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.877601 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.877782 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.877958 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.878137 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.878436 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.878617 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.878788 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.880784 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.881282 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.881785 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.881858 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.882647 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.884374 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.884902 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.886661 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.887352 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.887599 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.887723 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.888004 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.888043 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.888543 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.888941 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.896369 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: E0122 11:55:10.898469 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.898562 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 11:55:10 crc kubenswrapper[4773]: E0122 11:55:10.898573 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 11:55:10 crc kubenswrapper[4773]: E0122 11:55:10.898650 4773 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:10 crc kubenswrapper[4773]: E0122 11:55:10.898713 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:11.398693244 +0000 UTC m=+18.976809139 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.905584 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.906532 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.906667 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.908007 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.910374 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.910727 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.912146 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.913780 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.914918 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.915843 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.915964 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.917843 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
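All three "Failed to update status for pod" entries in this window fail the same way: the API server cannot POST to the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743/pod and gets "connection refused", meaning nothing is listening on that port while the webhook comes back up. A trivial Go sketch of the same connectivity check, with the endpoint taken from the log, follows.

package main

import (
	"fmt"
	"net"
	"time"
)

func main() {
	// Endpoint taken from the webhook URL in the log entries above.
	conn, err := net.DialTimeout("tcp", "127.0.0.1:9743", 2*time.Second)
	if err != nil {
		// While the webhook is down this prints "connection refused",
		// matching the failure in the status-patch log entries.
		fmt.Println("webhook endpoint unreachable:", err)
		return
	}
	conn.Close()
	fmt.Println("webhook endpoint is accepting connections")
}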
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.918240 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.918349 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.918467 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.918631 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.919978 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: E0122 11:55:10.925397 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 11:55:10 crc kubenswrapper[4773]: E0122 11:55:10.925425 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 11:55:10 crc kubenswrapper[4773]: E0122 11:55:10.925439 4773 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:10 crc kubenswrapper[4773]: E0122 11:55:10.925522 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:11.425484897 +0000 UTC m=+19.003600722 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.926495 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.926728 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.931592 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.931969 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.932327 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.932463 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.932642 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.932883 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.933426 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.934141 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.934186 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lznvd\" (UniqueName: \"kubernetes.io/projected/656fa143-a073-42b1-93cf-e093ff7c285c-kube-api-access-lznvd\") pod \"node-resolver-shp5z\" (UID: \"656fa143-a073-42b1-93cf-e093ff7c285c\") " pod="openshift-dns/node-resolver-shp5z" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.934213 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.934261 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/656fa143-a073-42b1-93cf-e093ff7c285c-hosts-file\") pod \"node-resolver-shp5z\" (UID: \"656fa143-a073-42b1-93cf-e093ff7c285c\") " pod="openshift-dns/node-resolver-shp5z" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.934568 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.935239 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.939621 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.939795 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.939951 4773 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.939985 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940001 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940013 4773 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940062 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940077 4773 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940089 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940101 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940118 4773 reconciler_common.go:293] "Volume detached for 
volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940130 4773 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940142 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940153 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940169 4773 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940182 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940194 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940205 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940220 4773 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940231 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940244 4773 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940260 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940272 4773 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940310 4773 reconciler_common.go:293] "Volume detached for 
volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940324 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940340 4773 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940352 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940364 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940376 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940393 4773 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940405 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940417 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940431 4773 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940447 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940458 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940470 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940486 4773 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940498 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940509 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940521 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940560 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940579 4773 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940590 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940601 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940616 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940628 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940641 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940656 4773 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940668 4773 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940679 4773 
reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940691 4773 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940706 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940719 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940748 4773 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940760 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940776 4773 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940789 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940802 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940814 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940829 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940839 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940850 4773 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940867 4773 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" 
(UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940882 4773 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940909 4773 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940923 4773 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940938 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940952 4773 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940967 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940980 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.940998 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.941011 4773 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.941024 4773 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.941037 4773 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.941051 4773 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.941064 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.941076 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.941096 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.941122 4773 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.941133 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.941145 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.941163 4773 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.941183 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.941196 4773 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.941209 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.941225 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.941237 4773 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.941249 4773 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.941265 4773 
reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.941276 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.942139 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.943994 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.945326 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.945396 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.945821 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.949797 4773 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.949855 4773 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.949867 4773 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.949878 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.949888 4773 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.949897 4773 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.949921 4773 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.949930 4773 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.949939 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.949948 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.949957 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.949966 4773 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.949976 4773 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 22 
11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.949996 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950007 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950017 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950028 4773 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950037 4773 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950069 4773 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950077 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950086 4773 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950095 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950104 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950113 4773 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950137 4773 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950146 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath 
\"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950155 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950163 4773 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950171 4773 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950179 4773 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950188 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950197 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950221 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950229 4773 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950237 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950246 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950254 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950264 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950272 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950299 4773 
reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950309 4773 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950317 4773 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950326 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950334 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950342 4773 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950350 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950358 4773 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950378 4773 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950386 4773 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950394 4773 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950407 4773 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950415 4773 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950447 4773 reconciler_common.go:293] "Volume detached for 
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950455 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950463 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950471 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950479 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950487 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950496 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950518 4773 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950527 4773 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950535 4773 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950544 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950554 4773 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950563 4773 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 
11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950571 4773 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950594 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950602 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950610 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950618 4773 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950625 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950634 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950647 4773 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950656 4773 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950679 4773 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950686 4773 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950695 4773 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950721 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950729 4773 reconciler_common.go:293] 
"Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950753 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950761 4773 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950769 4773 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950777 4773 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950790 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950800 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950808 4773 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950830 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950838 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950847 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950855 4773 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950863 4773 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950871 4773 reconciler_common.go:293] "Volume detached for volume 
\"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950879 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950887 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950911 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950919 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950928 4773 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950937 4773 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.950945 4773 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.952768 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.962634 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.976153 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:55:10 crc kubenswrapper[4773]: I0122 11:55:10.983098 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.001967 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.012887 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.024060 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.036679 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22
T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.048102 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.051689 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lznvd\" (UniqueName: \"kubernetes.io/projected/656fa143-a073-42b1-93cf-e093ff7c285c-kube-api-access-lznvd\") pod \"node-resolver-shp5z\" (UID: \"656fa143-a073-42b1-93cf-e093ff7c285c\") " pod="openshift-dns/node-resolver-shp5z" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.051749 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/656fa143-a073-42b1-93cf-e093ff7c285c-hosts-file\") pod \"node-resolver-shp5z\" (UID: \"656fa143-a073-42b1-93cf-e093ff7c285c\") " pod="openshift-dns/node-resolver-shp5z" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.051775 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.051785 4773 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.051795 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.051803 4773 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.051811 4773 reconciler_common.go:293] "Volume detached for volume 
\"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.051825 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.051981 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/656fa143-a073-42b1-93cf-e093ff7c285c-hosts-file\") pod \"node-resolver-shp5z\" (UID: \"656fa143-a073-42b1-93cf-e093ff7c285c\") " pod="openshift-dns/node-resolver-shp5z" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.068981 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.077170 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lznvd\" (UniqueName: \"kubernetes.io/projected/656fa143-a073-42b1-93cf-e093ff7c285c-kube-api-access-lznvd\") pod \"node-resolver-shp5z\" (UID: \"656fa143-a073-42b1-93cf-e093ff7c285c\") " pod="openshift-dns/node-resolver-shp5z" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.080828 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.094824 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.115666 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-shp5z" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.237661 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 11:55:11 crc kubenswrapper[4773]: W0122 11:55:11.250370 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-04e73254490ea2ae56a589e099d87f6850fad283ea628d297fea8f610332fb6a WatchSource:0}: Error finding container 04e73254490ea2ae56a589e099d87f6850fad283ea628d297fea8f610332fb6a: Status 404 returned error can't find the container with id 04e73254490ea2ae56a589e099d87f6850fad283ea628d297fea8f610332fb6a Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.354976 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:55:11 crc kubenswrapper[4773]: E0122 11:55:11.355172 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:55:12.355152681 +0000 UTC m=+19.933268506 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.455721 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.455780 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.455812 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.455838 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" 
(UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:11 crc kubenswrapper[4773]: E0122 11:55:11.455863 4773 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 11:55:11 crc kubenswrapper[4773]: E0122 11:55:11.455937 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:12.455919073 +0000 UTC m=+20.034034898 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 11:55:11 crc kubenswrapper[4773]: E0122 11:55:11.455970 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 11:55:11 crc kubenswrapper[4773]: E0122 11:55:11.455988 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 11:55:11 crc kubenswrapper[4773]: E0122 11:55:11.455999 4773 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:11 crc kubenswrapper[4773]: E0122 11:55:11.456043 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 11:55:11 crc kubenswrapper[4773]: E0122 11:55:11.456074 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 11:55:11 crc kubenswrapper[4773]: E0122 11:55:11.456071 4773 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 11:55:11 crc kubenswrapper[4773]: E0122 11:55:11.456089 4773 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:11 crc kubenswrapper[4773]: E0122 11:55:11.456052 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:12.456041966 +0000 UTC m=+20.034157791 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:11 crc kubenswrapper[4773]: E0122 11:55:11.456334 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:12.456276362 +0000 UTC m=+20.034392187 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 11:55:11 crc kubenswrapper[4773]: E0122 11:55:11.456357 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:12.456350914 +0000 UTC m=+20.034466739 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.584028 4773 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-22 11:50:10 +0000 UTC, rotation deadline is 2026-11-02 02:47:40.039582469 +0000 UTC Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.584109 4773 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6806h52m28.455477395s for next certificate rotation Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.623967 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 00:32:44.741553854 +0000 UTC Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.761493 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1"} Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.761551 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"04e73254490ea2ae56a589e099d87f6850fad283ea628d297fea8f610332fb6a"} Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.763782 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38"} Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.763857 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd"} Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.763870 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"8fe959e27a62000a7714d8793680c35c64ef79721dacb3f46ffc9166d4431178"} Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.767363 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.769631 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828"} Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.774888 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.783915 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-shp5z" event={"ID":"656fa143-a073-42b1-93cf-e093ff7c285c","Type":"ContainerStarted","Data":"905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa"} Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.784190 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-shp5z" event={"ID":"656fa143-a073-42b1-93cf-e093ff7c285c","Type":"ContainerStarted","Data":"b8c6e914e80a5b9f5789646aa64051bdb1958d79d20a1b348d07a1600ce412c0"} Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.789100 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"dd954530b929faee731fc4e75db657c730203b5c1461e4083fa31dbb02bcd242"} Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.791907 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22
T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:11Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.806657 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:11Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.823854 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready 
status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:11Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.838438 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:11Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.854728 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:11Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.866427 4773 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:11Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.880567 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:11Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.904401 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:11Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.922436 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:11Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.939441 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:11Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.956195 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:11Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.968346 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:11Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:11 crc kubenswrapper[4773]: I0122 11:55:11.985194 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54
:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 
11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:11Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.007084 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.022131 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.042199 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.079900 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-hhxm5"] Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.080487 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.082752 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.082751 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.083201 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.083761 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.085194 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.085665 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-9bldd"] Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.086229 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-tsndt"] Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.086464 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.086477 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.087647 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.088580 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.089364 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.089454 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.090150 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.090198 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.090220 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.092633 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-pplsq"] Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.093394 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.098044 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.098155 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.098199 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.098234 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.098389 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.099744 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.099870 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.112354 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.136339 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.152114 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z"
Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.161652 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4szgh\" (UniqueName: \"kubernetes.io/projected/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-kube-api-access-4szgh\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq"
Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.161722 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-multus-conf-dir\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt"
Jan 22 11:55:12 crc kubenswrapper[4773]: I0122
11:55:12.161772 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-env-overrides\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.161797 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-ovnkube-script-lib\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.161838 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-multus-socket-dir-parent\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.161882 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-host-run-k8s-cni-cncf-io\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.161918 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-hostroot\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.161947 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-log-socket\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162014 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/6f173bdf-8981-4274-8d1b-ec68a44fefa7-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162138 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-etc-openvswitch\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162212 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-system-cni-dir\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " 
pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162243 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6f173bdf-8981-4274-8d1b-ec68a44fefa7-system-cni-dir\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162271 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6f173bdf-8981-4274-8d1b-ec68a44fefa7-cni-binary-copy\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162326 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfndz\" (UniqueName: \"kubernetes.io/projected/6f173bdf-8981-4274-8d1b-ec68a44fefa7-kube-api-access-nfndz\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162366 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-multus-cni-dir\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162389 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-host-var-lib-kubelet\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162417 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-run-openvswitch\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162463 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-host-run-netns\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162489 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-host-var-lib-cni-bin\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162538 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-var-lib-openvswitch\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162560 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162582 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6f173bdf-8981-4274-8d1b-ec68a44fefa7-os-release\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162609 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d5a0935c-0094-42bc-a9e7-bf3fd046e23d-proxy-tls\") pod \"machine-config-daemon-hhxm5\" (UID: \"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\") " pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162632 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/73fd13f5-159b-444d-9d03-1e5fdd943673-cni-binary-copy\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162656 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6f173bdf-8981-4274-8d1b-ec68a44fefa7-cnibin\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162697 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-cnibin\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162732 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d5a0935c-0094-42bc-a9e7-bf3fd046e23d-mcd-auth-proxy-config\") pod \"machine-config-daemon-hhxm5\" (UID: \"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\") " pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162750 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-systemd-units\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc 
kubenswrapper[4773]: I0122 11:55:12.162899 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-node-log\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.162985 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-host-run-multus-certs\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.163146 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-run-systemd\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.163173 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-cni-bin\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.163204 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-host-var-lib-cni-multus\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.163229 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-etc-kubernetes\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.163350 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/73fd13f5-159b-444d-9d03-1e5fdd943673-multus-daemon-config\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.163385 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/d5a0935c-0094-42bc-a9e7-bf3fd046e23d-rootfs\") pod \"machine-config-daemon-hhxm5\" (UID: \"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\") " pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.163411 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lphw\" (UniqueName: \"kubernetes.io/projected/d5a0935c-0094-42bc-a9e7-bf3fd046e23d-kube-api-access-7lphw\") pod \"machine-config-daemon-hhxm5\" (UID: \"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\") " 
pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.163433 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-kubelet\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.163454 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-run-ovn-kubernetes\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.163473 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-run-ovn\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.163492 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhmh2\" (UniqueName: \"kubernetes.io/projected/73fd13f5-159b-444d-9d03-1e5fdd943673-kube-api-access-jhmh2\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.163563 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-run-netns\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.163614 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-cni-netd\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.163651 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6f173bdf-8981-4274-8d1b-ec68a44fefa7-tuning-conf-dir\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.163715 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-os-release\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.163767 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-slash\") 
pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.163806 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-ovnkube-config\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.163832 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-ovn-node-metrics-cert\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.195417 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\
\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.240299 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z"
Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.261955 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265438 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4szgh\" (UniqueName: \"kubernetes.io/projected/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-kube-api-access-4szgh\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265474 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-multus-conf-dir\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265500 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-log-socket\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265523 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-env-overrides\") pod \"ovnkube-node-pplsq\" (UID: 
\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265538 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-ovnkube-script-lib\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265557 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-multus-socket-dir-parent\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265572 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-host-run-k8s-cni-cncf-io\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265590 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-hostroot\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265618 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/6f173bdf-8981-4274-8d1b-ec68a44fefa7-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265641 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-etc-openvswitch\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265644 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-log-socket\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265712 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-system-cni-dir\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265713 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-multus-conf-dir\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 
11:55:12.265724 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-hostroot\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265660 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-system-cni-dir\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265787 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-host-run-k8s-cni-cncf-io\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265826 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-etc-openvswitch\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265845 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6f173bdf-8981-4274-8d1b-ec68a44fefa7-system-cni-dir\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265875 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6f173bdf-8981-4274-8d1b-ec68a44fefa7-cni-binary-copy\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265868 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-multus-socket-dir-parent\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265896 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfndz\" (UniqueName: \"kubernetes.io/projected/6f173bdf-8981-4274-8d1b-ec68a44fefa7-kube-api-access-nfndz\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265879 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6f173bdf-8981-4274-8d1b-ec68a44fefa7-system-cni-dir\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265924 4773 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-host-var-lib-kubelet\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.265961 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-multus-cni-dir\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266062 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-host-var-lib-kubelet\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266084 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-multus-cni-dir\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266109 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-run-openvswitch\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266133 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-host-var-lib-cni-bin\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266152 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-host-run-netns\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266180 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-var-lib-openvswitch\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266185 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-run-openvswitch\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266201 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266214 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-host-run-netns\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266221 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6f173bdf-8981-4274-8d1b-ec68a44fefa7-os-release\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266238 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-host-var-lib-cni-bin\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266243 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d5a0935c-0094-42bc-a9e7-bf3fd046e23d-proxy-tls\") pod \"machine-config-daemon-hhxm5\" (UID: \"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\") " pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266262 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-var-lib-openvswitch\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266273 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-cnibin\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266300 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266328 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6f173bdf-8981-4274-8d1b-ec68a44fefa7-os-release\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266310 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/73fd13f5-159b-444d-9d03-1e5fdd943673-cni-binary-copy\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266360 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-env-overrides\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266382 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6f173bdf-8981-4274-8d1b-ec68a44fefa7-cnibin\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266365 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6f173bdf-8981-4274-8d1b-ec68a44fefa7-cnibin\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266417 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-host-run-multus-certs\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266425 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-cnibin\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266440 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d5a0935c-0094-42bc-a9e7-bf3fd046e23d-mcd-auth-proxy-config\") pod \"machine-config-daemon-hhxm5\" (UID: \"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\") " pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266457 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-systemd-units\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266472 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-node-log\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266489 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-run-systemd\") pod \"ovnkube-node-pplsq\" (UID: 
\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266507 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-cni-bin\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266525 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-host-var-lib-cni-multus\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266529 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-ovnkube-script-lib\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266546 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-etc-kubernetes\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266567 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-node-log\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266586 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-run-ovn-kubernetes\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266590 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-systemd-units\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266456 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-host-run-multus-certs\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266608 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/73fd13f5-159b-444d-9d03-1e5fdd943673-multus-daemon-config\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266621 
4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-host-var-lib-cni-multus\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266626 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/d5a0935c-0094-42bc-a9e7-bf3fd046e23d-rootfs\") pod \"machine-config-daemon-hhxm5\" (UID: \"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\") " pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266643 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-run-systemd\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266643 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lphw\" (UniqueName: \"kubernetes.io/projected/d5a0935c-0094-42bc-a9e7-bf3fd046e23d-kube-api-access-7lphw\") pod \"machine-config-daemon-hhxm5\" (UID: \"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\") " pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266672 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-kubelet\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266692 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-run-netns\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266713 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-run-ovn\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266734 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhmh2\" (UniqueName: \"kubernetes.io/projected/73fd13f5-159b-444d-9d03-1e5fdd943673-kube-api-access-jhmh2\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266766 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/6f173bdf-8981-4274-8d1b-ec68a44fefa7-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266794 4773 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6f173bdf-8981-4274-8d1b-ec68a44fefa7-cni-binary-copy\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266802 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-etc-kubernetes\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266772 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6f173bdf-8981-4274-8d1b-ec68a44fefa7-tuning-conf-dir\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266824 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-run-netns\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266845 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-cni-bin\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266852 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-cni-netd\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266857 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-kubelet\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266885 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-run-ovn-kubernetes\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266890 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-ovn-node-metrics-cert\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266910 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-cni-netd\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266888 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/d5a0935c-0094-42bc-a9e7-bf3fd046e23d-rootfs\") pod \"machine-config-daemon-hhxm5\" (UID: \"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\") " pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266919 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-os-release\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266955 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-slash\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266962 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/73fd13f5-159b-444d-9d03-1e5fdd943673-os-release\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266996 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-ovnkube-config\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.266936 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-run-ovn\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.267102 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-slash\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.267395 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d5a0935c-0094-42bc-a9e7-bf3fd046e23d-mcd-auth-proxy-config\") pod \"machine-config-daemon-hhxm5\" (UID: \"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\") " pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.267438 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6f173bdf-8981-4274-8d1b-ec68a44fefa7-tuning-conf-dir\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " 
pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.267438 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/73fd13f5-159b-444d-9d03-1e5fdd943673-cni-binary-copy\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.267503 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/73fd13f5-159b-444d-9d03-1e5fdd943673-multus-daemon-config\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.267598 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-ovnkube-config\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.270665 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d5a0935c-0094-42bc-a9e7-bf3fd046e23d-proxy-tls\") pod \"machine-config-daemon-hhxm5\" (UID: \"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\") " pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.281574 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-ovn-node-metrics-cert\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.300397 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4szgh\" (UniqueName: \"kubernetes.io/projected/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-kube-api-access-4szgh\") pod \"ovnkube-node-pplsq\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.304986 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lphw\" (UniqueName: \"kubernetes.io/projected/d5a0935c-0094-42bc-a9e7-bf3fd046e23d-kube-api-access-7lphw\") pod \"machine-config-daemon-hhxm5\" (UID: \"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\") " pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.305442 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfndz\" (UniqueName: \"kubernetes.io/projected/6f173bdf-8981-4274-8d1b-ec68a44fefa7-kube-api-access-nfndz\") pod \"multus-additional-cni-plugins-9bldd\" (UID: \"6f173bdf-8981-4274-8d1b-ec68a44fefa7\") " pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.306378 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.322873 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhmh2\" (UniqueName: \"kubernetes.io/projected/73fd13f5-159b-444d-9d03-1e5fdd943673-kube-api-access-jhmh2\") pod \"multus-tsndt\" (UID: \"73fd13f5-159b-444d-9d03-1e5fdd943673\") " pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.325002 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.337408 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.353656 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"v
olumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.367532 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:55:12 crc kubenswrapper[4773]: E0122 11:55:12.367912 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-22 11:55:14.3678251 +0000 UTC m=+21.945940935 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.370899 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.387454 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.397969 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.404740 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.406929 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-tsndt" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.413368 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-9bldd" Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.423009 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod73fd13f5_159b_444d_9d03_1e5fdd943673.slice/crio-840062241df15539ff701968bd27190a16bdd2fac7e4fa9c2e8f1fd32a110d82 WatchSource:0}: Error finding container 840062241df15539ff701968bd27190a16bdd2fac7e4fa9c2e8f1fd32a110d82: Status 404 returned error can't find the container with id 840062241df15539ff701968bd27190a16bdd2fac7e4fa9c2e8f1fd32a110d82 Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.423584 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.433241 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.441078 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f8370d6_6cd8_4e4c_8bad_aaaa3ac332d5.slice/crio-a98c03095c6184e8a30264e8a96c3f4785f57162b0323cc2ae78b9842ffb6b0a WatchSource:0}: Error finding container a98c03095c6184e8a30264e8a96c3f4785f57162b0323cc2ae78b9842ffb6b0a: Status 404 returned error can't find the container with id a98c03095c6184e8a30264e8a96c3f4785f57162b0323cc2ae78b9842ffb6b0a Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.451492 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.468680 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.468784 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.468830 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: 
\"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.468851 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.468872 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:12 crc kubenswrapper[4773]: E0122 11:55:12.468991 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 11:55:12 crc kubenswrapper[4773]: E0122 11:55:12.469008 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 11:55:12 crc kubenswrapper[4773]: E0122 11:55:12.469020 4773 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:12 crc kubenswrapper[4773]: E0122 11:55:12.469065 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:14.469051504 +0000 UTC m=+22.047167329 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:12 crc kubenswrapper[4773]: E0122 11:55:12.469085 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 11:55:12 crc kubenswrapper[4773]: E0122 11:55:12.469111 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 11:55:12 crc kubenswrapper[4773]: E0122 11:55:12.469129 4773 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:12 crc kubenswrapper[4773]: E0122 11:55:12.469198 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:14.469174007 +0000 UTC m=+22.047290012 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:12 crc kubenswrapper[4773]: E0122 11:55:12.469275 4773 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 11:55:12 crc kubenswrapper[4773]: E0122 11:55:12.469340 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:14.469328691 +0000 UTC m=+22.047444766 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 11:55:12 crc kubenswrapper[4773]: E0122 11:55:12.469405 4773 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 11:55:12 crc kubenswrapper[4773]: E0122 11:55:12.469436 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2026-01-22 11:55:14.469427804 +0000 UTC m=+22.047543879 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.496465 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.526534 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.548563 4773 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.548901 4773 reflector.go:484] object-"openshift-machine-config-operator"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-machine-config-operator"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.549033 4773 reflector.go:484] object-"openshift-multus"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.549062 4773 reflector.go:484] object-"openshift-multus"/"default-dockercfg-2q5b6": watch of *v1.Secret ended with: very short watch: object-"openshift-multus"/"default-dockercfg-2q5b6": Unexpected watch close - watch lasted less than a second and no items received Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.549084 4773 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl": watch of *v1.Secret ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl": Unexpected watch close - watch lasted less than a second and no items received Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.549109 4773 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovnkube-script-lib": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovnkube-script-lib": Unexpected watch close - watch lasted less than a second and no items received Jan 22 11:55:12 crc 
kubenswrapper[4773]: W0122 11:55:12.549559 4773 reflector.go:484] object-"openshift-machine-config-operator"/"proxy-tls": watch of *v1.Secret ended with: very short watch: object-"openshift-machine-config-operator"/"proxy-tls": Unexpected watch close - watch lasted less than a second and no items received Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.549590 4773 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert": watch of *v1.Secret ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert": Unexpected watch close - watch lasted less than a second and no items received Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.549613 4773 reflector.go:484] object-"openshift-ovn-kubernetes"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.549708 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Patch \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-console/pods/networking-console-plugin-85b44fc459-gdk6g/status\": read tcp 38.102.83.97:43000->38.102.83.97:6443: use of closed network connection" Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.550253 4773 reflector.go:484] object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.556050 4773 reflector.go:484] object-"openshift-multus"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.556153 4773 reflector.go:484] object-"openshift-multus"/"multus-daemon-config": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"multus-daemon-config": Unexpected watch close - watch lasted less than a second and no items received Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.556510 4773 reflector.go:484] object-"openshift-multus"/"cni-copy-resources": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"cni-copy-resources": Unexpected watch close - watch lasted less than a second and no items received Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.556634 4773 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovnkube-config": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovnkube-config": Unexpected watch close - watch lasted less than a second and no items received Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.556701 4773 reflector.go:484] object-"openshift-multus"/"default-cni-sysctl-allowlist": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"default-cni-sysctl-allowlist": Unexpected watch close - watch lasted less than a second and no items received Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.556743 4773 reflector.go:484] object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": watch of *v1.Secret ended with: very short watch: object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": Unexpected watch close - watch lasted less than a second and no items received Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.556791 4773 reflector.go:484] object-"openshift-machine-config-operator"/"kube-rbac-proxy": watch of *v1.ConfigMap ended with: very short watch: 
object-"openshift-machine-config-operator"/"kube-rbac-proxy": Unexpected watch close - watch lasted less than a second and no items received Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.557265 4773 reflector.go:484] object-"openshift-machine-config-operator"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-machine-config-operator"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.557340 4773 reflector.go:484] object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz": watch of *v1.Secret ended with: very short watch: object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz": Unexpected watch close - watch lasted less than a second and no items received Jan 22 11:55:12 crc kubenswrapper[4773]: W0122 11:55:12.557412 4773 reflector.go:484] object-"openshift-ovn-kubernetes"/"env-overrides": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"env-overrides": Unexpected watch close - watch lasted less than a second and no items received Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.587961 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.626396 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 16:46:08.747413853 +0000 UTC Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.630852 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging 
kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-li
b\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\
\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is 
after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.657248 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.657306 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.657264 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:12 crc kubenswrapper[4773]: E0122 11:55:12.657429 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:12 crc kubenswrapper[4773]: E0122 11:55:12.657579 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:12 crc kubenswrapper[4773]: E0122 11:55:12.657726 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.661669 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.662433 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.663711 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.664474 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.665705 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.666261 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.666906 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.667909 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.668554 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.670825 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.671323 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.677061 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.677991 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.678654 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.682111 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.683619 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.684989 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.685434 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.686919 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.687061 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.693974 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.694558 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.695692 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.696173 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.697386 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.697876 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.706253 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.707133 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.707647 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.708801 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.709259 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.713737 4773 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 
11:55:12.713871 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.715822 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.717076 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.717714 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.719608 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.721869 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.722720 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.724227 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.725146 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.726389 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.727169 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.728352 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.729315 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.730164 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.731388 4773 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.733273 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.734776 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.735015 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.736692 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.737651 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.738635 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.740932 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.741920 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.742567 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.759556 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.841208 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.849484 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b"} Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.849544 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6"} Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.849556 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"2c13e72d14f321a5ec2c9be20308204d7ef8176c63310259b683efb38d9b3f3d"} Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.853039 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tsndt" event={"ID":"73fd13f5-159b-444d-9d03-1e5fdd943673","Type":"ContainerStarted","Data":"9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab"} Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.853071 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tsndt" event={"ID":"73fd13f5-159b-444d-9d03-1e5fdd943673","Type":"ContainerStarted","Data":"840062241df15539ff701968bd27190a16bdd2fac7e4fa9c2e8f1fd32a110d82"} Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.858618 4773 generic.go:334] "Generic (PLEG): container finished" podID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerID="262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d" exitCode=0 Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.858683 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerDied","Data":"262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d"} Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.858707 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerStarted","Data":"a98c03095c6184e8a30264e8a96c3f4785f57162b0323cc2ae78b9842ffb6b0a"} Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.886593 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.902590 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" event={"ID":"6f173bdf-8981-4274-8d1b-ec68a44fefa7","Type":"ContainerStarted","Data":"348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285"} Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.902761 4773 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" event={"ID":"6f173bdf-8981-4274-8d1b-ec68a44fefa7","Type":"ContainerStarted","Data":"3df2ad2a1b477aebdf01addad9f16c660c69cd831f0818c7349f481b0123f5da"} Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.920053 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.948257 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.964193 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:12 crc kubenswrapper[4773]: I0122 11:55:12.981265 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.001498 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.018765 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.032735 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.048303 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.065953 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.083719 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.102915 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.120256 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.134421 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.149765 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.164221 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.185538 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z 
is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.199562 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.216563 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.233242 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.354932 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 
11:55:13.380586 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.395129 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.444045 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.532907 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.627899 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 09:44:54.139471728 +0000 UTC
Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.678731 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.702474 4773 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.705052 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.705135 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.705147 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.705269 4773 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.713978 4773 kubelet_node_status.go:115] "Node was previously registered" node="crc"
Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.714395 4773 kubelet_node_status.go:79] "Successfully registered node" node="crc"
Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.717992 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.718054 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.718069 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.718093 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.718106 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:13Z","lastTransitionTime":"2026-01-22T11:55:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 22 11:55:13 crc kubenswrapper[4773]: E0122 11:55:13.735633 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.738513 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.738551 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.738563 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.738579 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.738589 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:13Z","lastTransitionTime":"2026-01-22T11:55:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:13 crc kubenswrapper[4773]: E0122 11:55:13.748883 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.752249 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.752305 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.752317 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.752336 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.752357 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:13Z","lastTransitionTime":"2026-01-22T11:55:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:13 crc kubenswrapper[4773]: E0122 11:55:13.764976 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.770093 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.770160 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.770176 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.770206 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.770224 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:13Z","lastTransitionTime":"2026-01-22T11:55:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.773654 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 22 11:55:13 crc kubenswrapper[4773]: E0122 11:55:13.782893 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.786180 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.786215 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.786225 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.786240 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.786249 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:13Z","lastTransitionTime":"2026-01-22T11:55:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.791847 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 22 11:55:13 crc kubenswrapper[4773]: E0122 11:55:13.798640 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: E0122 11:55:13.798758 4773 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.800661 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.800704 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.800716 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.800734 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.800747 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:13Z","lastTransitionTime":"2026-01-22T11:55:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.811805 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.845395 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.860316 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.878496 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.903351 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.903389 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.903398 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.903418 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.903435 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:13Z","lastTransitionTime":"2026-01-22T11:55:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.906572 4773 generic.go:334] "Generic (PLEG): container finished" podID="6f173bdf-8981-4274-8d1b-ec68a44fefa7" containerID="348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285" exitCode=0 Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.906649 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" event={"ID":"6f173bdf-8981-4274-8d1b-ec68a44fefa7","Type":"ContainerDied","Data":"348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285"} Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.908572 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959"} Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.910719 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerStarted","Data":"f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359"} Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.910760 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerStarted","Data":"fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1"} Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.916856 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.928749 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.941564 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.958146 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.974359 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.984776 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.988983 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:13Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.996295 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 22 11:55:13 crc kubenswrapper[4773]: I0122 11:55:13.999673 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.007387 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.007421 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.007435 4773 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.007451 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.007460 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:14Z","lastTransitionTime":"2026-01-22T11:55:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.009066 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.025976 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.041045 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.059705 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.068212 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.066810 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z 
is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.086263 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.099538 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.100640 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.110783 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.110814 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.110825 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.110844 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.110854 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:14Z","lastTransitionTime":"2026-01-22T11:55:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.114791 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets
/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.131387 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.145750 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.158104 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.171466 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.195684 4773 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.213741 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.214087 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.214162 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.214232 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.214319 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:14Z","lastTransitionTime":"2026-01-22T11:55:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.217921 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.234709 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.254155 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc 
kubenswrapper[4773]: I0122 11:55:14.265848 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.282414 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.309344 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.317000 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.317042 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.317050 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.317064 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.317075 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:14Z","lastTransitionTime":"2026-01-22T11:55:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.363760 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.418616 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-258ll"] Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.419055 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-258ll" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.419713 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.419769 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.419783 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.419810 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.419822 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:14Z","lastTransitionTime":"2026-01-22T11:55:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.421916 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:55:14 crc kubenswrapper[4773]: E0122 11:55:14.422106 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:55:18.422092916 +0000 UTC m=+26.000208741 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.425794 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.430827 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.433266 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.433299 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.470960 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.509537 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.521650 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.521702 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.521713 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.521731 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.521742 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:14Z","lastTransitionTime":"2026-01-22T11:55:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.522738 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.522782 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhfhr\" (UniqueName: \"kubernetes.io/projected/db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c-kube-api-access-nhfhr\") pod \"node-ca-258ll\" (UID: \"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\") " pod="openshift-image-registry/node-ca-258ll" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.522814 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.522880 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.522912 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:14 crc kubenswrapper[4773]: E0122 11:55:14.522995 4773 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.523001 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c-serviceca\") pod \"node-ca-258ll\" (UID: \"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\") " pod="openshift-image-registry/node-ca-258ll" Jan 22 11:55:14 crc kubenswrapper[4773]: E0122 11:55:14.523033 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 11:55:14 crc kubenswrapper[4773]: E0122 11:55:14.523009 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 11:55:14 crc kubenswrapper[4773]: E0122 11:55:14.523130 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 11:55:14 crc 
kubenswrapper[4773]: E0122 11:55:14.523127 4773 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 11:55:14 crc kubenswrapper[4773]: E0122 11:55:14.523087 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:18.523063473 +0000 UTC m=+26.101179478 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.523316 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c-host\") pod \"node-ca-258ll\" (UID: \"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\") " pod="openshift-image-registry/node-ca-258ll" Jan 22 11:55:14 crc kubenswrapper[4773]: E0122 11:55:14.523137 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 11:55:14 crc kubenswrapper[4773]: E0122 11:55:14.531445 4773 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:14 crc kubenswrapper[4773]: E0122 11:55:14.523146 4773 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:14 crc kubenswrapper[4773]: E0122 11:55:14.531412 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:18.531369104 +0000 UTC m=+26.109484929 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 11:55:14 crc kubenswrapper[4773]: E0122 11:55:14.532115 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:18.531530198 +0000 UTC m=+26.109646023 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:14 crc kubenswrapper[4773]: E0122 11:55:14.532161 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:18.532146064 +0000 UTC m=+26.110261889 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.551895 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc 
kubenswrapper[4773]: I0122 11:55:14.589010 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.624912 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.624982 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.624995 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.625030 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.625043 4773 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:14Z","lastTransitionTime":"2026-01-22T11:55:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.629057 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 12:36:55.908944102 +0000 UTC Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.632243 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.632410 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhfhr\" (UniqueName: \"kubernetes.io/projected/db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c-kube-api-access-nhfhr\") pod \"node-ca-258ll\" (UID: \"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\") " pod="openshift-image-registry/node-ca-258ll" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.632492 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c-serviceca\") pod \"node-ca-258ll\" (UID: \"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\") " pod="openshift-image-registry/node-ca-258ll" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.632523 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c-host\") pod \"node-ca-258ll\" (UID: \"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\") " pod="openshift-image-registry/node-ca-258ll" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.632597 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c-host\") pod \"node-ca-258ll\" (UID: \"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\") " pod="openshift-image-registry/node-ca-258ll" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.633604 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c-serviceca\") pod \"node-ca-258ll\" (UID: \"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\") " pod="openshift-image-registry/node-ca-258ll" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.658112 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.658178 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:14 crc kubenswrapper[4773]: E0122 11:55:14.658244 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.658178 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:14 crc kubenswrapper[4773]: E0122 11:55:14.658328 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:14 crc kubenswrapper[4773]: E0122 11:55:14.658433 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.688626 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhfhr\" (UniqueName: \"kubernetes.io/projected/db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c-kube-api-access-nhfhr\") pod \"node-ca-258ll\" (UID: \"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\") " pod="openshift-image-registry/node-ca-258ll" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.697862 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.727948 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.727983 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.727996 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.728012 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.728022 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:14Z","lastTransitionTime":"2026-01-22T11:55:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.729602 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.731848 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-258ll" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.769553 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: W0122 11:55:14.808104 4773 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddb4bd0eb_fc9a_4db0_b1fb_7f01c9f4732c.slice/crio-344c99bbb90c535ddcea4765919a432383a6583156d4d537fddec44b965a307f WatchSource:0}: Error finding container 344c99bbb90c535ddcea4765919a432383a6583156d4d537fddec44b965a307f: Status 404 returned error can't find the container with id 344c99bbb90c535ddcea4765919a432383a6583156d4d537fddec44b965a307f Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.812705 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.831815 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.832370 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.832388 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.832411 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.832422 4773 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:14Z","lastTransitionTime":"2026-01-22T11:55:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.855831 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.888483 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.917666 4773 generic.go:334] "Generic (PLEG): container finished" podID="6f173bdf-8981-4274-8d1b-ec68a44fefa7" containerID="9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b" exitCode=0 Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.917704 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" event={"ID":"6f173bdf-8981-4274-8d1b-ec68a44fefa7","Type":"ContainerDied","Data":"9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b"} Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.918691 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-258ll" event={"ID":"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c","Type":"ContainerStarted","Data":"344c99bbb90c535ddcea4765919a432383a6583156d4d537fddec44b965a307f"} Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.929895 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.930078 4773 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerStarted","Data":"c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d"} Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.930127 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerStarted","Data":"604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d"} Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.930140 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerStarted","Data":"5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4"} Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.930150 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerStarted","Data":"c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d"} Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.935037 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.935091 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.935107 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.935125 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.935137 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:14Z","lastTransitionTime":"2026-01-22T11:55:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:14 crc kubenswrapper[4773]: I0122 11:55:14.975614 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfae
a6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:14Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.007835 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"19
2.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.037431 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.037472 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.037483 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.037501 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.037512 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:15Z","lastTransitionTime":"2026-01-22T11:55:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.049538 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.088254 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.128851 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.140593 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.140635 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.140647 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.140672 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.140687 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:15Z","lastTransitionTime":"2026-01-22T11:55:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.168752 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.218889 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.243630 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.243690 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.243702 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.243724 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.243739 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:15Z","lastTransitionTime":"2026-01-22T11:55:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.253061 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.291104 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.331768 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.345902 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.345950 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.345961 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.345979 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.345990 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:15Z","lastTransitionTime":"2026-01-22T11:55:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.374775 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfae
a6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.410785 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.448480 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.448517 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.448528 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.448546 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.448558 4773 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:15Z","lastTransitionTime":"2026-01-22T11:55:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.450084 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.491754 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin 
routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T
11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\
\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.552274 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.552341 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.552350 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.552366 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.552375 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:15Z","lastTransitionTime":"2026-01-22T11:55:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.629176 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 03:43:23.496700961 +0000 UTC Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.655186 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.655239 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.655248 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.655265 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.655274 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:15Z","lastTransitionTime":"2026-01-22T11:55:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.757973 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.758003 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.758017 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.758033 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.758045 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:15Z","lastTransitionTime":"2026-01-22T11:55:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.860841 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.860881 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.860889 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.860904 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.860912 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:15Z","lastTransitionTime":"2026-01-22T11:55:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.934211 4773 generic.go:334] "Generic (PLEG): container finished" podID="6f173bdf-8981-4274-8d1b-ec68a44fefa7" containerID="e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733" exitCode=0 Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.934269 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" event={"ID":"6f173bdf-8981-4274-8d1b-ec68a44fefa7","Type":"ContainerDied","Data":"e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733"} Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.935836 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-258ll" event={"ID":"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c","Type":"ContainerStarted","Data":"fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737"} Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.946635 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.963658 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.963922 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.963945 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.963955 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.963970 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.963981 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:15Z","lastTransitionTime":"2026-01-22T11:55:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.975419 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:15 crc kubenswrapper[4773]: I0122 11:55:15.987787 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc
32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.004220 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.017071 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.031513 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.053078 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z 
is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.065990 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.066034 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.066048 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.066065 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.066077 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:16Z","lastTransitionTime":"2026-01-22T11:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.067125 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 
11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.081277 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.095069 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.111339 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.126065 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.140589 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.154182 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.168370 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.168582 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.168642 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.168700 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.168755 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:16Z","lastTransitionTime":"2026-01-22T11:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.169998 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},
{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.180354 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.209678 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.249674 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.271041 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.271096 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.271113 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.271134 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.271149 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:16Z","lastTransitionTime":"2026-01-22T11:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.288858 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.350959 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.370832 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.374350 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.374392 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.374404 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.374425 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.374438 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:16Z","lastTransitionTime":"2026-01-22T11:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.411495 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.451544 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.476705 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.476912 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.477017 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.477090 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.477153 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:16Z","lastTransitionTime":"2026-01-22T11:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.488455 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.534574 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.580327 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.580364 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.580375 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.580391 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.580401 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:16Z","lastTransitionTime":"2026-01-22T11:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.629490 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 05:10:34.014788798 +0000 UTC Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.657027 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.657103 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.657139 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:16 crc kubenswrapper[4773]: E0122 11:55:16.657534 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:16 crc kubenswrapper[4773]: E0122 11:55:16.657609 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:16 crc kubenswrapper[4773]: E0122 11:55:16.657728 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.682765 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.682810 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.682822 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.682861 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.682873 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:16Z","lastTransitionTime":"2026-01-22T11:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.785356 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.785399 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.785409 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.785421 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.785433 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:16Z","lastTransitionTime":"2026-01-22T11:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.888574 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.888616 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.888627 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.888642 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.888654 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:16Z","lastTransitionTime":"2026-01-22T11:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.941353 4773 generic.go:334] "Generic (PLEG): container finished" podID="6f173bdf-8981-4274-8d1b-ec68a44fefa7" containerID="97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e" exitCode=0 Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.941442 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" event={"ID":"6f173bdf-8981-4274-8d1b-ec68a44fefa7","Type":"ContainerDied","Data":"97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e"} Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.945508 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerStarted","Data":"2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa"} Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.958889 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.980236 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.991339 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.991699 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.991732 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.991742 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.991760 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:16 crc kubenswrapper[4773]: I0122 11:55:16.991769 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:16Z","lastTransitionTime":"2026-01-22T11:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.003237 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.012598 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.025339 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.038188 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.049963 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.062337 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.079096 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z 
is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.093741 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.093780 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.093789 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.093804 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.093814 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:17Z","lastTransitionTime":"2026-01-22T11:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.095042 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 
11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.105869 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.117198 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.175481 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.179141 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.184524 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.189218 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.196113 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.196144 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.196152 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.196165 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.196174 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:17Z","lastTransitionTime":"2026-01-22T11:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.205599 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.216338 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.228262 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.269046 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.298864 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.298897 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.298907 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.298922 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.298931 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:17Z","lastTransitionTime":"2026-01-22T11:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.307679 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.348214 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.393089 4773 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.403304 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.403338 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.403349 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.403365 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.403378 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:17Z","lastTransitionTime":"2026-01-22T11:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.430958 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.470129 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.505736 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.505994 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.506114 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.506203 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.506327 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:17Z","lastTransitionTime":"2026-01-22T11:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.507978 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.592323 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-releas
e\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.608727 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.608762 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.608770 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.608782 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.608791 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:17Z","lastTransitionTime":"2026-01-22T11:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.613062 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.628432 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.630562 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 06:12:19.599721117 +0000 UTC Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.670882 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.709732 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.711010 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.711101 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.711178 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.711249 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.711328 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:17Z","lastTransitionTime":"2026-01-22T11:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.748679 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.789634 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.814019 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.814056 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" 
Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.814098 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.814114 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.814122 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:17Z","lastTransitionTime":"2026-01-22T11:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.830488 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.867628 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.909709 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.916478 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.916515 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.916526 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.916542 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.916552 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:17Z","lastTransitionTime":"2026-01-22T11:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.950965 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.951509 4773 generic.go:334] "Generic (PLEG): container finished" podID="6f173bdf-8981-4274-8d1b-ec68a44fefa7" containerID="d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc" exitCode=0 Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.952018 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" event={"ID":"6f173bdf-8981-4274-8d1b-ec68a44fefa7","Type":"ContainerDied","Data":"d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc"} Jan 22 11:55:17 crc kubenswrapper[4773]: I0122 11:55:17.999075 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:17Z 
is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.021579 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.021620 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.021631 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.021646 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.021658 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:18Z","lastTransitionTime":"2026-01-22T11:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.030241 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.071617 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.110116 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.124065 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.124104 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.124113 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.124128 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.124138 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:18Z","lastTransitionTime":"2026-01-22T11:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.150929 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:
55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.189207 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.226344 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.226557 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.226619 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.226689 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.226785 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:18Z","lastTransitionTime":"2026-01-22T11:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.230359 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.268660 4773 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.307164 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.329621 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.329664 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.329676 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.329690 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.329701 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:18Z","lastTransitionTime":"2026-01-22T11:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.349346 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.393086 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z 
is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.429833 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.431513 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.431575 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.431587 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.431604 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.431615 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:18Z","lastTransitionTime":"2026-01-22T11:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.468715 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.490911 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:55:18 crc kubenswrapper[4773]: E0122 11:55:18.491095 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:55:26.491080836 +0000 UTC m=+34.069196661 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.508752 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.534801 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.534839 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.534851 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.534866 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.534878 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:18Z","lastTransitionTime":"2026-01-22T11:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.552244 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfn
dz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.590508 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.591868 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.591906 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.591928 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.591949 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:18 crc kubenswrapper[4773]: E0122 11:55:18.592055 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 11:55:18 crc kubenswrapper[4773]: E0122 11:55:18.592073 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 11:55:18 crc kubenswrapper[4773]: E0122 11:55:18.592082 4773 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:18 crc kubenswrapper[4773]: E0122 11:55:18.592123 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:26.592109425 +0000 UTC m=+34.170225250 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:18 crc kubenswrapper[4773]: E0122 11:55:18.592160 4773 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 11:55:18 crc kubenswrapper[4773]: E0122 11:55:18.592211 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:26.592196807 +0000 UTC m=+34.170312632 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 11:55:18 crc kubenswrapper[4773]: E0122 11:55:18.592246 4773 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 11:55:18 crc kubenswrapper[4773]: E0122 11:55:18.592268 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:26.592260899 +0000 UTC m=+34.170376724 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 11:55:18 crc kubenswrapper[4773]: E0122 11:55:18.592435 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 11:55:18 crc kubenswrapper[4773]: E0122 11:55:18.592504 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 11:55:18 crc kubenswrapper[4773]: E0122 11:55:18.592570 4773 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:18 crc kubenswrapper[4773]: E0122 11:55:18.592653 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:26.592643319 +0000 UTC m=+34.170759144 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.630464 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.631546 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 11:06:21.099144468 +0000 UTC Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.637184 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.637223 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.637233 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.637251 4773 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.637264 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:18Z","lastTransitionTime":"2026-01-22T11:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.657832 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:18 crc kubenswrapper[4773]: E0122 11:55:18.657959 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.657979 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.658009 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:18 crc kubenswrapper[4773]: E0122 11:55:18.658064 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:18 crc kubenswrapper[4773]: E0122 11:55:18.658119 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.668773 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.707762 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.739107 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.739137 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.739145 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.739158 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.739167 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:18Z","lastTransitionTime":"2026-01-22T11:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.842124 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.842169 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.842185 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.842204 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.842219 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:18Z","lastTransitionTime":"2026-01-22T11:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.944619 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.944643 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.944651 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.944664 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.944672 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:18Z","lastTransitionTime":"2026-01-22T11:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.959395 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerStarted","Data":"c7e91e464ac26064a183bda2b2342665879ea1236b26efa0ea94c90e108ab8cb"} Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.960410 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.960836 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.964948 4773 generic.go:334] "Generic (PLEG): container finished" podID="6f173bdf-8981-4274-8d1b-ec68a44fefa7" containerID="da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194" exitCode=0 Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.964989 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" event={"ID":"6f173bdf-8981-4274-8d1b-ec68a44fefa7","Type":"ContainerDied","Data":"da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194"} Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.973933 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"last
State\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.981723 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.982702 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.985808 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:18 crc kubenswrapper[4773]: I0122 11:55:18.997543 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:18Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.007754 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.029850 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7e91e464ac26064a183bda2b2342665879ea123
6b26efa0ea94c90e108ab8cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.041119 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.048043 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.048082 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.048095 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.048113 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.048124 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:19Z","lastTransitionTime":"2026-01-22T11:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.051162 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.061550 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.070997 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.108362 4773 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7
ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.149269 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.150775 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.150816 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.150824 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.150838 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.150848 4773 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:19Z","lastTransitionTime":"2026-01-22T11:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.187505 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.231044 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.253159 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.253194 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.253202 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.253217 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.253225 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:19Z","lastTransitionTime":"2026-01-22T11:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.269267 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.315822 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.351226 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.355892 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.355933 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.355945 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.355960 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.355969 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:19Z","lastTransitionTime":"2026-01-22T11:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.390309 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.428670 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.458278 4773 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.458395 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.458406 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.458420 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.458430 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:19Z","lastTransitionTime":"2026-01-22T11:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.476011 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7e91e464ac26064a183bda2b2342665879ea123
6b26efa0ea94c90e108ab8cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.511355 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.549972 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.560585 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.560628 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.560643 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.560659 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.560671 4773 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:19Z","lastTransitionTime":"2026-01-22T11:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.587207 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.630668 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.632577 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 15:33:22.57121421 +0000 UTC Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.663055 4773 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.663094 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.663103 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.663117 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.663127 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:19Z","lastTransitionTime":"2026-01-22T11:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.667517 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.707733 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.749011 
4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.764835 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.764878 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.764892 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.764909 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.764920 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:19Z","lastTransitionTime":"2026-01-22T11:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.789834 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.826707 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.867541 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.867568 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" 
Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.867575 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.867589 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.867598 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:19Z","lastTransitionTime":"2026-01-22T11:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.969413 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.969449 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.969458 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.969471 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.969482 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:19Z","lastTransitionTime":"2026-01-22T11:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.974804 4773 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.975169 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" event={"ID":"6f173bdf-8981-4274-8d1b-ec68a44fefa7","Type":"ContainerStarted","Data":"0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4"} Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.986346 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:19 crc kubenswrapper[4773]: I0122 11:55:19.998460 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:19Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.008971 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:20Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.023324 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:20Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.045626 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:20Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.073465 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.073508 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.073520 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.073537 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.073548 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:20Z","lastTransitionTime":"2026-01-22T11:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.086121 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:20Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.107305 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:20Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.154416 4773 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7e91e464ac26064a183bda2b2342665879ea1236b26efa0ea94c90e108ab8cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:20Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.175997 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.176049 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.176063 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:20 crc 
kubenswrapper[4773]: I0122 11:55:20.176080 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.176094 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:20Z","lastTransitionTime":"2026-01-22T11:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.188836 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:20Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.235269 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:20Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.268080 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:20Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.278667 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.278715 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.278727 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.278745 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.278756 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:20Z","lastTransitionTime":"2026-01-22T11:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.311124 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:20Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.348161 4773 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366
e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:20Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.380840 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.380878 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.380888 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.380903 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.380935 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:20Z","lastTransitionTime":"2026-01-22T11:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.391024 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:20Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.483006 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.483050 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.483060 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.483075 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.483083 4773 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:20Z","lastTransitionTime":"2026-01-22T11:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.584984 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.585020 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.585028 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.585040 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.585048 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:20Z","lastTransitionTime":"2026-01-22T11:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.632724 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 21:08:40.572695787 +0000 UTC Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.657292 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.657308 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:20 crc kubenswrapper[4773]: E0122 11:55:20.657458 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.657308 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:20 crc kubenswrapper[4773]: E0122 11:55:20.657540 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:20 crc kubenswrapper[4773]: E0122 11:55:20.657621 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.687536 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.687574 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.687584 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.687598 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.687608 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:20Z","lastTransitionTime":"2026-01-22T11:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.789354 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.789384 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.789392 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.789406 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.789415 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:20Z","lastTransitionTime":"2026-01-22T11:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.891276 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.891334 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.891347 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.891362 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.891373 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:20Z","lastTransitionTime":"2026-01-22T11:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.977491 4773 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.993600 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.993637 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.993649 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.993663 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:20 crc kubenswrapper[4773]: I0122 11:55:20.993673 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:20Z","lastTransitionTime":"2026-01-22T11:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.096227 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.096267 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.096276 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.096321 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.096336 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:21Z","lastTransitionTime":"2026-01-22T11:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.198860 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.198906 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.198915 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.198929 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.198940 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:21Z","lastTransitionTime":"2026-01-22T11:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.301753 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.301793 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.301803 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.301820 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.301831 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:21Z","lastTransitionTime":"2026-01-22T11:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.403956 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.403989 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.403997 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.404011 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.404022 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:21Z","lastTransitionTime":"2026-01-22T11:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.507486 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.507584 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.507596 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.507612 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.507621 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:21Z","lastTransitionTime":"2026-01-22T11:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.610194 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.610254 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.610265 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.610328 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.610343 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:21Z","lastTransitionTime":"2026-01-22T11:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.633917 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 08:01:59.391361119 +0000 UTC Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.712457 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.712497 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.712509 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.712528 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.712540 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:21Z","lastTransitionTime":"2026-01-22T11:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.817935 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.817983 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.818000 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.818026 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.818073 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:21Z","lastTransitionTime":"2026-01-22T11:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.920411 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.920475 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.920493 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.920517 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.920534 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:21Z","lastTransitionTime":"2026-01-22T11:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.983262 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovnkube-controller/0.log" Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.985881 4773 generic.go:334] "Generic (PLEG): container finished" podID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerID="c7e91e464ac26064a183bda2b2342665879ea1236b26efa0ea94c90e108ab8cb" exitCode=1 Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.985939 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerDied","Data":"c7e91e464ac26064a183bda2b2342665879ea1236b26efa0ea94c90e108ab8cb"} Jan 22 11:55:21 crc kubenswrapper[4773]: I0122 11:55:21.986983 4773 scope.go:117] "RemoveContainer" containerID="c7e91e464ac26064a183bda2b2342665879ea1236b26efa0ea94c90e108ab8cb" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.003505 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.016897 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.022919 4773 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.022956 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.022967 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.022982 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.022992 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:22Z","lastTransitionTime":"2026-01-22T11:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.037478 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7e91e464ac26064a183bda2b2342665879ea123
6b26efa0ea94c90e108ab8cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7e91e464ac26064a183bda2b2342665879ea1236b26efa0ea94c90e108ab8cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:21Z\\\",\\\"message\\\":\\\"pping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 11:55:21.251763 6050 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 11:55:21.251790 6050 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 11:55:21.251795 6050 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 11:55:21.251806 6050 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 11:55:21.251810 6050 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 11:55:21.251829 6050 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 11:55:21.251844 6050 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0122 11:55:21.251834 6050 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0122 11:55:21.251833 6050 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 11:55:21.251856 6050 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0122 11:55:21.251888 6050 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0122 11:55:21.251901 6050 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 11:55:21.251917 6050 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 11:55:21.251959 6050 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 11:55:21.251932 6050 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.049834 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.063109 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.073572 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.088593 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.101952 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.118033 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.125199 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.125260 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.125274 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.125314 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.125332 4773 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:22Z","lastTransitionTime":"2026-01-22T11:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.131220 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.144906 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.156036 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.168465 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.179841 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.227383 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.227419 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.227427 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.227440 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.227448 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:22Z","lastTransitionTime":"2026-01-22T11:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.330164 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.330410 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.330419 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.330437 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.330449 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:22Z","lastTransitionTime":"2026-01-22T11:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.432553 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.432591 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.432606 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.432622 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.432631 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:22Z","lastTransitionTime":"2026-01-22T11:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.534356 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.534402 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.534413 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.534429 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.534441 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:22Z","lastTransitionTime":"2026-01-22T11:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.635086 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 21:03:30.567372469 +0000 UTC Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.636564 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.636606 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.636619 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.636635 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.636650 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:22Z","lastTransitionTime":"2026-01-22T11:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.659669 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.659721 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:22 crc kubenswrapper[4773]: E0122 11:55:22.659818 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.659895 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:22 crc kubenswrapper[4773]: E0122 11:55:22.660072 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:22 crc kubenswrapper[4773]: E0122 11:55:22.660228 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.672435 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.684656 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.699828 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.713946 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.725612 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.737750 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.737783 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.737790 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:22 crc 
kubenswrapper[4773]: I0122 11:55:22.737803 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.737811 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:22Z","lastTransitionTime":"2026-01-22T11:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.741349 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.757007 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.771011 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.782619 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.799458 4773 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7e91e464ac26064a183bda2b2342665879ea1236b26efa0ea94c90e108ab8cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7e91e464ac26064a183bda2b2342665879ea1236b26efa0ea94c90e108ab8cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:21Z\\\",\\\"message\\\":\\\"pping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 11:55:21.251763 6050 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 11:55:21.251790 6050 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 11:55:21.251795 6050 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 11:55:21.251806 6050 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 11:55:21.251810 6050 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 11:55:21.251829 6050 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 11:55:21.251844 6050 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0122 11:55:21.251834 6050 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0122 11:55:21.251833 6050 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 11:55:21.251856 6050 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0122 11:55:21.251888 6050 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0122 11:55:21.251901 6050 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 11:55:21.251917 6050 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 11:55:21.251959 6050 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 11:55:21.251932 6050 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.810850 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8
a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.825843 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.837829 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.839615 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.839649 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.839661 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.839678 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.839691 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:22Z","lastTransitionTime":"2026-01-22T11:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.852957 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:22Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.942143 4773 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.942181 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.942192 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.942206 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.942216 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:22Z","lastTransitionTime":"2026-01-22T11:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.991335 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovnkube-controller/0.log" Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.994133 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerStarted","Data":"76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047"} Jan 22 11:55:22 crc kubenswrapper[4773]: I0122 11:55:22.994251 4773 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.007106 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:23Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.021136 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:23Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.032250 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:23Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.044803 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.044846 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.044857 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.044872 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.044886 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:23Z","lastTransitionTime":"2026-01-22T11:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.045222 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:23Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.056531 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:23Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.075300 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55
e36cf55e0215e30a65dc7047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7e91e464ac26064a183bda2b2342665879ea1236b26efa0ea94c90e108ab8cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:21Z\\\",\\\"message\\\":\\\"pping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 11:55:21.251763 6050 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 11:55:21.251790 6050 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 11:55:21.251795 6050 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 11:55:21.251806 6050 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 11:55:21.251810 6050 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 11:55:21.251829 6050 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 11:55:21.251844 6050 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0122 11:55:21.251834 6050 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0122 11:55:21.251833 6050 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 11:55:21.251856 6050 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0122 11:55:21.251888 6050 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0122 11:55:21.251901 6050 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 11:55:21.251917 6050 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 11:55:21.251959 6050 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 11:55:21.251932 6050 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:23Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.087573 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:23Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.099720 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:23Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.111009 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:23Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.124740 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:23Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.141804 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/et
c/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:23Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.146795 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.146833 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.146843 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.146861 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.146873 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:23Z","lastTransitionTime":"2026-01-22T11:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns 
error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.155220 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\
\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:23Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.166228 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:23Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.177127 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:23Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.249174 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.249220 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.249229 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.249243 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.249252 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:23Z","lastTransitionTime":"2026-01-22T11:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.351192 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.351231 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.351245 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.351261 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.351270 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:23Z","lastTransitionTime":"2026-01-22T11:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.456780 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.456829 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.456842 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.456861 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.456873 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:23Z","lastTransitionTime":"2026-01-22T11:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.559555 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.559599 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.559615 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.559630 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.559641 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:23Z","lastTransitionTime":"2026-01-22T11:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.635799 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 16:39:44.208938711 +0000 UTC Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.662227 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.662268 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.662278 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.662317 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.662328 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:23Z","lastTransitionTime":"2026-01-22T11:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.765538 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.765602 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.765614 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.765640 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.765653 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:23Z","lastTransitionTime":"2026-01-22T11:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.840816 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.840853 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.840863 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.840879 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.840890 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:23Z","lastTransitionTime":"2026-01-22T11:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:23 crc kubenswrapper[4773]: E0122 11:55:23.854061 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:23Z is after 
2025-08-24T17:21:41Z" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.859159 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.859407 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.859493 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.859564 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.859653 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:23Z","lastTransitionTime":"2026-01-22T11:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:23 crc kubenswrapper[4773]: E0122 11:55:23.871353 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[four further "Error updating node status, will retry" records followed at 11:55:23.871353, 11:55:23.888202, 11:55:23.902813 and 11:55:23.917873, each carrying a byte-identical status patch (the same conditions, allocatable/capacity, image list, nodeInfo and runtimeHandlers shown above) and each rejected identically: Internal error occurred: failed calling webhook "node.network-node-identity.openshift.io": failed to call webhook: Post "https://127.0.0.1:9743/node?timeout=10s": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:23Z is after 2025-08-24T17:21:41Z". The same four "Recording event message" records and "Node became not ready" condition preceded each retry (at 11:55:23.875828 through .876274, .891320 through .891438, and .906742 through .907184).]
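Every retry summarized above dies at the same point: the serving certificate of the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, long before the node's clock time of 2026-01-22T11:55:23Z, so the kubelet's status patch can never be admitted. As a minimal diagnostic sketch (not part of the kubelet; the program is mine and assumes it runs on the node itself), the certificate's validity window can be read straight off that endpoint in Go:

// probe_webhook_cert.go: illustrative only. Fetch the webhook's serving
// certificate from the address seen in the errors above and print when it
// expired. InsecureSkipVerify is deliberate: verification is exactly what
// fails here, and we want to inspect the certificate anyway.
package main

import (
    "crypto/tls"
    "fmt"
    "log"
    "time"
)

func main() {
    conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
    if err != nil {
        log.Fatalf("dial: %v", err)
    }
    defer conn.Close()

    certs := conn.ConnectionState().PeerCertificates
    if len(certs) == 0 {
        log.Fatal("no peer certificate presented")
    }
    leaf := certs[0]
    fmt.Printf("subject:   %s\n", leaf.Subject)
    fmt.Printf("notBefore: %s\n", leaf.NotBefore.Format(time.RFC3339))
    fmt.Printf("notAfter:  %s\n", leaf.NotAfter.Format(time.RFC3339))
    fmt.Printf("expired:   %v\n", time.Now().After(leaf.NotAfter))
}

Against the endpoint above this would report notAfter 2025-08-24T17:21:41Z and expired true, matching the x509 error the webhook client keeps returning.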
2025-08-24T17:21:41Z" Jan 22 11:55:23 crc kubenswrapper[4773]: E0122 11:55:23.918211 4773 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.919817 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.919933 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.920002 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.920080 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.920149 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:23Z","lastTransitionTime":"2026-01-22T11:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.998526 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovnkube-controller/1.log" Jan 22 11:55:23 crc kubenswrapper[4773]: I0122 11:55:23.999225 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovnkube-controller/0.log" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.002415 4773 generic.go:334] "Generic (PLEG): container finished" podID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerID="76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047" exitCode=1 Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.002455 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerDied","Data":"76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047"} Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.002516 4773 scope.go:117] "RemoveContainer" containerID="c7e91e464ac26064a183bda2b2342665879ea1236b26efa0ea94c90e108ab8cb" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.007351 4773 scope.go:117] "RemoveContainer" containerID="76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047" Jan 22 11:55:24 crc kubenswrapper[4773]: E0122 11:55:24.007634 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-pplsq_openshift-ovn-kubernetes(7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.021158 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.023690 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.023805 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.023873 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.023951 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.024008 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:24Z","lastTransitionTime":"2026-01-22T11:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.042811 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.056494 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.070909 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.081299 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.093894 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.108447 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.123440 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.126035 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.126067 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.126077 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.126095 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.126108 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:24Z","lastTransitionTime":"2026-01-22T11:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.136608 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.151260 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.166259 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.181626 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.193325 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.217082 4773 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7e91e464ac26064a183bda2b2342665879ea1236b26efa0ea94c90e108ab8cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:21Z\\\",\\\"message\\\":\\\"pping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 11:55:21.251763 6050 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 11:55:21.251790 6050 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 11:55:21.251795 6050 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 11:55:21.251806 6050 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 11:55:21.251810 6050 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 11:55:21.251829 6050 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 11:55:21.251844 6050 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0122 11:55:21.251834 6050 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0122 11:55:21.251833 6050 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 11:55:21.251856 6050 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0122 11:55:21.251888 6050 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0122 11:55:21.251901 6050 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 11:55:21.251917 6050 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 11:55:21.251959 6050 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 11:55:21.251932 6050 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"message\\\":\\\"ok: 6.321µs\\\\nI0122 11:55:22.710468 6184 default_network_controller.go:776] 
Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 11:55:22.710479 6184 factory.go:1336] Added *v1.Pod event handler 3\\\\nI0122 11:55:22.710504 6184 admin_network_policy_controller.go:133] Setting up event handlers for Admin Network Policy\\\\nI0122 11:55:22.710580 6184 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:22.710602 6184 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 11:55:22.710601 6184 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/marketplace-operator-metrics\\\\\\\"}\\\\nI0122 11:55:22.710615 6184 services_controller.go:360] Finished syncing service marketplace-operator-metrics on namespace openshift-marketplace for network=default : 1.669825ms\\\\nI0122 11:55:22.710631 6184 services_controller.go:356] Processing sync for service openshift-marketplace/redhat-marketplace for network=default\\\\nF0122 11:55:22.710653 6184 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.230787 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.230849 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.230860 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.230876 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.230892 4773 setters.go:603] 
"Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:24Z","lastTransitionTime":"2026-01-22T11:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.241888 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6"] Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.242462 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.244531 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.245175 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.250496 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmcm5\" (UniqueName: \"kubernetes.io/projected/4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8-kube-api-access-kmcm5\") pod \"ovnkube-control-plane-749d76644c-8xrc6\" (UID: \"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.250575 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-8xrc6\" (UID: \"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.250619 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-8xrc6\" (UID: \"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.250650 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8-env-overrides\") pod \"ovnkube-control-plane-749d76644c-8xrc6\" (UID: \"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.260743 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.271639 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8xrc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.282827 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.297100 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.309655 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.320723 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.332625 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.332667 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.332678 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.332692 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.332702 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:24Z","lastTransitionTime":"2026-01-22T11:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.333240 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.344828 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.351357 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8-env-overrides\") pod \"ovnkube-control-plane-749d76644c-8xrc6\" (UID: \"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 
11:55:24.351448 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmcm5\" (UniqueName: \"kubernetes.io/projected/4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8-kube-api-access-kmcm5\") pod \"ovnkube-control-plane-749d76644c-8xrc6\" (UID: \"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.351491 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-8xrc6\" (UID: \"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.351527 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-8xrc6\" (UID: \"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.352167 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8-env-overrides\") pod \"ovnkube-control-plane-749d76644c-8xrc6\" (UID: \"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.352430 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-8xrc6\" (UID: \"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.355865 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.357489 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-8xrc6\" (UID: \"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.366927 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.367634 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmcm5\" (UniqueName: \"kubernetes.io/projected/4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8-kube-api-access-kmcm5\") pod \"ovnkube-control-plane-749d76644c-8xrc6\" (UID: \"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.376057 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.392988 4773 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7e91e464ac26064a183bda2b2342665879ea1236b26efa0ea94c90e108ab8cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:21Z\\\",\\\"message\\\":\\\"pping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 11:55:21.251763 6050 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 11:55:21.251790 6050 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 11:55:21.251795 6050 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 11:55:21.251806 6050 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 11:55:21.251810 6050 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 11:55:21.251829 6050 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 11:55:21.251844 6050 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0122 11:55:21.251834 6050 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0122 11:55:21.251833 6050 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 11:55:21.251856 6050 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0122 11:55:21.251888 6050 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0122 11:55:21.251901 6050 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 11:55:21.251917 6050 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 11:55:21.251959 6050 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 11:55:21.251932 6050 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"message\\\":\\\"ok: 6.321µs\\\\nI0122 11:55:22.710468 6184 default_network_controller.go:776] 
Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 11:55:22.710479 6184 factory.go:1336] Added *v1.Pod event handler 3\\\\nI0122 11:55:22.710504 6184 admin_network_policy_controller.go:133] Setting up event handlers for Admin Network Policy\\\\nI0122 11:55:22.710580 6184 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:22.710602 6184 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 11:55:22.710601 6184 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/marketplace-operator-metrics\\\\\\\"}\\\\nI0122 11:55:22.710615 6184 services_controller.go:360] Finished syncing service marketplace-operator-metrics on namespace openshift-marketplace for network=default : 1.669825ms\\\\nI0122 11:55:22.710631 6184 services_controller.go:356] Processing sync for service openshift-marketplace/redhat-marketplace for network=default\\\\nF0122 11:55:22.710653 6184 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.404154 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.414629 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.425062 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:24Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.434685 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.434723 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.434735 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.434750 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.434763 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:24Z","lastTransitionTime":"2026-01-22T11:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.536614 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.536652 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.536660 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.536673 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.536683 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:24Z","lastTransitionTime":"2026-01-22T11:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.555407 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" Jan 22 11:55:24 crc kubenswrapper[4773]: W0122 11:55:24.566495 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d2c33aa_28a3_4929_9cd5_d7022c5f0eb8.slice/crio-3181e37788da9f3b373bcc7b25139138ed21129f6d3c4fa936b9853726f55181 WatchSource:0}: Error finding container 3181e37788da9f3b373bcc7b25139138ed21129f6d3c4fa936b9853726f55181: Status 404 returned error can't find the container with id 3181e37788da9f3b373bcc7b25139138ed21129f6d3c4fa936b9853726f55181 Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.636075 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 03:04:35.712606449 +0000 UTC Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.639722 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.639757 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.639768 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.639787 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.639800 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:24Z","lastTransitionTime":"2026-01-22T11:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.657403 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:24 crc kubenswrapper[4773]: E0122 11:55:24.657510 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.658013 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:24 crc kubenswrapper[4773]: E0122 11:55:24.658163 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.658439 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:24 crc kubenswrapper[4773]: E0122 11:55:24.658506 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.742463 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.742786 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.742797 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.742810 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.742820 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:24Z","lastTransitionTime":"2026-01-22T11:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.845664 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.845707 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.845718 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.845765 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.845779 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:24Z","lastTransitionTime":"2026-01-22T11:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.948792 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.948823 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.948831 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.948845 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:24 crc kubenswrapper[4773]: I0122 11:55:24.948853 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:24Z","lastTransitionTime":"2026-01-22T11:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.008105 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" event={"ID":"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8","Type":"ContainerStarted","Data":"0ddcc51dc7dfeb1f18990d5a1daa85fa7ef773f90b28fc012ac683871d66a634"} Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.008152 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" event={"ID":"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8","Type":"ContainerStarted","Data":"f3d183042e0709480c05e56952a6021a8d816c3c4136509ae049ef868457db54"} Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.008164 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" event={"ID":"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8","Type":"ContainerStarted","Data":"3181e37788da9f3b373bcc7b25139138ed21129f6d3c4fa936b9853726f55181"} Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.010392 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovnkube-controller/1.log" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.019213 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.035746 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.047953 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.050564 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.050600 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.050610 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.050636 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.050646 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:25Z","lastTransitionTime":"2026-01-22T11:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.059113 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.071188 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.081935 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.092462 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.101522 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.116099 4773 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7e91e464ac26064a183bda2b2342665879ea1236b26efa0ea94c90e108ab8cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:21Z\\\",\\\"message\\\":\\\"pping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 11:55:21.251763 6050 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 11:55:21.251790 6050 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 11:55:21.251795 6050 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 11:55:21.251806 6050 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 11:55:21.251810 6050 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 11:55:21.251829 6050 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 11:55:21.251844 6050 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0122 11:55:21.251834 6050 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0122 11:55:21.251833 6050 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 11:55:21.251856 6050 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0122 11:55:21.251888 6050 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0122 11:55:21.251901 6050 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 11:55:21.251917 6050 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 11:55:21.251959 6050 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 11:55:21.251932 6050 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"message\\\":\\\"ok: 6.321µs\\\\nI0122 11:55:22.710468 6184 default_network_controller.go:776] 
Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 11:55:22.710479 6184 factory.go:1336] Added *v1.Pod event handler 3\\\\nI0122 11:55:22.710504 6184 admin_network_policy_controller.go:133] Setting up event handlers for Admin Network Policy\\\\nI0122 11:55:22.710580 6184 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:22.710602 6184 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 11:55:22.710601 6184 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/marketplace-operator-metrics\\\\\\\"}\\\\nI0122 11:55:22.710615 6184 services_controller.go:360] Finished syncing service marketplace-operator-metrics on namespace openshift-marketplace for network=default : 1.669825ms\\\\nI0122 11:55:22.710631 6184 services_controller.go:356] Processing sync for service openshift-marketplace/redhat-marketplace for network=default\\\\nF0122 11:55:22.710653 6184 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.125590 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.136617 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.149513 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.153880 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.153913 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.153921 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.153934 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.153953 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:25Z","lastTransitionTime":"2026-01-22T11:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.164690 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.176226 4773 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d183042e0709480c05e56952a6021a8d816c3c4136509ae049ef868457db54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ddcc51dc7dfeb1f18990d5a1daa85fa7ef773f90b28fc012ac683871d66a634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8xrc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.189246 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.255941 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.256215 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.256309 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.256380 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.256443 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:25Z","lastTransitionTime":"2026-01-22T11:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.341105 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-5tqwr"] Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.341863 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:25 crc kubenswrapper[4773]: E0122 11:55:25.342013 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.352243 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.358583 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.358624 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.358636 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.358654 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.358666 4773 setters.go:603] 
"Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:25Z","lastTransitionTime":"2026-01-22T11:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.358928 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bktm\" (UniqueName: \"kubernetes.io/projected/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-kube-api-access-2bktm\") pod \"network-metrics-daemon-5tqwr\" (UID: \"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\") " pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.359003 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs\") pod \"network-metrics-daemon-5tqwr\" (UID: \"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\") " pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.361863 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168
.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.372844 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.384946 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.397042 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.407168 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.424261 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7e91e464ac26064a183bda2b2342665879ea1236b26efa0ea94c90e108ab8cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:21Z\\\",\\\"message\\\":\\\"pping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 11:55:21.251763 6050 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 11:55:21.251790 6050 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 11:55:21.251795 6050 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 11:55:21.251806 6050 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 11:55:21.251810 6050 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 11:55:21.251829 6050 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 11:55:21.251844 6050 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0122 11:55:21.251834 6050 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0122 11:55:21.251833 6050 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 11:55:21.251856 6050 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0122 11:55:21.251888 6050 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0122 11:55:21.251901 6050 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 11:55:21.251917 6050 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 11:55:21.251959 6050 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 11:55:21.251932 6050 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"message\\\":\\\"ok: 6.321µs\\\\nI0122 11:55:22.710468 6184 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 11:55:22.710479 6184 factory.go:1336] Added *v1.Pod event handler 3\\\\nI0122 11:55:22.710504 6184 admin_network_policy_controller.go:133] Setting up event handlers for Admin Network Policy\\\\nI0122 11:55:22.710580 6184 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:22.710602 6184 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 11:55:22.710601 6184 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/marketplace-operator-metrics\\\\\\\"}\\\\nI0122 11:55:22.710615 6184 services_controller.go:360] Finished syncing service marketplace-operator-metrics on namespace openshift-marketplace for network=default : 1.669825ms\\\\nI0122 11:55:22.710631 6184 services_controller.go:356] Processing sync for service openshift-marketplace/redhat-marketplace for network=default\\\\nF0122 11:55:22.710653 6184 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, 
handler\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0
d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.435752 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.446405 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.456844 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.459514 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs\") pod \"network-metrics-daemon-5tqwr\" (UID: \"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\") " pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.459569 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bktm\" (UniqueName: \"kubernetes.io/projected/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-kube-api-access-2bktm\") pod \"network-metrics-daemon-5tqwr\" (UID: \"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\") " pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:25 crc kubenswrapper[4773]: E0122 11:55:25.459702 4773 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 11:55:25 crc kubenswrapper[4773]: E0122 11:55:25.459784 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs podName:c1ed4b8d-def5-474b-8629-cd0bae7e49a6 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:25.959764232 +0000 UTC m=+33.537880057 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs") pod "network-metrics-daemon-5tqwr" (UID: "c1ed4b8d-def5-474b-8629-cd0bae7e49a6") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.460328 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.460380 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.460390 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.460404 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.460415 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:25Z","lastTransitionTime":"2026-01-22T11:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.471520 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11
\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"start
edAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\
"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.476415 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bktm\" (UniqueName: \"kubernetes.io/projected/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-kube-api-access-2bktm\") pod \"network-metrics-daemon-5tqwr\" (UID: \"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\") " pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.483571 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d183042e0709480c05e56952a6021a8d816c3c4136509ae049ef868457db54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ddcc51dc7dfeb1f18990d5a1daa85fa7ef773f90b28fc012ac683871d66a634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8xrc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 
11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.493304 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5tqwr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.504067 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.517322 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.527753 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.528968 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.539617 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.553339 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.562345 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.562380 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.562391 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.562408 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.562419 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:25Z","lastTransitionTime":"2026-01-22T11:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.565403 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.574662 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.588379 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.602682 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.614962 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.626505 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.637520 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 17:38:29.676561003 +0000 UTC Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.645022 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55
e36cf55e0215e30a65dc7047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7e91e464ac26064a183bda2b2342665879ea1236b26efa0ea94c90e108ab8cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:21Z\\\",\\\"message\\\":\\\"pping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 11:55:21.251763 6050 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 11:55:21.251790 6050 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 11:55:21.251795 6050 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 11:55:21.251806 6050 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 11:55:21.251810 6050 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 11:55:21.251829 6050 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 11:55:21.251844 6050 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0122 11:55:21.251834 6050 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0122 11:55:21.251833 6050 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 11:55:21.251856 6050 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0122 11:55:21.251888 6050 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0122 11:55:21.251901 6050 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 11:55:21.251917 6050 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 11:55:21.251959 6050 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 11:55:21.251932 6050 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"message\\\":\\\"ok: 6.321µs\\\\nI0122 11:55:22.710468 6184 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 11:55:22.710479 6184 factory.go:1336] Added *v1.Pod event handler 3\\\\nI0122 11:55:22.710504 6184 admin_network_policy_controller.go:133] Setting up event handlers for Admin Network Policy\\\\nI0122 11:55:22.710580 6184 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:22.710602 6184 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 11:55:22.710601 6184 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/marketplace-operator-metrics\\\\\\\"}\\\\nI0122 11:55:22.710615 6184 services_controller.go:360] Finished syncing service marketplace-operator-metrics on namespace openshift-marketplace for network=default : 1.669825ms\\\\nI0122 11:55:22.710631 6184 services_controller.go:356] Processing sync for service openshift-marketplace/redhat-marketplace for network=default\\\\nF0122 11:55:22.710653 6184 ovnkube.go:137] failed to run ovnkube: [failed to start 
network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\
\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.659128 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.665721 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.665790 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.665802 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.665816 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.665826 4773 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:25Z","lastTransitionTime":"2026-01-22T11:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.671122 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.685601 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.696430 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d183042e0709480c05e56952a6021a8d816c3c4136509ae049ef868457db54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ddcc51dc7dfeb1f18990d5a1daa85fa7ef773f90b28fc012ac683871d66a634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8xrc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 
11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.711553 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5tqwr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.726192 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z"
Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.736642 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:25Z is after 2025-08-24T17:21:41Z"
Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.767838 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.767878 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.767890 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.767906 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.767918 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:25Z","lastTransitionTime":"2026-01-22T11:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.869932 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.869967 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.869975 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.869989 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.869996 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:25Z","lastTransitionTime":"2026-01-22T11:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
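[editor's note] The two status-patch failures above are ordinary x509 validity-window rejections: the webhook client aborts the TLS handshake because the current time (2026-01-22) is after the serving certificate's NotAfter (2025-08-24). A minimal Go sketch of the same check; the file path is hypothetical, any PEM certificate works:

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Hypothetical path used for illustration only.
	data, err := os.ReadFile("/tmp/webhook-serving-cert.pem")
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	now := time.Now()
	// The same window test that yields "certificate has expired or is not yet valid".
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Printf("invalid: current time %s is outside [%s, %s]\n",
			now.Format(time.RFC3339),
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339))
	} else {
		fmt.Println("certificate is within its validity window")
	}
}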
Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.966195 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs\") pod \"network-metrics-daemon-5tqwr\" (UID: \"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\") " pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:55:25 crc kubenswrapper[4773]: E0122 11:55:25.966372 4773 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 22 11:55:25 crc kubenswrapper[4773]: E0122 11:55:25.966425 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs podName:c1ed4b8d-def5-474b-8629-cd0bae7e49a6 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:26.966408384 +0000 UTC m=+34.544524209 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs") pod "network-metrics-daemon-5tqwr" (UID: "c1ed4b8d-def5-474b-8629-cd0bae7e49a6") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.972619 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.972659 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.972669 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.972686 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:25 crc kubenswrapper[4773]: I0122 11:55:25.972699 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:25Z","lastTransitionTime":"2026-01-22T11:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.075255 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.075302 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.075324 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.075338 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.075346 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:26Z","lastTransitionTime":"2026-01-22T11:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.178526 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.178551 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.178560 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.178573 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.178581 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:26Z","lastTransitionTime":"2026-01-22T11:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.282057 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.282316 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.282413 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.282477 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.282629 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:26Z","lastTransitionTime":"2026-01-22T11:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.385417 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.385459 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.385470 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.385486 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.385499 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:26Z","lastTransitionTime":"2026-01-22T11:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.488165 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.488455 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.488572 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.488650 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.488708 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:26Z","lastTransitionTime":"2026-01-22T11:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.572264 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 22 11:55:26 crc kubenswrapper[4773]: E0122 11:55:26.572662 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:55:42.572631356 +0000 UTC m=+50.150747181 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
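[editor's note] The TearDown failure above is a registry lookup: the kubelet resolves a CSI driver by name from the set of plugins that have registered over the plugin-registration socket, and unmount cannot proceed until kubevirt.io.hostpath-provisioner re-registers after the restart. A hedged sketch of that lookup pattern; types and names here are illustrative, not the kubelet's actual code:

package main

import (
	"fmt"
	"sync"
)

// driverRegistry mimics the kubelet's map of registered CSI drivers.
type driverRegistry struct {
	mu      sync.RWMutex
	drivers map[string]string // driver name -> plugin endpoint
}

// Lookup resolves a driver by name; teardown fails fast when it is absent.
func (r *driverRegistry) Lookup(name string) (string, error) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	ep, ok := r.drivers[name]
	if !ok {
		return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
	}
	return ep, nil
}

func main() {
	// Empty registry: the driver has not re-registered yet, as in the log.
	reg := &driverRegistry{drivers: map[string]string{}}
	if _, err := reg.Lookup("kubevirt.io.hostpath-provisioner"); err != nil {
		fmt.Println("TearDownAt would fail:", err)
	}
}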
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.590723 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.590773 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.590784 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.590800 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.590812 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:26Z","lastTransitionTime":"2026-01-22T11:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.638692 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 02:51:33.664015765 +0000 UTC
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.656971 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.657014 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.656972 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 11:55:26 crc kubenswrapper[4773]: E0122 11:55:26.657128 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 11:55:26 crc kubenswrapper[4773]: E0122 11:55:26.657184 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
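[editor's note] The recurring NetworkPluginNotReady condition throughout this log has one cause: the runtime finds no CNI network configuration on disk, and readiness returns only once the network operator writes a *.conf/*.conflist file into /etc/kubernetes/cni/net.d/. A minimal sketch of that discovery step, in the spirit of libcni's config-file scan; simplified, not the actual implementation:

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// findCNIConfig reports the CNI config files present in dir; the runtime's
// network readiness check fails while this list is empty.
func findCNIConfig(dir string) ([]string, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return nil, err
	}
	var confs []string
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch strings.ToLower(filepath.Ext(e.Name())) {
		case ".conf", ".conflist", ".json":
			confs = append(confs, filepath.Join(dir, e.Name()))
		}
	}
	if len(confs) == 0 {
		return nil, fmt.Errorf("no CNI configuration file in %s", dir)
	}
	return confs, nil
}

func main() {
	if _, err := findCNIConfig("/etc/kubernetes/cni/net.d"); err != nil {
		fmt.Println("network not ready:", err)
	}
}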
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:26 crc kubenswrapper[4773]: E0122 11:55:26.657272 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.673551 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.673599 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.673641 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.673668 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:26 crc kubenswrapper[4773]: E0122 11:55:26.673742 4773 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 11:55:26 crc kubenswrapper[4773]: E0122 11:55:26.673762 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 11:55:26 crc kubenswrapper[4773]: E0122 11:55:26.673783 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 11:55:26 crc kubenswrapper[4773]: E0122 11:55:26.673796 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 11:55:26 crc kubenswrapper[4773]: E0122 11:55:26.673801 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 
11:55:26 crc kubenswrapper[4773]: E0122 11:55:26.673811 4773 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:26 crc kubenswrapper[4773]: E0122 11:55:26.673817 4773 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:26 crc kubenswrapper[4773]: E0122 11:55:26.673802 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:42.673785428 +0000 UTC m=+50.251901253 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 11:55:26 crc kubenswrapper[4773]: E0122 11:55:26.673762 4773 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 11:55:26 crc kubenswrapper[4773]: E0122 11:55:26.673858 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:42.67384768 +0000 UTC m=+50.251963505 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:26 crc kubenswrapper[4773]: E0122 11:55:26.673879 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:42.673871631 +0000 UTC m=+50.251987456 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:26 crc kubenswrapper[4773]: E0122 11:55:26.673896 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:42.673886611 +0000 UTC m=+50.252002436 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.692989 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.693035 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.693044 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.693057 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.693065 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:26Z","lastTransitionTime":"2026-01-22T11:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.794961 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.795015 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.795025 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.795039 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.795048 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:26Z","lastTransitionTime":"2026-01-22T11:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
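[editor's note] The projected.go entries above show the other recurring pattern in this excerpt: a projected volume such as kube-api-access-* is assembled from several sources (service-account token, kube-root-ca.crt ConfigMap, openshift-service-ca.crt ConfigMap), and setup collects one error per missing source, failing with the whole bracketed list. An illustrative aggregate-then-fail sketch; the types and helper here are invented for illustration, not kubelet code:

package main

import (
	"fmt"
	"strings"
)

type source struct {
	namespace, name string
}

// prepareProjected checks every source and reports all missing ones at once,
// mirroring the "[object ... not registered, object ... not registered]" form.
func prepareProjected(volume string, sources []source, registered map[string]bool) error {
	var errs []string
	for _, s := range sources {
		if !registered[s.namespace+"/"+s.name] {
			errs = append(errs, fmt.Sprintf("object %q/%q not registered", s.namespace, s.name))
		}
	}
	if len(errs) > 0 {
		return fmt.Errorf("error preparing data for projected volume %s: [%s]",
			volume, strings.Join(errs, ", "))
	}
	return nil
}

func main() {
	srcs := []source{
		{"openshift-network-diagnostics", "kube-root-ca.crt"},
		{"openshift-network-diagnostics", "openshift-service-ca.crt"},
	}
	// Empty registry: both sources missing, so both errors are reported together.
	fmt.Println(prepareProjected("kube-api-access-cqllr", srcs, map[string]bool{}))
}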
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.896930 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.896973 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.897098 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.897113 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.897125 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:26Z","lastTransitionTime":"2026-01-22T11:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:26 crc kubenswrapper[4773]: I0122 11:55:26.977469 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs\") pod \"network-metrics-daemon-5tqwr\" (UID: \"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\") " pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:55:26 crc kubenswrapper[4773]: E0122 11:55:26.977829 4773 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 22 11:55:26 crc kubenswrapper[4773]: E0122 11:55:26.977934 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs podName:c1ed4b8d-def5-474b-8629-cd0bae7e49a6 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:28.977908991 +0000 UTC m=+36.556024886 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs") pod "network-metrics-daemon-5tqwr" (UID: "c1ed4b8d-def5-474b-8629-cd0bae7e49a6") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.001713 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.001786 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.001806 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.001824 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.001838 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:27Z","lastTransitionTime":"2026-01-22T11:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.107182 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.107234 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.107246 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.107265 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.107276 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:27Z","lastTransitionTime":"2026-01-22T11:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
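[editor's note] Note the durationBeforeRetry progression for this metrics-certs volume: 1s earlier, 2s here, 4s later in this log. The kubelet's nestedpendingoperations doubles the per-operation delay after each failure up to a cap; the 16s entries above belong to operations that had already backed off before this excerpt begins. A minimal sketch of that bookkeeping, with illustrative initial/cap constants:

package main

import (
	"fmt"
	"time"
)

// backoff doubles its delay after every failed attempt, up to max.
type backoff struct {
	delay, max time.Duration
}

func (b *backoff) next() time.Duration {
	d := b.delay
	b.delay *= 2
	if b.delay > b.max {
		b.delay = b.max
	}
	return d
}

func main() {
	b := &backoff{delay: time.Second, max: 16 * time.Second}
	for i := 0; i < 6; i++ {
		// Prints 1s, 2s, 4s, 8s, 16s, 16s: the curve the log's
		// "No retries permitted until ..." timestamps trace out.
		fmt.Println("durationBeforeRetry", b.next())
	}
}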
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.210760 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.210819 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.210835 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.210866 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.210884 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:27Z","lastTransitionTime":"2026-01-22T11:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.313625 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.313672 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.313684 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.313702 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.313713 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:27Z","lastTransitionTime":"2026-01-22T11:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.416266 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.416335 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.416347 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.416365 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.416377 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:27Z","lastTransitionTime":"2026-01-22T11:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.519232 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.519266 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.519277 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.519325 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.519336 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:27Z","lastTransitionTime":"2026-01-22T11:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.622104 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.622156 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.622168 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.622188 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.622203 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:27Z","lastTransitionTime":"2026-01-22T11:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.639103 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 00:47:25.023156341 +0000 UTC
Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.657717 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:55:27 crc kubenswrapper[4773]: E0122 11:55:27.657882 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6"
pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.725353 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.725414 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.725440 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.725469 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.725486 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:27Z","lastTransitionTime":"2026-01-22T11:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.827680 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.827763 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.827781 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.827810 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.827826 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:27Z","lastTransitionTime":"2026-01-22T11:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.930328 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.930406 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.930434 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.930462 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:27 crc kubenswrapper[4773]: I0122 11:55:27.930482 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:27Z","lastTransitionTime":"2026-01-22T11:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.032997 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.033046 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.033058 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.033075 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.033090 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:28Z","lastTransitionTime":"2026-01-22T11:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.135381 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.135432 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.135444 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.135462 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.135476 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:28Z","lastTransitionTime":"2026-01-22T11:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.238021 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.238140 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.238165 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.238200 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.238223 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:28Z","lastTransitionTime":"2026-01-22T11:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.340094 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.340139 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.340149 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.340172 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.340182 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:28Z","lastTransitionTime":"2026-01-22T11:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.442969 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.443037 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.443055 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.443074 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.443086 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:28Z","lastTransitionTime":"2026-01-22T11:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.546328 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.546386 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.546398 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.546418 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.546431 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:28Z","lastTransitionTime":"2026-01-22T11:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.639714 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 05:53:28.849193535 +0000 UTC Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.648681 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.648744 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.648755 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.648769 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.648777 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:28Z","lastTransitionTime":"2026-01-22T11:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.658080 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.658136 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.658146 4773 util.go:30] "No sandbox for pod can be found. 
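[editor's note] The certificate_manager lines deserve a second look: the kubelet-serving certificate always expires 2026-02-24 05:53:03, yet each sync logs a different rotation deadline (2026-01-13, 2025-11-13, 2025-12-11, ...). That is the client-go certificate manager re-rolling a jittered deadline at a fraction of the certificate's lifetime so a fleet does not rotate in lockstep; since every candidate deadline here is already in the past relative to 2026-01-22, rotation is due immediately. A hedged sketch of that computation; the 70-90% window and the assumed issuance date are illustrative, not the exact upstream constants:

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a random point in roughly the 70-90% band of the
// certificate's lifetime (illustrative constants).
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	frac := 0.7 + 0.2*rand.Float64()
	return notBefore.Add(time.Duration(frac * float64(total)))
}

func main() {
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)
	// Assumed one-year validity; the log does not show the issuance time.
	notBefore := notAfter.AddDate(-1, 0, 0)
	now := time.Date(2026, 1, 22, 11, 55, 28, 0, time.UTC)
	for i := 0; i < 3; i++ {
		d := rotationDeadline(notBefore, notAfter)
		// Each roll lands on a different deadline, as in the log.
		fmt.Printf("rotation deadline %s, rotate now: %v\n", d, now.After(d))
	}
}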
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:28 crc kubenswrapper[4773]: E0122 11:55:28.658341 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:28 crc kubenswrapper[4773]: E0122 11:55:28.658472 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:28 crc kubenswrapper[4773]: E0122 11:55:28.658540 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.751645 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.751723 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.751737 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.751757 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.751771 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:28Z","lastTransitionTime":"2026-01-22T11:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.854435 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.854526 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.854543 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.854568 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.854585 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:28Z","lastTransitionTime":"2026-01-22T11:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.957497 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.957582 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.957607 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.957635 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:28 crc kubenswrapper[4773]: I0122 11:55:28.957655 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:28Z","lastTransitionTime":"2026-01-22T11:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.009084 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs\") pod \"network-metrics-daemon-5tqwr\" (UID: \"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\") " pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:29 crc kubenswrapper[4773]: E0122 11:55:29.009262 4773 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 11:55:29 crc kubenswrapper[4773]: E0122 11:55:29.009442 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs podName:c1ed4b8d-def5-474b-8629-cd0bae7e49a6 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:33.009403642 +0000 UTC m=+40.587519547 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs") pod "network-metrics-daemon-5tqwr" (UID: "c1ed4b8d-def5-474b-8629-cd0bae7e49a6") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.060029 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.060071 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.060083 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.060099 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.060111 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:29Z","lastTransitionTime":"2026-01-22T11:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.162997 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.163054 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.163065 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.163082 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.163092 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:29Z","lastTransitionTime":"2026-01-22T11:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.265495 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.265525 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.265534 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.265547 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.265555 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:29Z","lastTransitionTime":"2026-01-22T11:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.368252 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.368280 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.368308 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.368321 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.368330 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:29Z","lastTransitionTime":"2026-01-22T11:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.471993 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.472053 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.472069 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.472092 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.472111 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:29Z","lastTransitionTime":"2026-01-22T11:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.575023 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.575067 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.575081 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.575100 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.575116 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:29Z","lastTransitionTime":"2026-01-22T11:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.640466 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 12:50:26.31219402 +0000 UTC Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.657872 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:29 crc kubenswrapper[4773]: E0122 11:55:29.658058 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.677476 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.677519 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.677538 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.677562 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.677580 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:29Z","lastTransitionTime":"2026-01-22T11:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.779967 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.780007 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.780016 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.780032 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.780043 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:29Z","lastTransitionTime":"2026-01-22T11:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.882454 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.882513 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.882524 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.882537 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.882546 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:29Z","lastTransitionTime":"2026-01-22T11:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.984679 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.984720 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.984732 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.984746 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:29 crc kubenswrapper[4773]: I0122 11:55:29.984759 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:29Z","lastTransitionTime":"2026-01-22T11:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.087005 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.087044 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.087062 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.087080 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.087092 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:30Z","lastTransitionTime":"2026-01-22T11:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.188981 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.189031 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.189044 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.189060 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.189071 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:30Z","lastTransitionTime":"2026-01-22T11:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.291043 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.291077 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.291084 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.291098 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.291107 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:30Z","lastTransitionTime":"2026-01-22T11:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.393182 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.393221 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.393231 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.393246 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.393261 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:30Z","lastTransitionTime":"2026-01-22T11:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.495160 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.495195 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.495226 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.495251 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.495263 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:30Z","lastTransitionTime":"2026-01-22T11:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.597014 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.597061 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.597075 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.597091 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.597102 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:30Z","lastTransitionTime":"2026-01-22T11:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.640872 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 14:16:50.820027106 +0000 UTC Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.657536 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.657538 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:30 crc kubenswrapper[4773]: E0122 11:55:30.657714 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:30 crc kubenswrapper[4773]: E0122 11:55:30.657756 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.657545 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:30 crc kubenswrapper[4773]: E0122 11:55:30.657819 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.699467 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.699540 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.699552 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.699568 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.699578 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:30Z","lastTransitionTime":"2026-01-22T11:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.802218 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.802278 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.802312 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.802329 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.802341 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:30Z","lastTransitionTime":"2026-01-22T11:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.904742 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.904782 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.904793 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.904812 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:30 crc kubenswrapper[4773]: I0122 11:55:30.904828 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:30Z","lastTransitionTime":"2026-01-22T11:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.007213 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.007259 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.007270 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.007314 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.007327 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:31Z","lastTransitionTime":"2026-01-22T11:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.110256 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.110557 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.110619 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.110702 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.110764 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:31Z","lastTransitionTime":"2026-01-22T11:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.213804 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.214081 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.214142 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.214200 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.214253 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:31Z","lastTransitionTime":"2026-01-22T11:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.316099 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.316137 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.316147 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.316162 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.316174 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:31Z","lastTransitionTime":"2026-01-22T11:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.318959 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.319694 4773 scope.go:117] "RemoveContainer" containerID="76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047" Jan 22 11:55:31 crc kubenswrapper[4773]: E0122 11:55:31.319830 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-pplsq_openshift-ovn-kubernetes(7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.334517 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"
name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:31Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.345728 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:31Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.360546 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:31Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.371955 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d183042e0709480c05e56952a6021a8d816c3c4136509ae049ef868457db54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ddcc51dc7dfeb1f18990d5a1daa85fa7ef773f90b28fc012ac683871d66a634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8xrc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:31Z is after 2025-08-24T17:21:41Z" Jan 22 
11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.383763 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5tqwr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:31Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.394896 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:31Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.405094 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:31Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.418057 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.418100 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.418109 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.418122 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.418133 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:31Z","lastTransitionTime":"2026-01-22T11:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.418765 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:31Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.430390 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:31Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.441178 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:31Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.453234 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:31Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.464241 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:31Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.474938 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:31Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.485715 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:31Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.505438 4773 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"message\\\":\\\"ok: 6.321µs\\\\nI0122 11:55:22.710468 6184 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 11:55:22.710479 6184 factory.go:1336] Added *v1.Pod event handler 3\\\\nI0122 11:55:22.710504 6184 admin_network_policy_controller.go:133] Setting up event handlers for Admin Network Policy\\\\nI0122 11:55:22.710580 6184 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:22.710602 6184 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 11:55:22.710601 6184 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/marketplace-operator-metrics\\\\\\\"}\\\\nI0122 11:55:22.710615 6184 services_controller.go:360] Finished syncing service marketplace-operator-metrics on namespace openshift-marketplace for network=default : 1.669825ms\\\\nI0122 11:55:22.710631 6184 services_controller.go:356] Processing sync for service openshift-marketplace/redhat-marketplace for network=default\\\\nF0122 11:55:22.710653 6184 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pplsq_openshift-ovn-kubernetes(7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:31Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.519659 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:31Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.520689 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.520833 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.520910 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.520982 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.521048 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:31Z","lastTransitionTime":"2026-01-22T11:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.624235 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.624569 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.624663 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.624762 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.624861 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:31Z","lastTransitionTime":"2026-01-22T11:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.641759 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 21:12:26.503289394 +0000 UTC Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.657221 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:31 crc kubenswrapper[4773]: E0122 11:55:31.657495 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.727062 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.727106 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.727115 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.727132 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.727143 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:31Z","lastTransitionTime":"2026-01-22T11:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.829390 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.829435 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.829445 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.829462 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.829473 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:31Z","lastTransitionTime":"2026-01-22T11:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.932245 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.932278 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.932306 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.932321 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:31 crc kubenswrapper[4773]: I0122 11:55:31.932332 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:31Z","lastTransitionTime":"2026-01-22T11:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.035100 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.035136 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.035147 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.035164 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.035175 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:32Z","lastTransitionTime":"2026-01-22T11:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.136934 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.136978 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.136986 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.136999 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.137009 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:32Z","lastTransitionTime":"2026-01-22T11:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
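
Interleaved with the webhook errors, the node keeps being marked NotReady because /etc/kubernetes/cni/net.d/ contains no CNI configuration: the ovnkube-controller container that would write it is in CrashLoopBackOff (see the ovnkube-node-pplsq status earlier in this log). As a rough sketch, and only under the assumption that "network ready" reduces to "at least one CNI conf file exists in the conf directory", the check behind the NetworkPluginNotReady message looks like the following; the real probe lives in the container runtime (CRI-O here), not in this snippet.

// cnicheck.go: approximate the "no CNI configuration file" readiness
// probe. Assumption: the runtime considers the network ready once the
// conf dir contains at least one *.conf, *.conflist or *.json file.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func cniReady(confDir string) (bool, error) {
	entries, err := os.ReadDir(confDir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ready, err := cniReady("/etc/kubernetes/cni/net.d")
	if err != nil || !ready {
		fmt.Println("NetworkReady=false: no CNI configuration file. Has your network provider started?")
		return
	}
	fmt.Println("NetworkReady=true")
}

This explains the pairing seen throughout the log: until a conf file appears, every sync of a pod that needs a network sandbox fails with "network is not ready", and the node Ready condition keeps flapping to False on each heartbeat.
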
Has your network provider started?"} Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.239579 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.239635 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.239651 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.239674 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.239692 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:32Z","lastTransitionTime":"2026-01-22T11:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.341812 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.341857 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.341866 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.341882 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.341891 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:32Z","lastTransitionTime":"2026-01-22T11:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.444328 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.444664 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.444776 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.444883 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.444963 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:32Z","lastTransitionTime":"2026-01-22T11:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.547230 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.547340 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.547355 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.547403 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.547414 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:32Z","lastTransitionTime":"2026-01-22T11:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.643467 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 22:17:31.678894269 +0000 UTC Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.651118 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.651150 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.651160 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.651174 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.651184 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:32Z","lastTransitionTime":"2026-01-22T11:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.657459 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:32 crc kubenswrapper[4773]: E0122 11:55:32.657830 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.657918 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.657952 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:32 crc kubenswrapper[4773]: E0122 11:55:32.658493 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:32 crc kubenswrapper[4773]: E0122 11:55:32.658415 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.675747 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162
f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPa
th\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cab
b194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:32Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.687076 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d183042e0709480c05e56952a6021a8d816c3c4136509ae049ef868457db54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ddcc51dc7dfeb1f18990d5a1daa85fa7ef773f90b28fc012ac683871d66a634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f3
6cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8xrc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:32Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.696954 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5tqwr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:32Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.708252 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:32Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.722498 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:32Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.741784 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:32Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.753660 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.753709 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.753721 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.753738 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.753750 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:32Z","lastTransitionTime":"2026-01-22T11:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.754536 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:32Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.765167 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:32Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.777589 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:32Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.791350 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:32Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.802858 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:32Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.817024 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:32Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.834677 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"message\\\":\\\"ok: 6.321µs\\\\nI0122 11:55:22.710468 6184 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 11:55:22.710479 6184 factory.go:1336] Added *v1.Pod event handler 3\\\\nI0122 11:55:22.710504 6184 admin_network_policy_controller.go:133] Setting up event handlers for Admin Network Policy\\\\nI0122 11:55:22.710580 6184 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:22.710602 6184 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 11:55:22.710601 6184 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/marketplace-operator-metrics\\\\\\\"}\\\\nI0122 11:55:22.710615 6184 services_controller.go:360] Finished syncing service marketplace-operator-metrics on namespace openshift-marketplace for network=default : 1.669825ms\\\\nI0122 11:55:22.710631 6184 services_controller.go:356] Processing sync for service openshift-marketplace/redhat-marketplace for network=default\\\\nF0122 11:55:22.710653 6184 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer 
during admin network policy controller initialization, handler\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-pplsq_openshift-ovn-kubernetes(7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"ku
be-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:32Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.852301 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:32Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.855667 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.855699 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.855707 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.855723 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.855735 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:32Z","lastTransitionTime":"2026-01-22T11:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.864848 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:32Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.875440 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:32Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.957925 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.957980 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.957999 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.958021 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:32 crc kubenswrapper[4773]: I0122 11:55:32.958042 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:32Z","lastTransitionTime":"2026-01-22T11:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.046928 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs\") pod \"network-metrics-daemon-5tqwr\" (UID: \"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\") " pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:33 crc kubenswrapper[4773]: E0122 11:55:33.047076 4773 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 11:55:33 crc kubenswrapper[4773]: E0122 11:55:33.047175 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs podName:c1ed4b8d-def5-474b-8629-cd0bae7e49a6 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:41.047149179 +0000 UTC m=+48.625265044 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs") pod "network-metrics-daemon-5tqwr" (UID: "c1ed4b8d-def5-474b-8629-cd0bae7e49a6") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.059872 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.059913 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.059924 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.059938 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.059947 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:33Z","lastTransitionTime":"2026-01-22T11:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.162638 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.162686 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.162699 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.162716 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.162727 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:33Z","lastTransitionTime":"2026-01-22T11:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.265275 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.265352 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.265368 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.265385 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.265396 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:33Z","lastTransitionTime":"2026-01-22T11:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.367804 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.367847 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.367857 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.367875 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.367889 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:33Z","lastTransitionTime":"2026-01-22T11:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.470150 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.470208 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.470217 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.470232 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.470241 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:33Z","lastTransitionTime":"2026-01-22T11:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.573920 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.574271 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.574387 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.574508 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.574577 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:33Z","lastTransitionTime":"2026-01-22T11:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.645410 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 04:36:14.837697185 +0000 UTC Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.657455 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:33 crc kubenswrapper[4773]: E0122 11:55:33.657661 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.677334 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.677379 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.677390 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.677405 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.677414 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:33Z","lastTransitionTime":"2026-01-22T11:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.779650 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.779682 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.779690 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.779703 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.779711 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:33Z","lastTransitionTime":"2026-01-22T11:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.882812 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.882853 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.882865 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.882882 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.882893 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:33Z","lastTransitionTime":"2026-01-22T11:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.985409 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.985443 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.985453 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.985467 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:33 crc kubenswrapper[4773]: I0122 11:55:33.985480 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:33Z","lastTransitionTime":"2026-01-22T11:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.088747 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.088809 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.088826 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.088846 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.088862 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:34Z","lastTransitionTime":"2026-01-22T11:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.137107 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.137150 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.137159 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.137173 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.137182 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:34Z","lastTransitionTime":"2026-01-22T11:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:34 crc kubenswrapper[4773]: E0122 11:55:34.151435 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:34Z is after 
2025-08-24T17:21:41Z" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.155214 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.155298 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.155310 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.155326 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.155355 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:34Z","lastTransitionTime":"2026-01-22T11:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:34 crc kubenswrapper[4773]: E0122 11:55:34.167119 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:34Z is after 
2025-08-24T17:21:41Z" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.170524 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.170561 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.170587 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.170605 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.170617 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:34Z","lastTransitionTime":"2026-01-22T11:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:34 crc kubenswrapper[4773]: E0122 11:55:34.181974 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:34Z is after 
2025-08-24T17:21:41Z" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.186678 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.186729 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.186762 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.186783 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.186794 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:34Z","lastTransitionTime":"2026-01-22T11:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:34 crc kubenswrapper[4773]: E0122 11:55:34.199146 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:34Z is after 
2025-08-24T17:21:41Z" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.203228 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.203264 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.203277 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.203316 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.203327 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:34Z","lastTransitionTime":"2026-01-22T11:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:34 crc kubenswrapper[4773]: E0122 11:55:34.214871 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:34Z is after 
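Every status-patch attempt above fails the same way: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a serving certificate whose NotAfter (2025-08-24T17:21:41Z) lies before the node's clock (2026-01-22T11:55:34Z), so Go's TLS verifier rejects the handshake with "x509: certificate has expired or is not yet valid". The sketch below reproduces that validity-window comparison; the PEM path is a placeholder, not taken from this log.

    // certwindow.go: minimal sketch of the NotBefore/NotAfter check behind
    // "x509: certificate has expired or is not yet valid".
    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "log"
        "os"
        "time"
    )

    func main() {
        raw, err := os.ReadFile("serving-cert.pem") // placeholder path, not from the log
        if err != nil {
            log.Fatal(err)
        }
        block, _ := pem.Decode(raw)
        if block == nil {
            log.Fatal("no PEM block found")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            log.Fatal(err)
        }
        now := time.Now().UTC()
        switch {
        case now.Before(cert.NotBefore):
            fmt.Printf("not yet valid: current time %s is before %s\n",
                now.Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
        case now.After(cert.NotAfter):
            // The case hit in this log: 2026-01-22T11:55:34Z is after 2025-08-24T17:21:41Z.
            fmt.Printf("expired: current time %s is after %s\n",
                now.Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
        default:
            fmt.Println("certificate is within its validity window")
        }
    }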
2025-08-24T17:21:41Z" Jan 22 11:55:34 crc kubenswrapper[4773]: E0122 11:55:34.215257 4773 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.216940 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.217063 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.217141 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.217236 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.217326 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:34Z","lastTransitionTime":"2026-01-22T11:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.319950 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.319994 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.320002 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.320018 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.320028 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:34Z","lastTransitionTime":"2026-01-22T11:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.422477 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.422521 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.422531 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.422546 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.422561 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:34Z","lastTransitionTime":"2026-01-22T11:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.525357 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.525420 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.525428 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.525441 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.525451 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:34Z","lastTransitionTime":"2026-01-22T11:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.627569 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.627827 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.627887 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.627944 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.627998 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:34Z","lastTransitionTime":"2026-01-22T11:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.647067 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 23:47:27.846398306 +0000 UTC Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.657596 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:34 crc kubenswrapper[4773]: E0122 11:55:34.657736 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.657837 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:34 crc kubenswrapper[4773]: E0122 11:55:34.657916 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.658162 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:34 crc kubenswrapper[4773]: E0122 11:55:34.658477 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.730857 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.730935 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.730951 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.730979 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.731004 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:34Z","lastTransitionTime":"2026-01-22T11:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.834068 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.834135 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.834153 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.834178 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.834196 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:34Z","lastTransitionTime":"2026-01-22T11:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.936918 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.936994 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.937010 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.937032 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:34 crc kubenswrapper[4773]: I0122 11:55:34.937047 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:34Z","lastTransitionTime":"2026-01-22T11:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.039007 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.039232 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.039319 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.039416 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.039472 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:35Z","lastTransitionTime":"2026-01-22T11:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.141668 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.141710 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.141718 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.141733 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.141742 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:35Z","lastTransitionTime":"2026-01-22T11:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.244257 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.244364 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.244388 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.244417 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.244435 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:35Z","lastTransitionTime":"2026-01-22T11:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.346256 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.346309 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.346319 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.346331 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.346339 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:35Z","lastTransitionTime":"2026-01-22T11:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.448866 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.448894 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.448902 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.448914 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.448924 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:35Z","lastTransitionTime":"2026-01-22T11:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.550943 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.550996 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.551005 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.551019 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.551028 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:35Z","lastTransitionTime":"2026-01-22T11:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.647547 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 17:07:54.145917096 +0000 UTC Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.653685 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.653735 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.653746 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.653767 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.654119 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:35Z","lastTransitionTime":"2026-01-22T11:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.657931 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:35 crc kubenswrapper[4773]: E0122 11:55:35.658062 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.757116 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.757181 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.757195 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.757216 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.757235 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:35Z","lastTransitionTime":"2026-01-22T11:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.859576 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.859621 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.859631 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.859648 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.859659 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:35Z","lastTransitionTime":"2026-01-22T11:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.962076 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.962375 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.962608 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.962710 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:35 crc kubenswrapper[4773]: I0122 11:55:35.962799 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:35Z","lastTransitionTime":"2026-01-22T11:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.065630 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.065679 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.065694 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.065717 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.065729 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:36Z","lastTransitionTime":"2026-01-22T11:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.168422 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.168505 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.168517 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.168800 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.168832 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:36Z","lastTransitionTime":"2026-01-22T11:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.272316 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.272357 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.272369 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.272391 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.272403 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:36Z","lastTransitionTime":"2026-01-22T11:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.375631 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.375999 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.376075 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.376163 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.376230 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:36Z","lastTransitionTime":"2026-01-22T11:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.479405 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.479820 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.479885 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.479978 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.480047 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:36Z","lastTransitionTime":"2026-01-22T11:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.583158 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.583234 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.583273 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.583307 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.583324 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:36Z","lastTransitionTime":"2026-01-22T11:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.648684 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 20:20:26.983543783 +0000 UTC Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.658053 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.658137 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:36 crc kubenswrapper[4773]: E0122 11:55:36.658195 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:36 crc kubenswrapper[4773]: E0122 11:55:36.658277 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.658395 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:36 crc kubenswrapper[4773]: E0122 11:55:36.658462 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.685975 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.686242 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.686321 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.686393 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.686486 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:36Z","lastTransitionTime":"2026-01-22T11:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.788504 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.788550 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.788560 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.788577 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.788587 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:36Z","lastTransitionTime":"2026-01-22T11:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.890512 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.890738 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.890837 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.890908 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.890990 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:36Z","lastTransitionTime":"2026-01-22T11:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.994560 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.995050 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.995131 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.995214 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:36 crc kubenswrapper[4773]: I0122 11:55:36.995309 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:36Z","lastTransitionTime":"2026-01-22T11:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.097963 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.098004 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.098014 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.098037 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.098048 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:37Z","lastTransitionTime":"2026-01-22T11:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.200649 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.200682 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.200690 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.200702 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.200710 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:37Z","lastTransitionTime":"2026-01-22T11:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.304565 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.304609 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.304620 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.304636 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.304648 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:37Z","lastTransitionTime":"2026-01-22T11:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.407568 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.407632 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.407647 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.407666 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.407676 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:37Z","lastTransitionTime":"2026-01-22T11:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.510555 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.510613 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.510633 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.510661 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.510679 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:37Z","lastTransitionTime":"2026-01-22T11:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.613605 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.613856 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.613920 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.613980 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.614045 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:37Z","lastTransitionTime":"2026-01-22T11:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.649039 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 07:34:51.656759629 +0000 UTC Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.657502 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:37 crc kubenswrapper[4773]: E0122 11:55:37.657626 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.716499 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.716538 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.716552 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.716570 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.716581 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:37Z","lastTransitionTime":"2026-01-22T11:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.819076 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.819125 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.819135 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.819150 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.819159 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:37Z","lastTransitionTime":"2026-01-22T11:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.922349 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.922401 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.922413 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.922432 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:37 crc kubenswrapper[4773]: I0122 11:55:37.922445 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:37Z","lastTransitionTime":"2026-01-22T11:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:38 crc kubenswrapper[4773]: I0122 11:55:38.024679 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:38 crc kubenswrapper[4773]: I0122 11:55:38.024728 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:38 crc kubenswrapper[4773]: I0122 11:55:38.024741 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:38 crc kubenswrapper[4773]: I0122 11:55:38.024782 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:38 crc kubenswrapper[4773]: I0122 11:55:38.024794 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:38Z","lastTransitionTime":"2026-01-22T11:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:38 crc kubenswrapper[4773]: I0122 11:55:38.126649 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:38 crc kubenswrapper[4773]: I0122 11:55:38.126863 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:38 crc kubenswrapper[4773]: I0122 11:55:38.126950 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:38 crc kubenswrapper[4773]: I0122 11:55:38.127068 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:38 crc kubenswrapper[4773]: I0122 11:55:38.127128 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:38Z","lastTransitionTime":"2026-01-22T11:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 22 11:55:38 crc kubenswrapper[4773]: I0122 11:55:38.229985 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:38 crc kubenswrapper[4773]: I0122 11:55:38.230040 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:38 crc kubenswrapper[4773]: I0122 11:55:38.230059 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:38 crc kubenswrapper[4773]: I0122 11:55:38.230079 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:38 crc kubenswrapper[4773]: I0122 11:55:38.230093 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:38Z","lastTransitionTime":"2026-01-22T11:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[the five-entry node-status sequence above repeats at ~100 ms intervals, identical apart from timestamps, 11:55:38.332889 – 11:55:39.565644; repeats omitted, non-repeating entries from the same interval retained below]
Jan 22 11:55:38 crc kubenswrapper[4773]: I0122 11:55:38.649829 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 18:56:21.095038922 +0000 UTC
Jan 22 11:55:38 crc kubenswrapper[4773]: I0122 11:55:38.657176 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 11:55:38 crc kubenswrapper[4773]: E0122 11:55:38.657323 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 11:55:38 crc kubenswrapper[4773]: I0122 11:55:38.657452 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 11:55:38 crc kubenswrapper[4773]: I0122 11:55:38.657518 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 11:55:38 crc kubenswrapper[4773]: E0122 11:55:38.657591 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 11:55:38 crc kubenswrapper[4773]: E0122 11:55:38.657670 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.650924 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 01:18:54.117012775 +0000 UTC
Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.657511 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:55:39 crc kubenswrapper[4773]: E0122 11:55:39.657679 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6"
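Every failed pod sync and node-status update above has the same root cause: the kubelet finds no CNI network configuration under /etc/kubernetes/cni/net.d/, because the cluster's network plugin has not written one yet. For orientation only, a CNI conflist of the general shape that check looks for is sketched below; the network name, bridge plugin, and subnet are illustrative assumptions, not values from this cluster (an OpenShift/CRC node would normally receive its config from its own network operator, not a hand-written file):

  {
    "cniVersion": "0.4.0",
    "name": "example-net",
    "plugins": [
      {
        "type": "bridge",
        "bridge": "cni0",
        "isGateway": true,
        "ipMasq": true,
        "ipam": {
          "type": "host-local",
          "subnet": "10.88.0.0/16"
        }
      }
    ]
  }

Once a valid configuration appears in that directory and the plugin is functional, the runtime should report NetworkReady=true and the KubeletNotReady loop above should clear.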
pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.668589 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.668627 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.668637 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.668651 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.668662 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:39Z","lastTransitionTime":"2026-01-22T11:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.771646 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.771775 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.771810 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.771832 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.771848 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:39Z","lastTransitionTime":"2026-01-22T11:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.874327 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.874370 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.874381 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.874394 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.874404 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:39Z","lastTransitionTime":"2026-01-22T11:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.977233 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.977313 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.977331 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.977352 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:39 crc kubenswrapper[4773]: I0122 11:55:39.977367 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:39Z","lastTransitionTime":"2026-01-22T11:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.079965 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.080004 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.080012 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.080029 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.080038 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:40Z","lastTransitionTime":"2026-01-22T11:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.181761 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.181823 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.181835 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.181850 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.181860 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:40Z","lastTransitionTime":"2026-01-22T11:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.284005 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.284057 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.284068 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.284080 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.284088 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:40Z","lastTransitionTime":"2026-01-22T11:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.386456 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.386488 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.386497 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.386510 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.386521 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:40Z","lastTransitionTime":"2026-01-22T11:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.489397 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.489459 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.489471 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.489486 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.489497 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:40Z","lastTransitionTime":"2026-01-22T11:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.591638 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.591682 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.591690 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.591707 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.591717 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:40Z","lastTransitionTime":"2026-01-22T11:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.652197 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 06:39:49.711738995 +0000 UTC Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.657618 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.657731 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:40 crc kubenswrapper[4773]: E0122 11:55:40.657867 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.657925 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:40 crc kubenswrapper[4773]: E0122 11:55:40.658000 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:40 crc kubenswrapper[4773]: E0122 11:55:40.658088 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.693507 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.693561 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.693572 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.693589 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.693604 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:40Z","lastTransitionTime":"2026-01-22T11:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.796024 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.796065 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.796074 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.796089 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.796097 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:40Z","lastTransitionTime":"2026-01-22T11:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.898106 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.898151 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.898202 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.898216 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:40 crc kubenswrapper[4773]: I0122 11:55:40.898226 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:40Z","lastTransitionTime":"2026-01-22T11:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.001077 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.001129 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.001140 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.001160 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.001172 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:41Z","lastTransitionTime":"2026-01-22T11:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.103327 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.103355 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.103362 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.103374 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.103382 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:41Z","lastTransitionTime":"2026-01-22T11:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.131440 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs\") pod \"network-metrics-daemon-5tqwr\" (UID: \"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\") " pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:41 crc kubenswrapper[4773]: E0122 11:55:41.131601 4773 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 11:55:41 crc kubenswrapper[4773]: E0122 11:55:41.131656 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs podName:c1ed4b8d-def5-474b-8629-cd0bae7e49a6 nodeName:}" failed. No retries permitted until 2026-01-22 11:55:57.13163826 +0000 UTC m=+64.709754085 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs") pod "network-metrics-daemon-5tqwr" (UID: "c1ed4b8d-def5-474b-8629-cd0bae7e49a6") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.205009 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.205051 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.205060 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.205075 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.205084 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:41Z","lastTransitionTime":"2026-01-22T11:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.307550 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.307618 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.307629 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.307647 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.307661 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:41Z","lastTransitionTime":"2026-01-22T11:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
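The retry entry above is worth pausing on: the failed mount is parked for 16s (until m=+64.7), and when the same class of volume operation fails again at 11:55:42 below, the delay doubles to 32s. That is the exponential-backoff pattern the kubelet applies to failed volume operations. A minimal Go sketch of that shape follows; the base and cap here are assumed for illustration (the kubelet's real constants live in its nestedpendingoperations/exponential-backoff code and may differ):

  package main

  import (
  	"fmt"
  	"time"
  )

  // nextRetryDelay doubles the previous delay, clamped to limit.
  // The 16s and then 32s values visible in this log are consistent
  // with this doubling shape.
  func nextRetryDelay(prev, limit time.Duration) time.Duration {
  	next := prev * 2
  	if next > limit {
  		return limit
  	}
  	return next
  }

  func main() {
  	d := 8 * time.Second
  	for i := 0; i < 4; i++ {
  		d = nextRetryDelay(d, 2*time.Minute)
  		fmt.Println(d) // 16s, 32s, 1m4s, 2m0s
  	}
  }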
[node-status sequence repeats, 11:55:41.409626 – 11:55:41.615566; omitted]
Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.652620 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 22:17:44.94296023 +0000 UTC
Jan 22 11:55:41 crc kubenswrapper[4773]: I0122 11:55:41.657357 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:55:41 crc kubenswrapper[4773]: E0122 11:55:41.657549 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6"
[node-status sequence repeats, 11:55:41.717597 – 11:55:42.545685; omitted]
Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.648674 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 22 11:55:42 crc kubenswrapper[4773]: E0122 11:55:42.648827 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:56:14.648778094 +0000 UTC m=+82.226893919 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
[node-status sequence repeats, 11:55:42.649120 – 11:55:42.649409; omitted]
Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.653257 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 11:42:48.694251313 +0000 UTC
Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.657591 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.657660 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.657681 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 11:55:42 crc kubenswrapper[4773]: E0122 11:55:42.657760 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 11:55:42 crc kubenswrapper[4773]: E0122 11:55:42.657929 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
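The unmount above is parked for 32s because no CSI driver named kubevirt.io.hostpath-provisioner is currently registered with this kubelet — typically the driver's node plugin simply has not re-registered after the restart. One way to see which drivers a node does have registered is to read its CSINode object. The following client-go sketch is illustrative only and assumes a reachable kubeconfig at a hypothetical path:

  package main

  import (
  	"context"
  	"fmt"
  	"log"

  	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
  	"k8s.io/client-go/kubernetes"
  	"k8s.io/client-go/tools/clientcmd"
  )

  func main() {
  	// Assumed kubeconfig location; adjust for your environment.
  	cfg, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config")
  	if err != nil {
  		log.Fatal(err)
  	}
  	cs, err := kubernetes.NewForConfig(cfg)
  	if err != nil {
  		log.Fatal(err)
  	}
  	// The CSINode object mirrors the drivers registered with the kubelet on "crc".
  	node, err := cs.StorageV1().CSINodes().Get(context.TODO(), "crc", metav1.GetOptions{})
  	if err != nil {
  		log.Fatal(err)
  	}
  	for _, d := range node.Spec.Drivers {
  		fmt.Println(d.Name) // kubevirt.io.hostpath-provisioner should appear once re-registered
  	}
  }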
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:42 crc kubenswrapper[4773]: E0122 11:55:42.658029 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.673611 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:42Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.686303 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:42Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.706831 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55
e36cf55e0215e30a65dc7047\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"message\\\":\\\"ok: 6.321µs\\\\nI0122 11:55:22.710468 6184 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 11:55:22.710479 6184 factory.go:1336] Added *v1.Pod event handler 3\\\\nI0122 11:55:22.710504 6184 admin_network_policy_controller.go:133] Setting up event handlers for Admin Network Policy\\\\nI0122 11:55:22.710580 6184 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:22.710602 6184 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 11:55:22.710601 6184 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/marketplace-operator-metrics\\\\\\\"}\\\\nI0122 11:55:22.710615 6184 services_controller.go:360] Finished syncing service marketplace-operator-metrics on namespace openshift-marketplace for network=default : 1.669825ms\\\\nI0122 11:55:22.710631 6184 services_controller.go:356] Processing sync for service openshift-marketplace/redhat-marketplace for network=default\\\\nF0122 11:55:22.710653 6184 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pplsq_openshift-ovn-kubernetes(7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:42Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.724664 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:42Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.744684 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:42Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.750547 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.750594 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:42 crc 
kubenswrapper[4773]: I0122 11:55:42.750628 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.750671 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:42 crc kubenswrapper[4773]: E0122 11:55:42.750738 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 11:55:42 crc kubenswrapper[4773]: E0122 11:55:42.750760 4773 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 11:55:42 crc kubenswrapper[4773]: E0122 11:55:42.750762 4773 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 11:55:42 crc kubenswrapper[4773]: E0122 11:55:42.750806 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 11:56:14.750791268 +0000 UTC m=+82.328907093 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 11:55:42 crc kubenswrapper[4773]: E0122 11:55:42.750822 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 11:55:42 crc kubenswrapper[4773]: E0122 11:55:42.750843 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 11:56:14.750818499 +0000 UTC m=+82.328934344 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 11:55:42 crc kubenswrapper[4773]: E0122 11:55:42.750852 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 11:55:42 crc kubenswrapper[4773]: E0122 11:55:42.750765 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 11:55:42 crc kubenswrapper[4773]: E0122 11:55:42.750869 4773 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:42 crc kubenswrapper[4773]: E0122 11:55:42.750874 4773 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:42 crc kubenswrapper[4773]: E0122 11:55:42.750912 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 11:56:14.750897881 +0000 UTC m=+82.329013786 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:42 crc kubenswrapper[4773]: E0122 11:55:42.750930 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 11:56:14.750922782 +0000 UTC m=+82.329038707 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.752850 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.752878 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.752888 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.752905 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.752920 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:42Z","lastTransitionTime":"2026-01-22T11:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.758676 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:42Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.776122 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOn
ly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\
\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:42Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.789844 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d183042e0709480c05e56952a6021a8d816c3c4136509ae049ef868457db54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ddcc51dc7dfeb1f18990d5a1daa85fa7ef773f90b28fc012ac683871d66a634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"202
6-01-22T11:55:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8xrc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:42Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.801493 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5tqwr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:42Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.815002 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:42Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.832266 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:42Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.843357 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:42Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.856344 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:42Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.856398 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.856588 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.856618 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.856649 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.856667 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:42Z","lastTransitionTime":"2026-01-22T11:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.868729 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:42Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.888526 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:42Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.901885 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:42Z is after 2025-08-24T17:21:41Z"
Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.959010 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.959061 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.959114 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.959136 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:42 crc kubenswrapper[4773]: I0122 11:55:42.959149 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:42Z","lastTransitionTime":"2026-01-22T11:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.061881 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.061913 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.061922 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.061939 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.061956 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:43Z","lastTransitionTime":"2026-01-22T11:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.165004 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.165089 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.165105 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.165131 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.165145 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:43Z","lastTransitionTime":"2026-01-22T11:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.267262 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.267365 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.267380 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.267420 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.267432 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:43Z","lastTransitionTime":"2026-01-22T11:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.371110 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.371150 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.371159 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.371174 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.371184 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:43Z","lastTransitionTime":"2026-01-22T11:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.474610 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.474933 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.475050 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.475163 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.475251 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:43Z","lastTransitionTime":"2026-01-22T11:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.578622 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.578664 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.578673 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.578694 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.578707 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:43Z","lastTransitionTime":"2026-01-22T11:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.653576 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 15:31:43.022840997 +0000 UTC
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.658066 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:55:43 crc kubenswrapper[4773]: E0122 11:55:43.658327 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.680831 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.680929 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.680954 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.680970 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.680982 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:43Z","lastTransitionTime":"2026-01-22T11:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.784147 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.784205 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.784219 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.784238 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.784251 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:43Z","lastTransitionTime":"2026-01-22T11:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.886869 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.887167 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.887412 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.887538 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.887627 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:43Z","lastTransitionTime":"2026-01-22T11:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.990410 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.990697 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.990793 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.990889 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:43 crc kubenswrapper[4773]: I0122 11:55:43.990974 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:43Z","lastTransitionTime":"2026-01-22T11:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.092877 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.093569 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.093612 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.093635 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.093655 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:44Z","lastTransitionTime":"2026-01-22T11:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.195763 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.195811 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.195823 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.195842 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.195856 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:44Z","lastTransitionTime":"2026-01-22T11:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.298587 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.298639 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.298650 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.298670 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.298682 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:44Z","lastTransitionTime":"2026-01-22T11:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.337598 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.337637 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.337648 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.337663 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.337674 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:44Z","lastTransitionTime":"2026-01-22T11:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:44 crc kubenswrapper[4773]: E0122 11:55:44.349952 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:44Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.353021 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.353048 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.353058 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.353074 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.353084 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:44Z","lastTransitionTime":"2026-01-22T11:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:44 crc kubenswrapper[4773]: E0122 11:55:44.363805 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:44Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.368812 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.369016 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.369105 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.369198 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.369271 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:44Z","lastTransitionTime":"2026-01-22T11:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:44 crc kubenswrapper[4773]: E0122 11:55:44.381779 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:44Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.387587 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.387636 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.387650 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.387669 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.387686 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:44Z","lastTransitionTime":"2026-01-22T11:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:44 crc kubenswrapper[4773]: E0122 11:55:44.400801 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:44Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.404970 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.405021 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.405038 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.405059 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.405074 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:44Z","lastTransitionTime":"2026-01-22T11:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:44 crc kubenswrapper[4773]: E0122 11:55:44.420568 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:44Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:44 crc kubenswrapper[4773]: E0122 11:55:44.420723 4773 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.422428 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.422474 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.422487 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.422506 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.422517 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:44Z","lastTransitionTime":"2026-01-22T11:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.524625 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.524718 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.524731 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.524749 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.525148 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:44Z","lastTransitionTime":"2026-01-22T11:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.627577 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.627606 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.627615 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.627629 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.627638 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:44Z","lastTransitionTime":"2026-01-22T11:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.654520 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 20:06:27.666955528 +0000 UTC Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.657955 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.658089 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.658040 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:44 crc kubenswrapper[4773]: E0122 11:55:44.658271 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:44 crc kubenswrapper[4773]: E0122 11:55:44.658389 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:44 crc kubenswrapper[4773]: E0122 11:55:44.658549 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.659180 4773 scope.go:117] "RemoveContainer" containerID="76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.731132 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.731171 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.731183 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.731202 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.731422 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:44Z","lastTransitionTime":"2026-01-22T11:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.834793 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.834853 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.834878 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.834906 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.834929 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:44Z","lastTransitionTime":"2026-01-22T11:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.937322 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.937360 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.937376 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.937396 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:44 crc kubenswrapper[4773]: I0122 11:55:44.937412 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:44Z","lastTransitionTime":"2026-01-22T11:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.039727 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.039772 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.039785 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.039801 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.039814 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:45Z","lastTransitionTime":"2026-01-22T11:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.077377 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovnkube-controller/1.log" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.079971 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerStarted","Data":"f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c"} Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.080322 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.096646 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:45Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.117935 4773 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-tsndt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:45Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.138522 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:45Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.142186 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.142224 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.142232 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.142248 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.142259 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:45Z","lastTransitionTime":"2026-01-22T11:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.153062 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:45Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.164394 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:45Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.175470 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:45Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.185152 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:45Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.201195 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c8263709324b069b1f6b61e6ff1397159a4d25
761eabaec9f2e7eec076e92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"message\\\":\\\"ok: 6.321µs\\\\nI0122 11:55:22.710468 6184 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 11:55:22.710479 6184 factory.go:1336] Added *v1.Pod event handler 3\\\\nI0122 11:55:22.710504 6184 admin_network_policy_controller.go:133] Setting up event handlers for Admin Network Policy\\\\nI0122 11:55:22.710580 6184 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:22.710602 6184 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 11:55:22.710601 6184 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/marketplace-operator-metrics\\\\\\\"}\\\\nI0122 11:55:22.710615 6184 services_controller.go:360] Finished syncing service marketplace-operator-metrics on namespace openshift-marketplace for network=default : 1.669825ms\\\\nI0122 11:55:22.710631 6184 services_controller.go:356] Processing sync for service openshift-marketplace/redhat-marketplace for network=default\\\\nF0122 11:55:22.710653 6184 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, 
handler\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\
\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:45Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.212597 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" 
for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:45Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.225268 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:45Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.238535 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:45Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.244045 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.244084 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.244098 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.244115 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.244126 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:45Z","lastTransitionTime":"2026-01-22T11:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.267355 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0a
a06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernet
es.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:45Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.279687 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d183042e0709480c05e56952a6021a8d816c3c4136509ae049ef868457db54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ddcc51dc7dfeb1f18990d5a1daa85fa7ef773f90b28fc012ac683871d66a634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"
mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8xrc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:45Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.289728 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5tqwr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:45Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.300889 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:45Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.313970 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:45Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.348164 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.348203 4773 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.348212 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.348228 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.348239 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:45Z","lastTransitionTime":"2026-01-22T11:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.450454 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.450492 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.450500 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.450514 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.450524 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:45Z","lastTransitionTime":"2026-01-22T11:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.553561 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.553617 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.553635 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.553656 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.553670 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:45Z","lastTransitionTime":"2026-01-22T11:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.655069 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-06 19:24:39.322858277 +0000 UTC Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.656173 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.656222 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.656232 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.656246 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.656255 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:45Z","lastTransitionTime":"2026-01-22T11:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.657681 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:45 crc kubenswrapper[4773]: E0122 11:55:45.657821 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.758596 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.758651 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.758664 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.758680 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.758692 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:45Z","lastTransitionTime":"2026-01-22T11:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.860721 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.860767 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.860775 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.860794 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.860804 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:45Z","lastTransitionTime":"2026-01-22T11:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.963410 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.963446 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.963459 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.963475 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:45 crc kubenswrapper[4773]: I0122 11:55:45.963485 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:45Z","lastTransitionTime":"2026-01-22T11:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.066036 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.066099 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.066110 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.066126 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.066141 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:46Z","lastTransitionTime":"2026-01-22T11:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.083784 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovnkube-controller/2.log" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.084335 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovnkube-controller/1.log" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.086812 4773 generic.go:334] "Generic (PLEG): container finished" podID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerID="f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c" exitCode=1 Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.086847 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerDied","Data":"f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c"} Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.086878 4773 scope.go:117] "RemoveContainer" containerID="76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.087553 4773 scope.go:117] "RemoveContainer" containerID="f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c" Jan 22 11:55:46 crc kubenswrapper[4773]: E0122 11:55:46.087747 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-pplsq_openshift-ovn-kubernetes(7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.100279 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.100387 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.109381 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.115058 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.130401 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.143358 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.157700 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.168492 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.168527 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.168537 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.168553 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.168564 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:46Z","lastTransitionTime":"2026-01-22T11:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.169487 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.186749 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c8263709324b069b1f6b61e6ff1397159a4d25
761eabaec9f2e7eec076e92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"message\\\":\\\"ok: 6.321µs\\\\nI0122 11:55:22.710468 6184 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 11:55:22.710479 6184 factory.go:1336] Added *v1.Pod event handler 3\\\\nI0122 11:55:22.710504 6184 admin_network_policy_controller.go:133] Setting up event handlers for Admin Network Policy\\\\nI0122 11:55:22.710580 6184 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:22.710602 6184 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 11:55:22.710601 6184 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/marketplace-operator-metrics\\\\\\\"}\\\\nI0122 11:55:22.710615 6184 services_controller.go:360] Finished syncing service marketplace-operator-metrics on namespace openshift-marketplace for network=default : 1.669825ms\\\\nI0122 11:55:22.710631 6184 services_controller.go:356] Processing sync for service openshift-marketplace/redhat-marketplace for network=default\\\\nF0122 11:55:22.710653 6184 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:45Z\\\",\\\"message\\\":\\\"map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 11:55:45.493862 6468 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-image-registry/image-registry]} name:Service_openshift-image-registry/image-registry_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: 
UUIDName:}]\\\\nI0122 11:55:45.494522 6468 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:45.494555 6468 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 11:55:45.494625 6468 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recur
siveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.200246 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.210190 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.221363 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.234008 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.243084 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d183042e0709480c05e56952a6021a8d816c3c4136509ae049ef868457db54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ddcc51dc7dfeb1f18990d5a1daa85fa7ef773f90b28fc012ac683871d66a634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\"
:true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8xrc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.252265 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5tqwr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.262715 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.270218 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.270252 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.270265 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.270296 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.270308 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:46Z","lastTransitionTime":"2026-01-22T11:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.277733 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.287559 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.298632 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6349121-8063-49ee-91ce-cdb8ba45ffdb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75f708c0004ed30f729db1f335241b3005caed61f0d6dab2ac61685f14c189c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dbf974e110262739b5ac93b528d8acd1f01163d825c0351bf4882e36548efdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea5835dddfbe2535ef6ec62405f9f325318d3951aaec646c967edbe896e8f827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.312851 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.323737 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.334610 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.345676 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.365802 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c8263709324b069b1f6b61e6ff1397159a4d25
761eabaec9f2e7eec076e92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://76ce87ffd664f6e423623466b0f95771b8f0be55e36cf55e0215e30a65dc7047\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:23Z\\\",\\\"message\\\":\\\"ok: 6.321µs\\\\nI0122 11:55:22.710468 6184 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 11:55:22.710479 6184 factory.go:1336] Added *v1.Pod event handler 3\\\\nI0122 11:55:22.710504 6184 admin_network_policy_controller.go:133] Setting up event handlers for Admin Network Policy\\\\nI0122 11:55:22.710580 6184 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:22.710602 6184 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 11:55:22.710601 6184 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-marketplace/marketplace-operator-metrics\\\\\\\"}\\\\nI0122 11:55:22.710615 6184 services_controller.go:360] Finished syncing service marketplace-operator-metrics on namespace openshift-marketplace for network=default : 1.669825ms\\\\nI0122 11:55:22.710631 6184 services_controller.go:356] Processing sync for service openshift-marketplace/redhat-marketplace for network=default\\\\nF0122 11:55:22.710653 6184 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:45Z\\\",\\\"message\\\":\\\"map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 11:55:45.493862 6468 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-image-registry/image-registry]} name:Service_openshift-image-registry/image-registry_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: 
UUIDName:}]\\\\nI0122 11:55:45.494522 6468 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:45.494555 6468 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 11:55:45.494625 6468 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recur
siveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.372513 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.372556 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.372572 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.372588 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.372599 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:46Z","lastTransitionTime":"2026-01-22T11:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.380150 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.392348 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.405430 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.420827 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.434535 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d183042e0709480c05e56952a6021a8d816c3c4136509ae049ef868457db54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ddcc51dc7dfeb1f18990d5a1daa85fa7ef773f90b28fc012ac683871d66a634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0
b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8xrc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.474339 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5tqwr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.475151 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.475188 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.475197 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.475214 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.475223 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:46Z","lastTransitionTime":"2026-01-22T11:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.496453 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.512182 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.526134 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.542920 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.555257 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:46Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.578193 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.578250 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.578262 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.578309 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.578324 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:46Z","lastTransitionTime":"2026-01-22T11:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.656264 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 11:27:52.961323858 +0000 UTC
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.657429 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.657482 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.657429 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 11:55:46 crc kubenswrapper[4773]: E0122 11:55:46.657547 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 11:55:46 crc kubenswrapper[4773]: E0122 11:55:46.657649 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 11:55:46 crc kubenswrapper[4773]: E0122 11:55:46.657765 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.680253 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.680321 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.680332 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.680348 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.680360 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:46Z","lastTransitionTime":"2026-01-22T11:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.782377 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.782420 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.782429 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.782445 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.782456 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:46Z","lastTransitionTime":"2026-01-22T11:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.884942 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.884999 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.885012 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.885033 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.885043 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:46Z","lastTransitionTime":"2026-01-22T11:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.987253 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.987305 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.987314 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.987327 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:46 crc kubenswrapper[4773]: I0122 11:55:46.987337 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:46Z","lastTransitionTime":"2026-01-22T11:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.089668 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.089718 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.089729 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.089746 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.089759 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:47Z","lastTransitionTime":"2026-01-22T11:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.091984 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovnkube-controller/2.log"
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.096121 4773 scope.go:117] "RemoveContainer" containerID="f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c"
Jan 22 11:55:47 crc kubenswrapper[4773]: E0122 11:55:47.096410 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-pplsq_openshift-ovn-kubernetes(7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.109333 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:47Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.125733 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:47Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.148068 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:47Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.161243 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:47Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.182264 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c8263709324b069b1f6b61e6ff1397159a4d25
761eabaec9f2e7eec076e92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:45Z\\\",\\\"message\\\":\\\"map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 11:55:45.493862 6468 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-image-registry/image-registry]} name:Service_openshift-image-registry/image-registry_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 11:55:45.494522 6468 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:45.494555 6468 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 11:55:45.494625 6468 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pplsq_openshift-ovn-kubernetes(7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:47Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.191834 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.191881 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.191893 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.191908 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.191918 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:47Z","lastTransitionTime":"2026-01-22T11:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.198028 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5tqwr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:47Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.211358 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:47Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.224720 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:47Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.237706 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:47Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.252238 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:47Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.263773 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d183042e0709480c05e56952a6021a8d816c3c4136509ae049ef868457db54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ddcc51dc7dfeb1f18990d5a1daa85fa7ef773f90b28fc012ac683871d66a634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8xrc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:47Z is after 2025-08-24T17:21:41Z" Jan 22 
11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.275678 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:47Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.288063 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6349121-8063-49ee-91ce-cdb8ba45ffdb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75f708c0004ed30f729db1f335241b3005caed61f0d6dab2ac61685f14c189c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dbf974e110262739b5ac93b528d8acd1f01163d825c0351bf4882e36548efdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea5835dddfbe2535ef6ec62405f9f325318d3951aaec646c967edbe896e8f827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:47Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.293824 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.293865 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.293877 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.293911 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.293923 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:47Z","lastTransitionTime":"2026-01-22T11:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.301658 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:47Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.315227 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:47Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.328744 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:47Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.338887 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:47Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.396264 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.396348 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.396361 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:47 crc 
kubenswrapper[4773]: I0122 11:55:47.396389 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.396418 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:47Z","lastTransitionTime":"2026-01-22T11:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.499430 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.499482 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.499495 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.499514 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.499525 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:47Z","lastTransitionTime":"2026-01-22T11:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.602619 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.602894 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.603032 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.603208 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.603373 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:47Z","lastTransitionTime":"2026-01-22T11:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.671520 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 10:26:44.677158458 +0000 UTC Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.672453 4773 util.go:30] "No sandbox for pod can be found. 
Jan 22 11:55:47 crc kubenswrapper[4773]: E0122 11:55:47.672634 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6"
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.705879 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.705922 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.705937 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.705957 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.705968 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:47Z","lastTransitionTime":"2026-01-22T11:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.808441 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.808477 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.808487 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.808504 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.808517 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:47Z","lastTransitionTime":"2026-01-22T11:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
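The condition={...} payload on the setters.go:603 lines is plain JSON. A self-contained Go sketch that parses one of the logged payloads; the struct below is a local stand-in for the corev1 NodeCondition type, and the message field is shortened here:

package main

import (
	"encoding/json"
	"fmt"
)

// NodeCondition mirrors only the fields kubelet logs for the Ready condition.
type NodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// Copied from the log above, with the long message truncated.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:47Z","lastTransitionTime":"2026-01-22T11:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false ..."}`
	var c NodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		fmt.Println("parse error:", err)
		return
	}
	fmt.Printf("%s=%s reason=%s\n", c.Type, c.Status, c.Reason)
}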
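The repeated NotReady reason reduces to one fact: /etc/kubernetes/cni/net.d/ contains no CNI configuration. A sketch of the kind of directory scan a container runtime performs at startup; the accepted extensions are an assumption for illustration, and the real ocicni logic is more involved:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // directory named in the log
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	var confs []string
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // simplified extension filter
			confs = append(confs, e.Name())
		}
	}
	if len(confs) == 0 {
		fmt.Println("no CNI configuration file found; network plugin not ready")
		return
	}
	fmt.Println("CNI configs:", confs)
}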
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.910604 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.910661 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.910672 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.910700 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:47 crc kubenswrapper[4773]: I0122 11:55:47.910714 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:47Z","lastTransitionTime":"2026-01-22T11:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.012709 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.012738 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.012746 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.012760 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.012768 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:48Z","lastTransitionTime":"2026-01-22T11:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.114880 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.114996 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.115021 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.115052 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.115075 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:48Z","lastTransitionTime":"2026-01-22T11:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.217221 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.217336 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.217360 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.217381 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.217396 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:48Z","lastTransitionTime":"2026-01-22T11:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.319665 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.319734 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.319750 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.319773 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.319790 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:48Z","lastTransitionTime":"2026-01-22T11:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.422054 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.422094 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.422103 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.422115 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.422126 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:48Z","lastTransitionTime":"2026-01-22T11:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.524762 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.524860 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.524884 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.524926 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.524957 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:48Z","lastTransitionTime":"2026-01-22T11:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.628524 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.628568 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.628579 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.628600 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.628615 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:48Z","lastTransitionTime":"2026-01-22T11:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.658243 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.658270 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.658355 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:48 crc kubenswrapper[4773]: E0122 11:55:48.658597 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:48 crc kubenswrapper[4773]: E0122 11:55:48.658909 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:48 crc kubenswrapper[4773]: E0122 11:55:48.658968 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.671991 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 20:52:00.551070831 +0000 UTC Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.731036 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.731077 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.731089 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.731110 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.731129 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:48Z","lastTransitionTime":"2026-01-22T11:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.834098 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.834131 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.834141 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.834156 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.834173 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:48Z","lastTransitionTime":"2026-01-22T11:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.941241 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.941302 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.941316 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.941333 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:48 crc kubenswrapper[4773]: I0122 11:55:48.941349 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:48Z","lastTransitionTime":"2026-01-22T11:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.044388 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.044739 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.044840 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.044925 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.045008 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:49Z","lastTransitionTime":"2026-01-22T11:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.147882 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.147958 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.147977 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.147994 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.148004 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:49Z","lastTransitionTime":"2026-01-22T11:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.250670 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.250739 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.250754 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.250782 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.250799 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:49Z","lastTransitionTime":"2026-01-22T11:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.352909 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.352966 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.352974 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.352990 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.353002 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:49Z","lastTransitionTime":"2026-01-22T11:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.456091 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.456144 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.456160 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.456183 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.456200 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:49Z","lastTransitionTime":"2026-01-22T11:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.559358 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.559617 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.559689 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.559754 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.559846 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:49Z","lastTransitionTime":"2026-01-22T11:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.657872 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:49 crc kubenswrapper[4773]: E0122 11:55:49.658046 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.662477 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.662508 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.662515 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.662528 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.662536 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:49Z","lastTransitionTime":"2026-01-22T11:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.672838 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 00:46:26.097070582 +0000 UTC Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.764535 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.764587 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.764599 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.764618 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.764630 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:49Z","lastTransitionTime":"2026-01-22T11:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.868018 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.868096 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.868121 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.868162 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.868188 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:49Z","lastTransitionTime":"2026-01-22T11:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.971262 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.971367 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.971386 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.971418 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:49 crc kubenswrapper[4773]: I0122 11:55:49.971437 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:49Z","lastTransitionTime":"2026-01-22T11:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.074374 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.074437 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.074454 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.074481 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.074498 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:50Z","lastTransitionTime":"2026-01-22T11:55:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.176863 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.176929 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.176944 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.176967 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.176985 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:50Z","lastTransitionTime":"2026-01-22T11:55:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.280358 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.280434 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.280450 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.280475 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.280499 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:50Z","lastTransitionTime":"2026-01-22T11:55:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.383364 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.383400 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.383408 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.383424 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.383436 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:50Z","lastTransitionTime":"2026-01-22T11:55:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.486910 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.487208 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.487230 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.487253 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.487269 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:50Z","lastTransitionTime":"2026-01-22T11:55:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.590388 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.590448 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.590459 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.590479 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.590490 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:50Z","lastTransitionTime":"2026-01-22T11:55:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.657891 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.658021 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.658098 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:50 crc kubenswrapper[4773]: E0122 11:55:50.658152 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:50 crc kubenswrapper[4773]: E0122 11:55:50.658228 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:50 crc kubenswrapper[4773]: E0122 11:55:50.658440 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.673210 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 11:19:04.779309609 +0000 UTC Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.693220 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.693337 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.693361 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.693390 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.693412 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:50Z","lastTransitionTime":"2026-01-22T11:55:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.795756 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.795787 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.795798 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.795815 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.795826 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:50Z","lastTransitionTime":"2026-01-22T11:55:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.898215 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.898248 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.898258 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.898272 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:50 crc kubenswrapper[4773]: I0122 11:55:50.898304 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:50Z","lastTransitionTime":"2026-01-22T11:55:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.000740 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.000785 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.000796 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.000813 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.000823 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:51Z","lastTransitionTime":"2026-01-22T11:55:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.103660 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.103708 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.103716 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.103730 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.103739 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:51Z","lastTransitionTime":"2026-01-22T11:55:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.206741 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.206780 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.206790 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.206816 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.206826 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:51Z","lastTransitionTime":"2026-01-22T11:55:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.308870 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.308915 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.308925 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.308941 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.308952 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:51Z","lastTransitionTime":"2026-01-22T11:55:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.411195 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.411229 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.411262 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.411278 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.411327 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:51Z","lastTransitionTime":"2026-01-22T11:55:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.514089 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.514134 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.514146 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.514164 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.514181 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:51Z","lastTransitionTime":"2026-01-22T11:55:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.616785 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.616827 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.616836 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.616850 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.616859 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:51Z","lastTransitionTime":"2026-01-22T11:55:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.657115 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:51 crc kubenswrapper[4773]: E0122 11:55:51.657391 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.673712 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 14:51:00.362700538 +0000 UTC Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.719869 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.719954 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.719973 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.719996 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.720013 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:51Z","lastTransitionTime":"2026-01-22T11:55:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.822671 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.822709 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.822720 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.822737 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.822749 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:51Z","lastTransitionTime":"2026-01-22T11:55:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.924808 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.924865 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.924877 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.924896 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:51 crc kubenswrapper[4773]: I0122 11:55:51.924914 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:51Z","lastTransitionTime":"2026-01-22T11:55:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.027350 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.027408 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.027420 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.027438 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.027452 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:52Z","lastTransitionTime":"2026-01-22T11:55:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.130643 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.130703 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.130718 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.130742 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.130759 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:52Z","lastTransitionTime":"2026-01-22T11:55:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.233091 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.233408 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.233559 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.233660 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.233780 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:52Z","lastTransitionTime":"2026-01-22T11:55:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.335780 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.336087 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.336189 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.336326 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.336426 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:52Z","lastTransitionTime":"2026-01-22T11:55:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.439679 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.439726 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.439739 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.439755 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.439767 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:52Z","lastTransitionTime":"2026-01-22T11:55:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.542306 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.542569 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.542656 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.542745 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.542810 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:52Z","lastTransitionTime":"2026-01-22T11:55:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.647410 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.647453 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.647463 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.647478 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.647489 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:52Z","lastTransitionTime":"2026-01-22T11:55:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.657272 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.657326 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.657326 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:52 crc kubenswrapper[4773]: E0122 11:55:52.657417 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:52 crc kubenswrapper[4773]: E0122 11:55:52.657578 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:52 crc kubenswrapper[4773]: E0122 11:55:52.657612 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.670203 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:52Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.673802 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 23:54:57.990260901 +0000 UTC Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.682861 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6349121-8063-49ee-91ce-cdb8ba45ffdb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75f708c0004ed30f729db1f335241b3005caed61f0d6dab2ac61685f14c189c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dbf974e110262739b5ac93b528d8acd1f01163d825c0351bf4882e36548efdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea5835dddfbe2535ef6ec62405f9f325318d3951aaec646c967edbe896e8f827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:52Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.695981 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:52Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.709446 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:52Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.722363 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:52Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.732520 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:52Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.749306 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.749345 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.749357 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:52 crc 
kubenswrapper[4773]: I0122 11:55:52.749373 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.749383 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:52Z","lastTransitionTime":"2026-01-22T11:55:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.752620 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c8263709324b069b1f6b61e6ff1397159a4d25
761eabaec9f2e7eec076e92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:45Z\\\",\\\"message\\\":\\\"map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 11:55:45.493862 6468 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-image-registry/image-registry]} name:Service_openshift-image-registry/image-registry_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 11:55:45.494522 6468 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:45.494555 6468 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 11:55:45.494625 6468 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pplsq_openshift-ovn-kubernetes(7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:52Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.765169 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:52Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.776931 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:52Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.790319 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:52Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.802386 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:52Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.811822 4773 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d183042e0709480c05e56952a6021a8d816c3c4136509ae049ef868457db54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ddcc51dc7dfeb1f18990d5a1daa85fa7ef773f90b28fc012ac683871d66a634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8xrc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:52Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.820877 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5tqwr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:52Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.834946 4773 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366
e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:52Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.848097 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery 
information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:52Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.853594 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.853665 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.853680 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.853697 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.853727 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:52Z","lastTransitionTime":"2026-01-22T11:55:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.860329 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:52Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.874341 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:52Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.956076 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.956124 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:52 crc 
kubenswrapper[4773]: I0122 11:55:52.956134 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.956148 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:52 crc kubenswrapper[4773]: I0122 11:55:52.956158 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:52Z","lastTransitionTime":"2026-01-22T11:55:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.058891 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.058950 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.058971 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.058999 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.059020 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:53Z","lastTransitionTime":"2026-01-22T11:55:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.162894 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.162927 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.162937 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.162952 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.162963 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:53Z","lastTransitionTime":"2026-01-22T11:55:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.265415 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.265451 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.265460 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.265472 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.265481 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:53Z","lastTransitionTime":"2026-01-22T11:55:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.368788 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.368842 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.368854 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.368870 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.368882 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:53Z","lastTransitionTime":"2026-01-22T11:55:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.470952 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.470983 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.470991 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.471007 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.471046 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:53Z","lastTransitionTime":"2026-01-22T11:55:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.573696 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.573748 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.573759 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.573778 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.573791 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:53Z","lastTransitionTime":"2026-01-22T11:55:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.657324 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:53 crc kubenswrapper[4773]: E0122 11:55:53.657475 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.674986 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 06:43:50.724176967 +0000 UTC Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.677979 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.678024 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.678053 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.678073 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.678085 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:53Z","lastTransitionTime":"2026-01-22T11:55:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.780932 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.780988 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.781005 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.781028 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.781046 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:53Z","lastTransitionTime":"2026-01-22T11:55:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.884055 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.884098 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.884106 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.884120 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.884128 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:53Z","lastTransitionTime":"2026-01-22T11:55:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.985942 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.985985 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.985996 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.986011 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:53 crc kubenswrapper[4773]: I0122 11:55:53.986021 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:53Z","lastTransitionTime":"2026-01-22T11:55:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.087955 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.088260 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.088385 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.088477 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.088586 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:54Z","lastTransitionTime":"2026-01-22T11:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.190793 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.190820 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.190828 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.190842 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.190852 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:54Z","lastTransitionTime":"2026-01-22T11:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.294161 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.294239 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.294253 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.294274 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.294321 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:54Z","lastTransitionTime":"2026-01-22T11:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.396181 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.396216 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.396228 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.396244 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.396256 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:54Z","lastTransitionTime":"2026-01-22T11:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.498454 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.498490 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.498503 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.498520 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.498530 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:54Z","lastTransitionTime":"2026-01-22T11:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.601354 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.601421 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.601433 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.601451 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.601460 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:54Z","lastTransitionTime":"2026-01-22T11:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.657381 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.657723 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.657937 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:55:54 crc kubenswrapper[4773]: E0122 11:55:54.657953 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:54 crc kubenswrapper[4773]: E0122 11:55:54.658070 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:54 crc kubenswrapper[4773]: E0122 11:55:54.658130 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.675708 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 06:13:28.639155064 +0000 UTC Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.703938 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.704003 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.704015 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.704029 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.704038 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:54Z","lastTransitionTime":"2026-01-22T11:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.805805 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.805841 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.805852 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.805868 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.805879 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:54Z","lastTransitionTime":"2026-01-22T11:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.814074 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.814128 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.814139 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.814159 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.814171 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:54Z","lastTransitionTime":"2026-01-22T11:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:54 crc kubenswrapper[4773]: E0122 11:55:54.827394 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:54Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.831345 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.831404 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.831414 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.831427 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.831436 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:54Z","lastTransitionTime":"2026-01-22T11:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:54 crc kubenswrapper[4773]: E0122 11:55:54.843473 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:54Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.847557 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.847712 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
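Every patch attempt above and below dies at the same TLS step: the node-identity webhook's serving certificate expired on 2025-08-24T17:21:41Z, months before the node's clock reading of 2026-01-22. A minimal Go sketch of the validity-window check that produces this x509 error, assuming the webhook certificate is available as a local PEM file (the path is a placeholder, not taken from this log):

    package main

    import (
    	"crypto/x509"
    	"encoding/pem"
    	"fmt"
    	"os"
    	"time"
    )

    func main() {
    	// Placeholder path; point it at the webhook's serving certificate.
    	data, err := os.ReadFile("/path/to/webhook-serving-cert.pem")
    	if err != nil {
    		fmt.Println("read:", err)
    		return
    	}
    	block, _ := pem.Decode(data)
    	if block == nil {
    		fmt.Println("no PEM block found")
    		return
    	}
    	cert, err := x509.ParseCertificate(block.Bytes)
    	if err != nil {
    		fmt.Println("parse:", err)
    		return
    	}
    	now := time.Now()
    	// The same NotBefore/NotAfter window check that yields
    	// "certificate has expired or is not yet valid" above.
    	switch {
    	case now.Before(cert.NotBefore):
    		fmt.Println("certificate is not yet valid; NotBefore =", cert.NotBefore)
    	case now.After(cert.NotAfter):
    		fmt.Println("certificate has expired; NotAfter =", cert.NotAfter)
    	default:
    		fmt.Println("certificate is valid until", cert.NotAfter)
    	}
    }

Against the live endpoint, `openssl s_client -connect 127.0.0.1:9743` would show the same NotAfter date during the handshake.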
event="NodeHasNoDiskPressure" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.847818 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.847890 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.847947 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:54Z","lastTransitionTime":"2026-01-22T11:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:54 crc kubenswrapper[4773]: E0122 11:55:54.860139 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:54Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.864415 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.864591 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
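The KubeletNotReady message repeated in every Ready condition above has a single root cause: the network plugin finds nothing to load in /etc/kubernetes/cni/net.d/, so NetworkReady stays false and no pod sandbox can be created. A minimal Go sketch of that presence check, assuming the conf directory named in the message (the helper name is ours, not the kubelet's):

    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    	"strings"
    )

    // cniConfigPresent reports whether dir contains at least one CNI network
    // config file (.conf, .conflist or .json). Until one appears, the network
    // plugin keeps reporting NetworkReady=false, as in the entries above.
    func cniConfigPresent(dir string) (bool, error) {
    	entries, err := os.ReadDir(dir)
    	if err != nil {
    		return false, err
    	}
    	for _, e := range entries {
    		if e.IsDir() {
    			continue
    		}
    		switch strings.ToLower(filepath.Ext(e.Name())) {
    		case ".conf", ".conflist", ".json":
    			return true, nil
    		}
    	}
    	return false, nil
    }

    func main() {
    	ok, err := cniConfigPresent("/etc/kubernetes/cni/net.d")
    	if err != nil {
    		fmt.Println("cannot read CNI conf dir:", err)
    		return
    	}
    	fmt.Println("CNI config present:", ok)
    }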
event="NodeHasNoDiskPressure" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.864662 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.864731 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.864796 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:54Z","lastTransitionTime":"2026-01-22T11:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:54 crc kubenswrapper[4773]: E0122 11:55:54.875969 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:54Z is after 2025-08-24T17:21:41Z" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.879377 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.879498 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
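Note the cadence of the failures: 11:55:54.827394, .843473, .860139, .875969 and, below, .892847. Five back-to-back attempts is consistent with a small fixed retry budget for node status updates before the kubelet gives up until its next sync. A generic sketch of that bounded-retry pattern (the constant and function names are ours; only the retry count of five is taken from the attempts visible in this log):

    package main

    import (
    	"errors"
    	"fmt"
    )

    const nodeStatusUpdateRetry = 5 // mirrors the five attempts seen above

    // patchNodeStatus stands in for the PATCH call that the expired
    // webhook certificate rejects in the entries above.
    func patchNodeStatus() error {
    	return errors.New(`failed calling webhook "node.network-node-identity.openshift.io": certificate has expired`)
    }

    func updateNodeStatus() error {
    	var err error
    	for i := 0; i < nodeStatusUpdateRetry; i++ {
    		if err = patchNodeStatus(); err == nil {
    			return nil
    		}
    		fmt.Printf("attempt %d/%d failed: %v\n", i+1, nodeStatusUpdateRetry, err)
    	}
    	return fmt.Errorf("update node status exceeded retry count: %w", err)
    }

    func main() {
    	if err := updateNodeStatus(); err != nil {
    		fmt.Println(err)
    	}
    }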
event="NodeHasNoDiskPressure" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.879566 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.879638 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.879711 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:54Z","lastTransitionTime":"2026-01-22T11:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:54 crc kubenswrapper[4773]: E0122 11:55:54.892847 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:55:54Z is after 2025-08-24T17:21:41Z"
Jan 22 11:55:54 crc kubenswrapper[4773]: E0122 11:55:54.893009 4773 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.908389 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
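The status update above fails before it ever reaches the API server: the node-identity webhook at https://127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24, while the node's clock reads 2026-01-22, so TLS verification rejects every PATCH until the certificate is rotated (or the clock corrected), and the kubelet eventually gives up with "update node status exceeds retry count". A minimal Go sketch of the same x509 validity-window check (the file path is illustrative; the real webhook serves its certificate over TLS rather than from a file we can name here):

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	// Illustrative path, not the webhook's actual certificate location.
	pemBytes, err := os.ReadFile("serving-cert.pem")
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	now := time.Now().UTC()
	// The window test behind "certificate has expired or is not yet valid".
	switch {
	case now.Before(cert.NotBefore):
		fmt.Printf("not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
	case now.After(cert.NotAfter):
		fmt.Printf("expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	default:
		fmt.Println("certificate is within its validity window")
	}
}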
event="NodeHasSufficientMemory" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.908431 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.908448 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.908465 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:54 crc kubenswrapper[4773]: I0122 11:55:54.908477 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:54Z","lastTransitionTime":"2026-01-22T11:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.010787 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.010826 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.010836 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.010851 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.010862 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:55Z","lastTransitionTime":"2026-01-22T11:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.113198 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.113253 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.113275 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.113350 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.113375 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:55Z","lastTransitionTime":"2026-01-22T11:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.219428 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.219474 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.219484 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.219500 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.219510 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:55Z","lastTransitionTime":"2026-01-22T11:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.321818 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.321868 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.321877 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.321892 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.321904 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:55Z","lastTransitionTime":"2026-01-22T11:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.424308 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.424343 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.424351 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.424365 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.424375 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:55Z","lastTransitionTime":"2026-01-22T11:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.528799 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.528835 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.528849 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.528864 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.528875 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:55Z","lastTransitionTime":"2026-01-22T11:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.632758 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.633547 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.633894 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.633917 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.633928 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:55Z","lastTransitionTime":"2026-01-22T11:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.657690 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:55 crc kubenswrapper[4773]: E0122 11:55:55.657786 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
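Every "Node became not ready" entry carries the same condition object; because the webhook rejects the status PATCH, the condition never lands on the API server and the kubelet keeps re-recording it locally every ~100 ms. Decoding it with a hand-rolled struct makes the fields easy to inspect (the struct below mirrors the logged JSON shape and is a sketch for illustration, not the k8s.io/api types):

package main

import (
	"encoding/json"
	"fmt"
)

// nodeCondition mirrors the JSON shape logged by setters.go above.
type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:55Z","lastTransitionTime":"2026-01-22T11:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`
	var c nodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	// Prints: Ready=False reason=KubeletNotReady
	fmt.Printf("Ready=%s reason=%s\n", c.Status, c.Reason)
}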
pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.668086 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.676275 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 06:51:21.969325704 +0000 UTC Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.736302 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.736513 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.736610 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.736681 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.736739 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:55Z","lastTransitionTime":"2026-01-22T11:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.839210 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.839236 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.839243 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.839256 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.839264 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:55Z","lastTransitionTime":"2026-01-22T11:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.942991 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.943027 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.943038 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.943054 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:55 crc kubenswrapper[4773]: I0122 11:55:55.943067 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:55Z","lastTransitionTime":"2026-01-22T11:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.045975 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.046031 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.046044 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.046066 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.046079 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:56Z","lastTransitionTime":"2026-01-22T11:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.148980 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.149216 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.149317 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.149441 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.149536 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:56Z","lastTransitionTime":"2026-01-22T11:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.252259 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.252346 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.252360 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.252400 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.252412 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:56Z","lastTransitionTime":"2026-01-22T11:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.355143 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.355191 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.355200 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.355216 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.355243 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:56Z","lastTransitionTime":"2026-01-22T11:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.457857 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.457886 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.457897 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.457910 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.457920 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:56Z","lastTransitionTime":"2026-01-22T11:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.560386 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.560460 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.560471 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.560484 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.560495 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:56Z","lastTransitionTime":"2026-01-22T11:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.657619 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 11:55:56 crc kubenswrapper[4773]: E0122 11:55:56.657760 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.657941 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 11:55:56 crc kubenswrapper[4773]: E0122 11:55:56.657985 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.658077 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 11:55:56 crc kubenswrapper[4773]: E0122 11:55:56.658124 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
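The "Error syncing pod, skipping" entries show the gate in action: pods that need the cluster network (the multus metrics daemon, the network diagnostics pods) cannot get a sandbox while the runtime reports NetworkReady=false, whereas host-network pods such as the static kube-rbac-proxy-crio pod added in the SyncLoop above do not depend on CNI and can proceed. An illustrative guard in that spirit (types and names here are invented for the sketch, not the kubelet's actual code):

package main

import (
	"errors"
	"fmt"
)

type pod struct {
	name        string
	hostNetwork bool
}

var errNetworkNotReady = errors.New(
	"network is not ready: container runtime network not ready: NetworkReady=false")

// syncAllowed defers pods that depend on the cluster network until the CNI
// plugin reports ready; host-network pods do not need a CNI-managed sandbox.
func syncAllowed(p pod, networkReady bool) error {
	if networkReady || p.hostNetwork {
		return nil
	}
	return errNetworkNotReady
}

func main() {
	pods := []pod{
		{name: "openshift-multus/network-metrics-daemon-5tqwr"},
		{name: "openshift-machine-config-operator/kube-rbac-proxy-crio-crc", hostNetwork: true},
	}
	for _, p := range pods {
		if err := syncAllowed(p, false); err != nil {
			fmt.Printf("Error syncing pod, skipping: %v pod=%q\n", err, p.name)
			continue
		}
		fmt.Printf("syncing pod %q\n", p.name)
	}
}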
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.662161 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.662193 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.662202 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.662214 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.662224 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:56Z","lastTransitionTime":"2026-01-22T11:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.677409 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 22:10:12.463730439 +0000 UTC Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.764530 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.764562 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.764573 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.764587 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.764598 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:56Z","lastTransitionTime":"2026-01-22T11:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.867186 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.867221 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.867255 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.867272 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.867312 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:56Z","lastTransitionTime":"2026-01-22T11:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.969948 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.970183 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.970249 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.970350 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:56 crc kubenswrapper[4773]: I0122 11:55:56.970424 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:56Z","lastTransitionTime":"2026-01-22T11:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.072925 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.073221 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.073337 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.073437 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.073532 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:57Z","lastTransitionTime":"2026-01-22T11:55:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
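The root cause repeated in every condition is the empty CNI configuration directory: the container runtime scans /etc/kubernetes/cni/net.d/ for a network config and reports NetworkPluginNotReady until one appears (here, until the cluster network pods write it). A sketch of that kind of scan (the directory path is taken from the log; the extension matching follows common CNI conventions and is illustrative, not the runtime's exact loader):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// findCNIConfigs lists candidate CNI network configs in dir; an empty result
// corresponds to the "no CNI configuration file" condition above.
func findCNIConfigs(dir string) ([]string, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return nil, err
	}
	var configs []string
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			configs = append(configs, filepath.Join(dir, e.Name()))
		}
	}
	return configs, nil
}

func main() {
	configs, err := findCNIConfigs("/etc/kubernetes/cni/net.d")
	if err != nil || len(configs) == 0 {
		fmt.Println("no CNI configuration file found; network plugin not ready")
		return
	}
	fmt.Println("CNI configs:", configs)
}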
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.176254 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.176658 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.176757 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.176840 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.176909 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:57Z","lastTransitionTime":"2026-01-22T11:55:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.192323 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs\") pod \"network-metrics-daemon-5tqwr\" (UID: \"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\") " pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:55:57 crc kubenswrapper[4773]: E0122 11:55:57.192573 4773 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 22 11:55:57 crc kubenswrapper[4773]: E0122 11:55:57.192689 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs podName:c1ed4b8d-def5-474b-8629-cd0bae7e49a6 nodeName:}" failed. No retries permitted until 2026-01-22 11:56:29.192663499 +0000 UTC m=+96.770779324 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs") pod "network-metrics-daemon-5tqwr" (UID: "c1ed4b8d-def5-474b-8629-cd0bae7e49a6") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.279485 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.279525 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.279534 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.279546 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.279555 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:57Z","lastTransitionTime":"2026-01-22T11:55:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.381936 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.382239 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.382351 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.382463 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.382749 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:57Z","lastTransitionTime":"2026-01-22T11:55:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
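The failed metrics-certs mount is retried with exponential backoff: "durationBeforeRetry 32s" is a 0.5 s initial delay doubled six times (0.5, 1, 2, 4, 8, 16, 32), and the next attempt is accordingly scheduled for 11:56:29, exactly 32 seconds after the failure. A sketch of that doubling (the 500 ms seed and the cap below are assumptions for illustration; the kubelet's volume manager implements its own variant):

package main

import (
	"fmt"
	"time"
)

// nextDelay doubles the previous delay up to maxDelay, reproducing the
// progression 500ms, 1s, 2s, 4s, 8s, 16s, 32s seen as durationBeforeRetry.
func nextDelay(prev, maxDelay time.Duration) time.Duration {
	if prev == 0 {
		return 500 * time.Millisecond
	}
	if next := 2 * prev; next < maxDelay {
		return next
	}
	return maxDelay
}

func main() {
	var d time.Duration
	for attempt := 1; attempt <= 8; attempt++ {
		d = nextDelay(d, 2*time.Minute) // cap value is an assumption
		fmt.Printf("attempt %d retries after %v\n", attempt, d)
	}
}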
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.484785 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.484819 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.484827 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.484842 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.484851 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:57Z","lastTransitionTime":"2026-01-22T11:55:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.587258 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.587652 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.587842 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.588023 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.588251 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:57Z","lastTransitionTime":"2026-01-22T11:55:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.657222 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:55:57 crc kubenswrapper[4773]: E0122 11:55:57.657397 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.677811 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 00:36:07.132486715 +0000 UTC
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.720908 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.720958 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.720969 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.720986 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.721002 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:57Z","lastTransitionTime":"2026-01-22T11:55:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.824508 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.824561 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.824573 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.824594 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.824608 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:57Z","lastTransitionTime":"2026-01-22T11:55:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.927418 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.927465 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.927476 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.927492 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:57 crc kubenswrapper[4773]: I0122 11:55:57.927505 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:57Z","lastTransitionTime":"2026-01-22T11:55:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.029494 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.029524 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.029532 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.029545 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.029554 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:58Z","lastTransitionTime":"2026-01-22T11:55:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.133021 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.133061 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.133071 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.133087 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.133098 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:58Z","lastTransitionTime":"2026-01-22T11:55:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.235035 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.235080 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.235091 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.235106 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.235116 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:58Z","lastTransitionTime":"2026-01-22T11:55:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.338454 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.338489 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.338497 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.338513 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.338522 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:58Z","lastTransitionTime":"2026-01-22T11:55:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.441311 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.441358 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.441371 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.441389 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.441400 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:58Z","lastTransitionTime":"2026-01-22T11:55:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.543140 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.543184 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.543194 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.543209 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.543222 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:58Z","lastTransitionTime":"2026-01-22T11:55:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.646161 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.646202 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.646210 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.646224 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.646233 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:58Z","lastTransitionTime":"2026-01-22T11:55:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.657824 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.657879 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.657893 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 11:55:58 crc kubenswrapper[4773]: E0122 11:55:58.658030 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:55:58 crc kubenswrapper[4773]: E0122 11:55:58.658100 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:55:58 crc kubenswrapper[4773]: E0122 11:55:58.658211 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.678237 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 04:03:22.735390203 +0000 UTC Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.748854 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.748905 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.748916 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.748932 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.748944 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:58Z","lastTransitionTime":"2026-01-22T11:55:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.851373 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.851410 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.851419 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.851434 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.851445 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:58Z","lastTransitionTime":"2026-01-22T11:55:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.953659 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.953947 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.954011 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.954105 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:58 crc kubenswrapper[4773]: I0122 11:55:58.954202 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:58Z","lastTransitionTime":"2026-01-22T11:55:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.056617 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.056669 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.056678 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.056695 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.056704 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:59Z","lastTransitionTime":"2026-01-22T11:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.159151 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.159204 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.159214 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.159233 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.159247 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:59Z","lastTransitionTime":"2026-01-22T11:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.261266 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.261318 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.261326 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.261341 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.261350 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:59Z","lastTransitionTime":"2026-01-22T11:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.363588 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.363628 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.363640 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.363655 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.363665 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:59Z","lastTransitionTime":"2026-01-22T11:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.466571 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.466634 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.466649 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.466672 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.466688 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:59Z","lastTransitionTime":"2026-01-22T11:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.569408 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.569690 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.569783 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.569882 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.569968 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:59Z","lastTransitionTime":"2026-01-22T11:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.657872 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:55:59 crc kubenswrapper[4773]: E0122 11:55:59.658330 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.672194 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.672321 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.672393 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.672459 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.672626 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:59Z","lastTransitionTime":"2026-01-22T11:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.678457 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 02:06:08.477140871 +0000 UTC Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.775220 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.775453 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.775550 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.775676 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.775747 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:59Z","lastTransitionTime":"2026-01-22T11:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.878851 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.878898 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.878910 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.878928 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.878942 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:59Z","lastTransitionTime":"2026-01-22T11:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.982431 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.982514 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.982531 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.982579 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:55:59 crc kubenswrapper[4773]: I0122 11:55:59.982593 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:55:59Z","lastTransitionTime":"2026-01-22T11:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.085133 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.085169 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.085177 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.085190 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.085198 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:00Z","lastTransitionTime":"2026-01-22T11:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.187706 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.187740 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.187749 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.187761 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.187770 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:00Z","lastTransitionTime":"2026-01-22T11:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.290079 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.290114 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.290121 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.290138 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.290146 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:00Z","lastTransitionTime":"2026-01-22T11:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.392318 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.392366 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.392377 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.392393 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.392407 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:00Z","lastTransitionTime":"2026-01-22T11:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.494625 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.494662 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.494677 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.494693 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.494720 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:00Z","lastTransitionTime":"2026-01-22T11:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.596836 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.596869 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.596877 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.596891 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.596901 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:00Z","lastTransitionTime":"2026-01-22T11:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.657997 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.658061 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.658135 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:00 crc kubenswrapper[4773]: E0122 11:56:00.658144 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:00 crc kubenswrapper[4773]: E0122 11:56:00.658327 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:00 crc kubenswrapper[4773]: E0122 11:56:00.658378 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.679579 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 22:14:16.813220858 +0000 UTC Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.700156 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.700192 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.700201 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.700216 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.700226 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:00Z","lastTransitionTime":"2026-01-22T11:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.802854 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.802891 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.802902 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.802920 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.802932 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:00Z","lastTransitionTime":"2026-01-22T11:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.905587 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.905642 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.905656 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.905672 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:00 crc kubenswrapper[4773]: I0122 11:56:00.905721 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:00Z","lastTransitionTime":"2026-01-22T11:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.007724 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.007766 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.007775 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.007790 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.007804 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:01Z","lastTransitionTime":"2026-01-22T11:56:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.110694 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.110730 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.110740 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.110755 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.110765 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:01Z","lastTransitionTime":"2026-01-22T11:56:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.143278 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tsndt_73fd13f5-159b-444d-9d03-1e5fdd943673/kube-multus/0.log"
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.143561 4773 generic.go:334] "Generic (PLEG): container finished" podID="73fd13f5-159b-444d-9d03-1e5fdd943673" containerID="9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab" exitCode=1
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.143588 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tsndt" event={"ID":"73fd13f5-159b-444d-9d03-1e5fdd943673","Type":"ContainerDied","Data":"9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab"}
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.143917 4773 scope.go:117] "RemoveContainer" containerID="9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab"
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.160902 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:01Z is after 2025-08-24T17:21:41Z"
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.172296 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:01Z is after 2025-08-24T17:21:41Z"
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.189196 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:45Z\\\",\\\"message\\\":\\\"map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 11:55:45.493862 6468 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-image-registry/image-registry]} name:Service_openshift-image-registry/image-registry_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 11:55:45.494522 6468 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:45.494555 6468 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 11:55:45.494625 6468 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-pplsq_openshift-ovn-kubernetes(7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:01Z is after 2025-08-24T17:21:41Z"
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.202066 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:01Z is after 2025-08-24T17:21:41Z"
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.213498 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.213544 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.213553 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.213566 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.213575 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:01Z","lastTransitionTime":"2026-01-22T11:56:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.214969 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:01Z is after 2025-08-24T17:21:41Z"
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.226502 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:01Z is after 2025-08-24T17:21:41Z"
Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.239793 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:01Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.252188 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d183042e0709480c05e56952a6021a8d816c3c4136509ae049ef868457db54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ddcc51dc7dfeb1f18990d5a1daa85fa7ef773f90b28fc012ac683871d66a634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8xrc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:01Z is after 2025-08-24T17:21:41Z" Jan 22 
11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.264829 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5tqwr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:01Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.281575 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:01Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.293700 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8914af7c-cff6-4066-ba3f-47ff0d1676f3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c3bc0732e748f8a71ec695e3bf7a436347f7f2de2c14233295e157d42182295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2d37afbe5a7d81a05ce577e8a66d1de8b6b4d34328073e97fcfaec06f35297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318
bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a2d37afbe5a7d81a05ce577e8a66d1de8b6b4d34328073e97fcfaec06f35297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:01Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.310741 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@s
ha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:01Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.315883 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.315931 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:01 crc 
kubenswrapper[4773]: I0122 11:56:01.315945 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.315968 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.315989 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:01Z","lastTransitionTime":"2026-01-22T11:56:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.321694 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:01Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.334756 4773 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:56:00Z\\\",\\\"message\\\":\\\"2026-01-22T11:55:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7a3a9920-101a-4d8f-845e-22899bfefe13\\\\n2026-01-22T11:55:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7a3a9920-101a-4d8f-845e-22899bfefe13 to /host/opt/cni/bin/\\\\n2026-01-22T11:55:15Z [verbose] multus-daemon started\\\\n2026-01-22T11:55:15Z [verbose] Readiness Indicator file check\\\\n2026-01-22T11:56:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:01Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.349082 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:01Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.362003 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6349121-8063-49ee-91ce-cdb8ba45ffdb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75f708c0004ed30f729db1f335241b3005caed61f0d6dab2ac61685f14c189c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dbf974e110262739b5ac93b528d8acd1f01163d825c0351bf4882e36548efdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea5835dddfbe2535ef6ec62405f9f325318d3951aaec646c967edbe896e8f827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:01Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.381773 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:01Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.394981 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:01Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.418433 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.418464 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.418475 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.418491 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.418502 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:01Z","lastTransitionTime":"2026-01-22T11:56:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.520911 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.520944 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.520956 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.520971 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.520984 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:01Z","lastTransitionTime":"2026-01-22T11:56:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.624126 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.624176 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.624186 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.624199 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.624207 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:01Z","lastTransitionTime":"2026-01-22T11:56:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.656979 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:01 crc kubenswrapper[4773]: E0122 11:56:01.657402 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.657651 4773 scope.go:117] "RemoveContainer" containerID="f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c" Jan 22 11:56:01 crc kubenswrapper[4773]: E0122 11:56:01.657807 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-pplsq_openshift-ovn-kubernetes(7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.680122 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 00:16:12.37956645 +0000 UTC Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.726211 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.726275 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.726304 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.726319 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.726330 4773 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:01Z","lastTransitionTime":"2026-01-22T11:56:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.828919 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.828959 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.828970 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.828986 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.828998 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:01Z","lastTransitionTime":"2026-01-22T11:56:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.931338 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.931382 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.931395 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.931413 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:01 crc kubenswrapper[4773]: I0122 11:56:01.931424 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:01Z","lastTransitionTime":"2026-01-22T11:56:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.033717 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.033792 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.033805 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.033824 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.033841 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:02Z","lastTransitionTime":"2026-01-22T11:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.136581 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.136632 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.136642 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.136662 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.136674 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:02Z","lastTransitionTime":"2026-01-22T11:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.147312 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tsndt_73fd13f5-159b-444d-9d03-1e5fdd943673/kube-multus/0.log" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.147362 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tsndt" event={"ID":"73fd13f5-159b-444d-9d03-1e5fdd943673","Type":"ContainerStarted","Data":"64176368ef89ead7cca0621b251ccf3db24dbb357359840e82113f80f8d4b7cd"} Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.161182 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.171514 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.181094 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.190433 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.206057 4773 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:45Z\\\",\\\"message\\\":\\\"map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 11:55:45.493862 6468 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-image-registry/image-registry]} name:Service_openshift-image-registry/image-registry_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 11:55:45.494522 6468 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:45.494555 6468 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 11:55:45.494625 6468 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pplsq_openshift-ovn-kubernetes(7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.218549 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.227746 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8914af7c-cff6-4066-ba3f-47ff0d1676f3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c3bc0732e748f8a71ec695e3bf7a436347f7f2de2c14233295e157d42182295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2d37afbe5a7d81a05ce577e8a66d1de8b6b4d34328073e97fcfaec06f35297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a2d37afbe5a7d81a05ce577e8a66d1de8b6b4d34328073e97fcfaec06f35297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.239871 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.239910 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.239920 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.239937 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.239949 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:02Z","lastTransitionTime":"2026-01-22T11:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.239885 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.252549 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.267636 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.278412 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d183042e0709480c05e56952a6021a8d816c3c4136509ae049ef868457db54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ddcc51dc7dfeb1f18990d5a1daa85fa7ef773f90b28fc012ac683871d66a634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8xrc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 
11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.288799 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5tqwr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.301676 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.313506 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6349121-8063-49ee-91ce-cdb8ba45ffdb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75f708c0004ed30f729db1f335241b3005caed61f0d6dab2ac61685f14c189c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dbf974e110262739b5ac93b528d8acd1f01163d825c0351bf4882e36548efdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea5835dddfbe2535ef6ec62405f9f325318d3951aaec646c967edbe896e8f827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.326923 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.342882 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.342930 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.342940 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.342959 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.342971 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:02Z","lastTransitionTime":"2026-01-22T11:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.343015 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.358835 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64176368ef89ead7cca0621b251ccf3db24dbb357359840e82113f80f8d4b7cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:56:00Z\\\",\\\"message\\\":\\\"2026-01-22T11:55:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7a3a9920-101a-4d8f-845e-22899bfefe13\\\\n2026-01-22T11:55:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7a3a9920-101a-4d8f-845e-22899bfefe13 to /host/opt/cni/bin/\\\\n2026-01-22T11:55:15Z [verbose] multus-daemon started\\\\n2026-01-22T11:55:15Z [verbose] Readiness Indicator file check\\\\n2026-01-22T11:56:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:56:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.369847 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.445588 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.445622 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.445632 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.445649 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.445660 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:02Z","lastTransitionTime":"2026-01-22T11:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.547945 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.547980 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.547989 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.548002 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.548011 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:02Z","lastTransitionTime":"2026-01-22T11:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.651812 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.651864 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.651872 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.651887 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.651899 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:02Z","lastTransitionTime":"2026-01-22T11:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.657198 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:02 crc kubenswrapper[4773]: E0122 11:56:02.657390 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.657446 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.657510 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:02 crc kubenswrapper[4773]: E0122 11:56:02.657610 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:02 crc kubenswrapper[4773]: E0122 11:56:02.657712 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.672171 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.680680 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 03:49:30.165903273 +0000 UTC Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.685696 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64176368ef89ead7cca0621b251ccf3db24dbb357359840e82113f80f8d4b7cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:56:00Z\\\",\\\"message\\\":\\\"2026-01-22T11:55:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7a3a9920-101a-4d8f-845e-22899bfefe13\\\\n2026-01-22T11:55:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7a3a9920-101a-4d8f-845e-22899bfefe13 to /host/opt/cni/bin/\\\\n2026-01-22T11:55:15Z [verbose] multus-daemon started\\\\n2026-01-22T11:55:15Z [verbose] Readiness Indicator file check\\\\n2026-01-22T11:56:00Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:56:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.699157 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.711001 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6349121-8063-49ee-91ce-cdb8ba45ffdb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75f708c0004ed30f729db1f335241b3005caed61f0d6dab2ac61685f14c189c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dbf974e110262739b5ac93b528d8acd1f01163d825c0351bf4882e36548efdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea5835dddfbe2535ef6ec62405f9f325318d3951aaec646c967edbe896e8f827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.723237 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.737581 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.754170 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.754214 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.754222 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.754236 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.754247 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:02Z","lastTransitionTime":"2026-01-22T11:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.754474 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.765397 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.785453 4773 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:45Z\\\",\\\"message\\\":\\\"map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 11:55:45.493862 6468 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-image-registry/image-registry]} name:Service_openshift-image-registry/image-registry_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 11:55:45.494522 6468 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:45.494555 6468 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 11:55:45.494625 6468 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pplsq_openshift-ovn-kubernetes(7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.798867 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.811576 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.821413 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.836298 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.848634 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d183042e0709480c05e56952a6021a8d816c3c4136509ae049ef868457db54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ddcc51dc7dfeb1f18990d5a1daa85fa7ef773f90b28fc012ac683871d66a634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8xrc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 
11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.856380 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.857125 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.857151 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.857174 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.857186 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:02Z","lastTransitionTime":"2026-01-22T11:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.860471 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5tqwr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.872209 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.881301 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8914af7c-cff6-4066-ba3f-47ff0d1676f3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c3bc0732e748f8a71ec695e3bf7a436347f7f2de2c14233295e157d42182295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2d37afbe5a7d81a05ce577e8a66d1de8b6b4d34328073e97fcfaec06f35297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318
bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a2d37afbe5a7d81a05ce577e8a66d1de8b6b4d34328073e97fcfaec06f35297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.890905 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:02Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.959565 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.959603 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.959614 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.959630 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:02 crc kubenswrapper[4773]: I0122 11:56:02.959641 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:02Z","lastTransitionTime":"2026-01-22T11:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.062372 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.062439 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.062451 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.062474 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.062487 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:03Z","lastTransitionTime":"2026-01-22T11:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.164791 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.164834 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.164846 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.164862 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.164872 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:03Z","lastTransitionTime":"2026-01-22T11:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.267161 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.267200 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.267210 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.267234 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.267244 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:03Z","lastTransitionTime":"2026-01-22T11:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.369220 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.369253 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.369265 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.369296 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.369307 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:03Z","lastTransitionTime":"2026-01-22T11:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.471504 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.471774 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.471861 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.471930 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.472006 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:03Z","lastTransitionTime":"2026-01-22T11:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.573978 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.574011 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.574028 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.574046 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.574058 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:03Z","lastTransitionTime":"2026-01-22T11:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.657142 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:03 crc kubenswrapper[4773]: E0122 11:56:03.657278 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.676946 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.676977 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.676985 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.676998 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.677007 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:03Z","lastTransitionTime":"2026-01-22T11:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.681334 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 20:09:17.585775328 +0000 UTC Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.779838 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.779942 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.779981 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.780016 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.780046 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:03Z","lastTransitionTime":"2026-01-22T11:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.882967 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.883009 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.883023 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.883039 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.883052 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:03Z","lastTransitionTime":"2026-01-22T11:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.985260 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.985324 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.985359 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.985377 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:03 crc kubenswrapper[4773]: I0122 11:56:03.985389 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:03Z","lastTransitionTime":"2026-01-22T11:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.088208 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.088240 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.088249 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.088262 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.088270 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:04Z","lastTransitionTime":"2026-01-22T11:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.191067 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.191105 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.191115 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.191129 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.191138 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:04Z","lastTransitionTime":"2026-01-22T11:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.293966 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.294003 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.294014 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.294030 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.294042 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:04Z","lastTransitionTime":"2026-01-22T11:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.397484 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.397534 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.397546 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.397563 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.397575 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:04Z","lastTransitionTime":"2026-01-22T11:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.499922 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.499965 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.499975 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.499995 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.500006 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:04Z","lastTransitionTime":"2026-01-22T11:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.602237 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.602317 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.602329 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.602344 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.602354 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:04Z","lastTransitionTime":"2026-01-22T11:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.657253 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:04 crc kubenswrapper[4773]: E0122 11:56:04.657385 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.657485 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.657547 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:04 crc kubenswrapper[4773]: E0122 11:56:04.657605 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:04 crc kubenswrapper[4773]: E0122 11:56:04.657720 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.681653 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 15:27:08.607189667 +0000 UTC Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.704491 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.704543 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.704556 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.704574 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.704584 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:04Z","lastTransitionTime":"2026-01-22T11:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.807101 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.807147 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.807159 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.807178 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.807190 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:04Z","lastTransitionTime":"2026-01-22T11:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.910341 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.910396 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.910408 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.910429 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.910440 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:04Z","lastTransitionTime":"2026-01-22T11:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.959993 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.960028 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.960040 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.960056 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.960068 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:04Z","lastTransitionTime":"2026-01-22T11:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:04 crc kubenswrapper[4773]: E0122 11:56:04.973311 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:04Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.977190 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.977226 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.977238 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.977252 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.977263 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:04Z","lastTransitionTime":"2026-01-22T11:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:04 crc kubenswrapper[4773]: E0122 11:56:04.989749 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:04Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.992938 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.993180 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.993263 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.993358 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:04 crc kubenswrapper[4773]: I0122 11:56:04.993430 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:04Z","lastTransitionTime":"2026-01-22T11:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:05 crc kubenswrapper[4773]: E0122 11:56:05.003298 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:05Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.006674 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.006781 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.006953 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.007161 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.007327 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:05Z","lastTransitionTime":"2026-01-22T11:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:05 crc kubenswrapper[4773]: E0122 11:56:05.018846 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:05Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.022415 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.022470 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.022487 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.022501 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.022511 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:05Z","lastTransitionTime":"2026-01-22T11:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:05 crc kubenswrapper[4773]: E0122 11:56:05.033122 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:05Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:05 crc kubenswrapper[4773]: E0122 11:56:05.033236 4773 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.034592 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.034617 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.034625 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.034638 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.034647 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:05Z","lastTransitionTime":"2026-01-22T11:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.137388 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.137467 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.137491 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.137517 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.137539 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:05Z","lastTransitionTime":"2026-01-22T11:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.240049 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.240084 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.240092 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.240104 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.240116 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:05Z","lastTransitionTime":"2026-01-22T11:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.342279 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.342338 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.342346 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.342361 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.342370 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:05Z","lastTransitionTime":"2026-01-22T11:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.445338 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.445648 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.445711 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.445781 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.445838 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:05Z","lastTransitionTime":"2026-01-22T11:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.548240 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.548538 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.548628 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.548721 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.548801 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:05Z","lastTransitionTime":"2026-01-22T11:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.650923 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.650956 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.650966 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.650979 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.650987 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:05Z","lastTransitionTime":"2026-01-22T11:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.657660 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:05 crc kubenswrapper[4773]: E0122 11:56:05.657981 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.683127 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 19:30:39.477773248 +0000 UTC Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.753091 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.753122 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.753131 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.753145 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.753156 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:05Z","lastTransitionTime":"2026-01-22T11:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.855517 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.855589 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.855602 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.855619 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.855631 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:05Z","lastTransitionTime":"2026-01-22T11:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.957916 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.957994 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.958008 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.958043 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:05 crc kubenswrapper[4773]: I0122 11:56:05.958052 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:05Z","lastTransitionTime":"2026-01-22T11:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.059974 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.060005 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.060014 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.060029 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.060039 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:06Z","lastTransitionTime":"2026-01-22T11:56:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.162570 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.162606 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.162618 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.162635 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.162646 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:06Z","lastTransitionTime":"2026-01-22T11:56:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.265339 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.265371 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.265380 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.265416 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.265425 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:06Z","lastTransitionTime":"2026-01-22T11:56:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.368022 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.368089 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.368101 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.368118 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.368129 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:06Z","lastTransitionTime":"2026-01-22T11:56:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.470894 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.470957 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.470981 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.471013 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.471030 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:06Z","lastTransitionTime":"2026-01-22T11:56:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.573223 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.573337 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.573355 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.573369 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.573377 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:06Z","lastTransitionTime":"2026-01-22T11:56:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.657459 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.657513 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:06 crc kubenswrapper[4773]: E0122 11:56:06.657589 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.657637 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:06 crc kubenswrapper[4773]: E0122 11:56:06.657685 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:06 crc kubenswrapper[4773]: E0122 11:56:06.657930 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.675311 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.675351 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.675364 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.675380 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.675392 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:06Z","lastTransitionTime":"2026-01-22T11:56:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.683483 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 15:37:06.67488087 +0000 UTC
Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.777338 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.777365 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.777372 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.777386 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.777394 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:06Z","lastTransitionTime":"2026-01-22T11:56:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.880086 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.880121 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.880132 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.880146 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.880157 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:06Z","lastTransitionTime":"2026-01-22T11:56:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.982017 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.982068 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.982079 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.982097 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:06 crc kubenswrapper[4773]: I0122 11:56:06.982108 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:06Z","lastTransitionTime":"2026-01-22T11:56:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.084464 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.084499 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.084507 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.084522 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.084530 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:07Z","lastTransitionTime":"2026-01-22T11:56:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.187006 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.187065 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.187075 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.187091 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.187103 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:07Z","lastTransitionTime":"2026-01-22T11:56:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.289217 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.289302 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.289313 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.289333 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.289344 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:07Z","lastTransitionTime":"2026-01-22T11:56:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.392455 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.392526 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.392539 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.392563 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.392575 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:07Z","lastTransitionTime":"2026-01-22T11:56:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.494935 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.494975 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.494987 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.495002 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.495012 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:07Z","lastTransitionTime":"2026-01-22T11:56:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.597906 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.597960 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.597972 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.597990 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.598001 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:07Z","lastTransitionTime":"2026-01-22T11:56:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.657083 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:56:07 crc kubenswrapper[4773]: E0122 11:56:07.657205 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.684342 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 10:36:43.687749876 +0000 UTC
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.700020 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.700050 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.700060 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.700080 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.700091 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:07Z","lastTransitionTime":"2026-01-22T11:56:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.803793 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.803915 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.803940 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.804011 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.804032 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:07Z","lastTransitionTime":"2026-01-22T11:56:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.906580 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.906638 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.906651 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.906670 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:07 crc kubenswrapper[4773]: I0122 11:56:07.906682 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:07Z","lastTransitionTime":"2026-01-22T11:56:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.009160 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.009204 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.009213 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.009228 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.009238 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:08Z","lastTransitionTime":"2026-01-22T11:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.112085 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.112128 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.112139 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.112157 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.112171 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:08Z","lastTransitionTime":"2026-01-22T11:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.214647 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.214699 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.214713 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.214731 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.214741 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:08Z","lastTransitionTime":"2026-01-22T11:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.317106 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.317143 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.317153 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.317170 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.317180 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:08Z","lastTransitionTime":"2026-01-22T11:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.419360 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.419401 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.419416 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.419433 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.419444 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:08Z","lastTransitionTime":"2026-01-22T11:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.522986 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.523312 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.523412 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.523514 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.523595 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:08Z","lastTransitionTime":"2026-01-22T11:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.625959 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.626024 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.626038 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.626053 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.626065 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:08Z","lastTransitionTime":"2026-01-22T11:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.657415 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.657540 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 11:56:08 crc kubenswrapper[4773]: E0122 11:56:08.657594 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 11:56:08 crc kubenswrapper[4773]: E0122 11:56:08.657537 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.657429 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 11:56:08 crc kubenswrapper[4773]: E0122 11:56:08.657656 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.685013 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 04:49:14.777589919 +0000 UTC
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.727997 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.728347 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.728556 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.728771 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.728930 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:08Z","lastTransitionTime":"2026-01-22T11:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.832528 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.832581 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.832598 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.832619 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.832637 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:08Z","lastTransitionTime":"2026-01-22T11:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.934753 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.935055 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.935258 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.935512 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:08 crc kubenswrapper[4773]: I0122 11:56:08.935682 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:08Z","lastTransitionTime":"2026-01-22T11:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.038438 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.038722 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.038915 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.039060 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.039182 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:09Z","lastTransitionTime":"2026-01-22T11:56:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.141557 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.141607 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.141618 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.141635 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.141646 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:09Z","lastTransitionTime":"2026-01-22T11:56:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.245403 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.245456 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.245477 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.245505 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.245525 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:09Z","lastTransitionTime":"2026-01-22T11:56:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.348817 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.348885 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.348908 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.348974 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.348997 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:09Z","lastTransitionTime":"2026-01-22T11:56:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.451059 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.451123 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.451147 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.451165 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.451177 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:09Z","lastTransitionTime":"2026-01-22T11:56:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.554068 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.554107 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.554118 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.554135 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.554145 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:09Z","lastTransitionTime":"2026-01-22T11:56:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.656506 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.656548 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.656559 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.656576 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.656588 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:09Z","lastTransitionTime":"2026-01-22T11:56:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.657120 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:56:09 crc kubenswrapper[4773]: E0122 11:56:09.657241 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.686547 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 14:58:10.389914486 +0000 UTC
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.759518 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.759562 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.759572 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.759590 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.759600 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:09Z","lastTransitionTime":"2026-01-22T11:56:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.862035 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.862090 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.862098 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.862114 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.862125 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:09Z","lastTransitionTime":"2026-01-22T11:56:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.965060 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.965111 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.965125 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.965146 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:09 crc kubenswrapper[4773]: I0122 11:56:09.965167 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:09Z","lastTransitionTime":"2026-01-22T11:56:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.068065 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.068361 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.068440 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.068502 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.068562 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:10Z","lastTransitionTime":"2026-01-22T11:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.171232 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.171307 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.171322 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.171343 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.171356 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:10Z","lastTransitionTime":"2026-01-22T11:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.273873 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.273952 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.273965 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.273985 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.273999 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:10Z","lastTransitionTime":"2026-01-22T11:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.377982 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.378018 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.378029 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.378045 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.378056 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:10Z","lastTransitionTime":"2026-01-22T11:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.480921 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.480961 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.480969 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.480983 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.480991 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:10Z","lastTransitionTime":"2026-01-22T11:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.583105 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.583425 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.583631 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.583800 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.584011 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:10Z","lastTransitionTime":"2026-01-22T11:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.658078 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.658145 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.658084 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:10 crc kubenswrapper[4773]: E0122 11:56:10.658203 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:10 crc kubenswrapper[4773]: E0122 11:56:10.658269 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:10 crc kubenswrapper[4773]: E0122 11:56:10.658440 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.686636 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 15:48:00.1014461 +0000 UTC Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.686701 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.686755 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.686774 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.686801 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.686825 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:10Z","lastTransitionTime":"2026-01-22T11:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.789407 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.789452 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.789464 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.789481 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.789492 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:10Z","lastTransitionTime":"2026-01-22T11:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.892626 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.892683 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.892694 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.892709 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.892717 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:10Z","lastTransitionTime":"2026-01-22T11:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.996612 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.996668 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.996680 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.996698 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:10 crc kubenswrapper[4773]: I0122 11:56:10.996716 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:10Z","lastTransitionTime":"2026-01-22T11:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.100732 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.100992 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.101112 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.101200 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.101280 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:11Z","lastTransitionTime":"2026-01-22T11:56:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.203981 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.204042 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.204074 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.204094 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.204110 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:11Z","lastTransitionTime":"2026-01-22T11:56:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.307466 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.307827 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.307987 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.308215 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.308384 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:11Z","lastTransitionTime":"2026-01-22T11:56:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.411505 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.411591 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.411615 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.411645 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.411667 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:11Z","lastTransitionTime":"2026-01-22T11:56:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.514196 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.514236 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.514248 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.514264 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.514275 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:11Z","lastTransitionTime":"2026-01-22T11:56:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.616909 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.616955 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.616967 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.616985 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.616994 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:11Z","lastTransitionTime":"2026-01-22T11:56:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.657669 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:11 crc kubenswrapper[4773]: E0122 11:56:11.657808 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.687255 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 23:11:23.643501121 +0000 UTC Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.719371 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.719604 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.719724 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.719858 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.719950 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:11Z","lastTransitionTime":"2026-01-22T11:56:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.822854 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.823099 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.823201 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.823297 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.823400 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:11Z","lastTransitionTime":"2026-01-22T11:56:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.925872 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.926142 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.926204 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.926265 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:11 crc kubenswrapper[4773]: I0122 11:56:11.926382 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:11Z","lastTransitionTime":"2026-01-22T11:56:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.028400 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.028664 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.028764 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.028833 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.028889 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:12Z","lastTransitionTime":"2026-01-22T11:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.131978 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.132019 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.132029 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.132044 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.132057 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:12Z","lastTransitionTime":"2026-01-22T11:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.234229 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.234542 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.234613 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.234677 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.234737 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:12Z","lastTransitionTime":"2026-01-22T11:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.336917 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.336956 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.336966 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.336979 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.336988 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:12Z","lastTransitionTime":"2026-01-22T11:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.438969 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.439008 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.439017 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.439031 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.439040 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:12Z","lastTransitionTime":"2026-01-22T11:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.541231 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.541531 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.541610 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.541696 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.541805 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:12Z","lastTransitionTime":"2026-01-22T11:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.644361 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.644633 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.644702 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.644775 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.644878 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:12Z","lastTransitionTime":"2026-01-22T11:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.657800 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.658080 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.657935 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:12 crc kubenswrapper[4773]: E0122 11:56:12.658363 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:12 crc kubenswrapper[4773]: E0122 11:56:12.658541 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:12 crc kubenswrapper[4773]: E0122 11:56:12.658740 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.671486 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.684341 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.687643 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 23:23:21.123586817 +0000 UTC Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.703099 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c8263709324b069b1f6b61e6ff1397159a4d25
761eabaec9f2e7eec076e92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:45Z\\\",\\\"message\\\":\\\"map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 11:55:45.493862 6468 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-image-registry/image-registry]} name:Service_openshift-image-registry/image-registry_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 11:55:45.494522 6468 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:45.494555 6468 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 11:55:45.494625 6468 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pplsq_openshift-ovn-kubernetes(7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.716674 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.732993 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.746028 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.746901 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.747001 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.747075 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.747152 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.747249 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:12Z","lastTransitionTime":"2026-01-22T11:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.761563 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.771807 4773 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d183042e0709480c05e56952a6021a8d816c3c4136509ae049ef868457db54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ddcc51dc7dfeb1f18990d5a1daa85fa7ef773f90b28fc012ac683871d66a634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8xrc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-22T11:56:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.782722 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5tqwr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.792964 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.803865 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8914af7c-cff6-4066-ba3f-47ff0d1676f3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c3bc0732e748f8a71ec695e3bf7a436347f7f2de2c14233295e157d42182295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2d37afbe5a7d81a05ce577e8a66d1de8b6b4d34328073e97fcfaec06f35297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a2d37afbe5a7d81a05ce577e8a66d1de8b6b4d34328073e97fcfaec06f35297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.817560 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.826947 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.838347 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64176368ef89ead7cca0621b251ccf3db24dbb357359840e82113f80f8d4b7cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:56:00Z\\\",\\\"message\\\":\\\"2026-01-22T11:55:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7a3a9920-101a-4d8f-845e-22899bfefe13\\\\n2026-01-22T11:55:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7a3a9920-101a-4d8f-845e-22899bfefe13 to /host/opt/cni/bin/\\\\n2026-01-22T11:55:15Z [verbose] multus-daemon started\\\\n2026-01-22T11:55:15Z [verbose] Readiness Indicator file check\\\\n2026-01-22T11:56:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:56:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.847784 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.849216 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.849248 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.849258 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.849274 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.849305 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:12Z","lastTransitionTime":"2026-01-22T11:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.862229 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6349121-8063-49ee-91ce-cdb8ba45ffdb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75f708c0004ed30f729db1f335241b3005caed61f0d6dab2ac61685f14c189c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dbf974e110262739b5ac93b528d8acd1f01163d825c0351bf4882e36548efdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea5835dddfbe2535ef6ec62405f9f325318d3951aaec646c967edbe896e8f827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kuber
netes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.873959 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.884796 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:12Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.952467 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.952708 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.952825 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.952897 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:12 crc kubenswrapper[4773]: I0122 11:56:12.952967 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:12Z","lastTransitionTime":"2026-01-22T11:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.055905 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.055952 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.055968 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.055988 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.055998 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:13Z","lastTransitionTime":"2026-01-22T11:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.158245 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.158809 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.158881 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.158945 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.159007 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:13Z","lastTransitionTime":"2026-01-22T11:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.261711 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.261747 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.261758 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.261773 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.261783 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:13Z","lastTransitionTime":"2026-01-22T11:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.364012 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.364052 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.364063 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.364079 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.364094 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:13Z","lastTransitionTime":"2026-01-22T11:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.466057 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.466097 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.466105 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.466118 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.466126 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:13Z","lastTransitionTime":"2026-01-22T11:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.568627 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.568672 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.568681 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.568700 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.568711 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:13Z","lastTransitionTime":"2026-01-22T11:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.657680 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:13 crc kubenswrapper[4773]: E0122 11:56:13.658164 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.658488 4773 scope.go:117] "RemoveContainer" containerID="f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.670315 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.670482 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.670593 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.670663 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.670733 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:13Z","lastTransitionTime":"2026-01-22T11:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.688231 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 03:56:50.963385745 +0000 UTC Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.773524 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.773582 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.773594 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.773621 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.773641 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:13Z","lastTransitionTime":"2026-01-22T11:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
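
[Editor's note] The certificate_manager.go record just above (and its counterpart about a second later, which reports a different rotation deadline of 2025-11-26) reflects how the kubelet-serving certificate manager schedules rotation: recent upstream Kubernetes picks a uniformly random point between roughly 70% and 90% of the certificate's validity window each time it recomputes the deadline, which is why successive log lines disagree. Both logged deadlines already lie in the past relative to the node clock, so rotation is overdue. A sketch of that jitter scheme, with an assumed one-year validity purely for illustration:

    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    // rotationDeadline mirrors the jitter scheme used by the Kubernetes
    // certificate manager: rotation lands at a uniformly random point
    // between 70% and 90% of the validity window, so repeated
    // computations (as in the log above) yield different deadlines.
    func rotationDeadline(notBefore, notAfter time.Time) time.Time {
        total := notAfter.Sub(notBefore)
        jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
        return notBefore.Add(jittered)
    }

    func main() {
        // The expiry matches the log; the one-year validity is an assumption.
        notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z")
        notBefore := notAfter.Add(-365 * 24 * time.Hour)
        for i := 0; i < 3; i++ {
            fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter).UTC())
        }
    }
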
Has your network provider started?"} Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.876137 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.876180 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.876192 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.876210 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.876228 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:13Z","lastTransitionTime":"2026-01-22T11:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.978600 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.978670 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.978680 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.978695 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:13 crc kubenswrapper[4773]: I0122 11:56:13.978707 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:13Z","lastTransitionTime":"2026-01-22T11:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.081715 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.081765 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.081776 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.081794 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.081807 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:14Z","lastTransitionTime":"2026-01-22T11:56:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.183993 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.184078 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.184089 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.184117 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.184130 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:14Z","lastTransitionTime":"2026-01-22T11:56:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.286250 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.286276 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.286305 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.286318 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.286326 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:14Z","lastTransitionTime":"2026-01-22T11:56:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.388951 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.388981 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.388989 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.389001 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.389008 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:14Z","lastTransitionTime":"2026-01-22T11:56:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.491915 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.491941 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.491949 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.491961 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.491969 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:14Z","lastTransitionTime":"2026-01-22T11:56:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.595838 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.595891 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.595903 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.595921 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.595933 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:14Z","lastTransitionTime":"2026-01-22T11:56:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.657644 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.657740 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:14 crc kubenswrapper[4773]: E0122 11:56:14.657849 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.657668 4773 util.go:30] "No sandbox for pod can be found. 
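
[Editor's note] The recurring "Node became not ready" condition carries a single message: the container runtime reports NetworkPluginNotReady because /etc/kubernetes/cni/net.d/ holds no CNI configuration file. The runtime's readiness probe for the network plugin amounts to scanning that directory for network configs. A self-contained sketch of such a check follows; the accepted extensions mirror CNI's libcni conventions (.conf, .conflist, .json) and are an assumption here, not a guarantee about this runtime's exact filter:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    // hasCNIConfig reports whether dir contains at least one CNI network
    // configuration file, approximating the runtime's readiness scan.
    func hasCNIConfig(dir string) (bool, error) {
        entries, err := os.ReadDir(dir)
        if err != nil {
            return false, err
        }
        for _, e := range entries {
            if e.IsDir() {
                continue
            }
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                return true, nil
            }
        }
        return false, nil
    }

    func main() {
        ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        if !ok {
            // The condition kubelet surfaces as
            // "NetworkReady=false ... NetworkPluginNotReady".
            fmt.Println("no CNI configuration file found; network plugin not ready")
            os.Exit(1)
        }
        fmt.Println("CNI configuration present")
    }

Until the network provider (here, ovnkube-node, whose controller is seen starting at 11:56:15) writes a config into that directory, every node-status sync re-records the same NotReady condition.
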
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:14 crc kubenswrapper[4773]: E0122 11:56:14.658037 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:14 crc kubenswrapper[4773]: E0122 11:56:14.658182 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.689352 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 07:17:44.977396757 +0000 UTC Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.698538 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.698597 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.698619 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.698650 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.698673 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:14Z","lastTransitionTime":"2026-01-22T11:56:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.713151 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:56:14 crc kubenswrapper[4773]: E0122 11:56:14.713812 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:18.713789085 +0000 UTC m=+146.291904910 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.801730 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.801762 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.801772 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.801786 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.801795 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:14Z","lastTransitionTime":"2026-01-22T11:56:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.814488 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.814536 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.814569 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.814593 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:14 crc kubenswrapper[4773]: E0122 11:56:14.814635 4773 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Jan 22 11:56:14 crc kubenswrapper[4773]: E0122 11:56:14.814684 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 11:56:14 crc kubenswrapper[4773]: E0122 11:56:14.814696 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 11:56:14 crc kubenswrapper[4773]: E0122 11:56:14.814702 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:18.814682949 +0000 UTC m=+146.392798774 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 11:56:14 crc kubenswrapper[4773]: E0122 11:56:14.814708 4773 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:56:14 crc kubenswrapper[4773]: E0122 11:56:14.814737 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:18.814729111 +0000 UTC m=+146.392844936 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:56:14 crc kubenswrapper[4773]: E0122 11:56:14.814785 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 11:56:14 crc kubenswrapper[4773]: E0122 11:56:14.814821 4773 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 11:56:14 crc kubenswrapper[4773]: E0122 11:56:14.814821 4773 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 11:56:14 crc kubenswrapper[4773]: E0122 11:56:14.814837 4773 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:56:14 crc kubenswrapper[4773]: E0122 11:56:14.814909 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:18.814887086 +0000 UTC m=+146.393002911 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 11:56:14 crc kubenswrapper[4773]: E0122 11:56:14.814929 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:18.814922027 +0000 UTC m=+146.393037852 (durationBeforeRetry 1m4s). 
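
[Editor's note] Two details of the retry records above are worth decoding. First, the delay "durationBeforeRetry 1m4s" is consistent with exponential backoff that starts at 500ms and doubles on each failure (0.5s x 2^7 = 64s); the initial value and any cap are inferred from that arithmetic, not quoted from kubelet's source. Second, the "m=+146.392844936" suffix on the retry timestamps is Go's monotonic clock reading, which time.Time.String() appends whenever the value still carries one. A small sketch of both:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Assumed backoff parameters: start at 500ms, double per failure.
        // The delay reaches the logged 1m4s on the 8th attempt.
        delay := 500 * time.Millisecond
        for attempt := 1; attempt <= 8; attempt++ {
            fmt.Printf("attempt %d: durationBeforeRetry %s\n", attempt, delay)
            delay *= 2
        }
        // time.Now() retains a monotonic reading, so printing it shows the
        // same "m=+..." suffix seen in the kubelet log timestamps.
        fmt.Println(time.Now())
    }
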
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.904037 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.904077 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.904085 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.904102 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:14 crc kubenswrapper[4773]: I0122 11:56:14.904114 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:14Z","lastTransitionTime":"2026-01-22T11:56:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.006267 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.006356 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.006364 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.006377 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.006386 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:15Z","lastTransitionTime":"2026-01-22T11:56:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.108465 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.108502 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.108512 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.108525 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.108535 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:15Z","lastTransitionTime":"2026-01-22T11:56:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.185801 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovnkube-controller/2.log" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.188811 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerStarted","Data":"9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6"} Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.189224 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.202892 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.210713 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.210740 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.210748 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.210760 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.210769 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:15Z","lastTransitionTime":"2026-01-22T11:56:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.218726 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.229680 4773 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d183042e0709480c05e56952a6021a8d816c3c4136509ae049ef868457db54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ddcc51dc7dfeb1f18990d5a1daa85fa7ef773f90b28fc012ac683871d66a634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8xrc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.240129 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5tqwr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.250998 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.260232 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8914af7c-cff6-4066-ba3f-47ff0d1676f3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c3bc0732e748f8a71ec695e3bf7a436347f7f2de2c14233295e157d42182295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2d37afbe5a7d81a05ce577e8a66d1de8b6b4d34328073e97fcfaec06f35297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a2d37afbe5a7d81a05ce577e8a66d1de8b6b4d34328073e97fcfaec06f35297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.273917 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.284575 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.296406 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.296457 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.296468 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.296485 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.296497 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:15Z","lastTransitionTime":"2026-01-22T11:56:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.296593 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64176368ef89ead7cca0621b251ccf3db24dbb357359840e82113f80f8d4b7cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:56:00Z\\\",\\\"message\\\":\\\"2026-01-22T11:55:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7a3a9920-101a-4d8f-845e-22899bfefe13\\\\n2026-01-22T11:55:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7a3a9920-101a-4d8f-845e-22899bfefe13 to /host/opt/cni/bin/\\\\n2026-01-22T11:55:15Z [verbose] multus-daemon started\\\\n2026-01-22T11:55:15Z [verbose] Readiness Indicator file check\\\\n2026-01-22T11:56:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:56:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.305350 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: E0122 11:56:15.307176 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.313924 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.313975 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.313987 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.314005 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.314021 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:15Z","lastTransitionTime":"2026-01-22T11:56:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.316227 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6349121-8063-49ee-91ce-cdb8ba45ffdb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75f708c0004ed30f729db1f335241b3005caed61f0d6dab2ac61685f14c189c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dbf974e110262739b5ac93b528d8acd1f01163d825c0351bf4882e36548efdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea5835dddfbe2535ef6ec62405f9f325318d3951aaec646c967edbe896e8f827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: E0122 11:56:15.327561 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"fa02e8d1-94c2-4a40-8bcd-0ddcf1ff5013\\\",\\\"systemUUID\\\":\\\"52b2f704-3175-40e6-96da-8c8b45b50226\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 
2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.328632 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.330577 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.330618 
4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.330627 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.330641 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.330650 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:15Z","lastTransitionTime":"2026-01-22T11:56:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.341065 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: E0122 11:56:15.343688 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[... remainder of node status patch omitted; byte-identical to the first "Error updating node status, will retry" entry above ...]
2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.347619 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.347658 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.347667 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.347682 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.347691 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:15Z","lastTransitionTime":"2026-01-22T11:56:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.353204 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: E0122 11:56:15.359642 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[... remainder of node status patch omitted; byte-identical to the first "Error updating node status, will retry" entry above ...]
2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.363935 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.364003 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.364015 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.364040 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.364055 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:15Z","lastTransitionTime":"2026-01-22T11:56:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.364169 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e
95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: E0122 11:56:15.376788 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[... remainder of node status patch omitted; byte-identical to the first "Error updating node status, will retry" entry above ...]
2025-08-24T17:21:41Z"
Jan 22 11:56:15 crc kubenswrapper[4773]: E0122 11:56:15.376981 4773 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.378728 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.378763 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.378774 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.378789 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.378802 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:15Z","lastTransitionTime":"2026-01-22T11:56:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.385397 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2f
de47e1696de86609f1d6eea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:45Z\\\",\\\"message\\\":\\\"map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 11:55:45.493862 6468 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-image-registry/image-registry]} name:Service_openshift-image-registry/image-registry_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 11:55:45.494522 6468 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:45.494555 6468 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 11:55:45.494625 6468 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:56:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.403262 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" 
for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.423261 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.481475 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.481518 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.481528 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.481546 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.481556 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:15Z","lastTransitionTime":"2026-01-22T11:56:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.584122 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.584152 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.584162 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.584177 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.584188 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:15Z","lastTransitionTime":"2026-01-22T11:56:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.657368 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:56:15 crc kubenswrapper[4773]: E0122 11:56:15.657530 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.685862 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.685894 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.685902 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.685914 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.685922 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:15Z","lastTransitionTime":"2026-01-22T11:56:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.690184 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 23:24:34.535390081 +0000 UTC
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.789002 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.789062 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.789076 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.789095 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.789108 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:15Z","lastTransitionTime":"2026-01-22T11:56:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.891482 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.891522 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.891531 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.891546 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.891556 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:15Z","lastTransitionTime":"2026-01-22T11:56:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.994457 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.994513 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.994528 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.994549 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:15 crc kubenswrapper[4773]: I0122 11:56:15.994559 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:15Z","lastTransitionTime":"2026-01-22T11:56:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.096864 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.096907 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.096916 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.096931 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.096940 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:16Z","lastTransitionTime":"2026-01-22T11:56:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.193621 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovnkube-controller/3.log"
Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.194573 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovnkube-controller/2.log"
Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.197180 4773 generic.go:334] "Generic (PLEG): container finished" podID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerID="9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6" exitCode=1
Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.197211 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerDied","Data":"9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6"}
Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.197247 4773 scope.go:117] "RemoveContainer" containerID="f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c"
Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.198514 4773 scope.go:117] "RemoveContainer" containerID="9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6"
Jan 22 11:56:16 crc kubenswrapper[4773]: E0122 11:56:16.198922 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-pplsq_openshift-ovn-kubernetes(7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"
Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.200482 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.200511 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.200525 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.200540 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.200550 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:16Z","lastTransitionTime":"2026-01-22T11:56:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.213224 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.226188 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.237352 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.247869 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.268376 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2f
de47e1696de86609f1d6eea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c8263709324b069b1f6b61e6ff1397159a4d25761eabaec9f2e7eec076e92c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:55:45Z\\\",\\\"message\\\":\\\"map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 11:55:45.493862 6468 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-image-registry/image-registry]} name:Service_openshift-image-registry/image-registry_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 11:55:45.494522 6468 ovnkube.go:599] Stopped ovnkube\\\\nI0122 11:55:45.494555 6468 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 11:55:45.494625 6468 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:56:16Z\\\",\\\"message\\\":\\\"56:15.859974 6895 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0122 11:56:15.859991 6895 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI0122 11:56:15.859995 6895 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0122 11:56:15.859999 6895 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nF0122 11:56:15.859868 6895 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed 
calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:2\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:56:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":
\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.290112 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.302594 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8914af7c-cff6-4066-ba3f-47ff0d1676f3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c3bc0732e748f8a71ec695e3bf7a436347f7f2de2c14233295e157d42182295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2d37afbe5a7d81a05ce577e8a66d1de8b6b4d34328073e97fcfaec06f35297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318
bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a2d37afbe5a7d81a05ce577e8a66d1de8b6b4d34328073e97fcfaec06f35297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.304206 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.304257 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.304269 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.304306 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.304319 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:16Z","lastTransitionTime":"2026-01-22T11:56:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.316956 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.328895 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.343333 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.355277 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d183042e0709480c05e56952a6021a8d816c3c4136509ae049ef868457db54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ddcc51dc7dfeb1f18990d5a1daa85fa7ef773f90b28fc012ac683871d66a634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8xrc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:16Z is after 2025-08-24T17:21:41Z" Jan 22 
11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.366979 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5tqwr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.376804 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.387928 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6349121-8063-49ee-91ce-cdb8ba45ffdb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75f708c0004ed30f729db1f335241b3005caed61f0d6dab2ac61685f14c189c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dbf974e110262739b5ac93b528d8acd1f01163d825c0351bf4882e36548efdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea5835dddfbe2535ef6ec62405f9f325318d3951aaec646c967edbe896e8f827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.402960 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.406862 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.406898 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.406911 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.406932 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.406947 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:16Z","lastTransitionTime":"2026-01-22T11:56:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.416462 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.429439 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64176368ef89ead7cca0621b251ccf3db24dbb357359840e82113f80f8d4b7cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:56:00Z\\\",\\\"message\\\":\\\"2026-01-22T11:55:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7a3a9920-101a-4d8f-845e-22899bfefe13\\\\n2026-01-22T11:55:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7a3a9920-101a-4d8f-845e-22899bfefe13 to /host/opt/cni/bin/\\\\n2026-01-22T11:55:15Z [verbose] multus-daemon started\\\\n2026-01-22T11:55:15Z [verbose] Readiness Indicator file check\\\\n2026-01-22T11:56:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:56:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.439556 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:16Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.509660 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.509723 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.509736 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.509752 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.509764 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:16Z","lastTransitionTime":"2026-01-22T11:56:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.612191 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.612237 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.612249 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.612267 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.612279 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:16Z","lastTransitionTime":"2026-01-22T11:56:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.658051 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.658105 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:16 crc kubenswrapper[4773]: E0122 11:56:16.658166 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.658056 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:16 crc kubenswrapper[4773]: E0122 11:56:16.658239 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:16 crc kubenswrapper[4773]: E0122 11:56:16.658387 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.690934 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 00:59:18.133575538 +0000 UTC Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.715137 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.715190 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.715201 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.715218 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.715230 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:16Z","lastTransitionTime":"2026-01-22T11:56:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.817847 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.817885 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.817896 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.817911 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.817921 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:16Z","lastTransitionTime":"2026-01-22T11:56:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.920007 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.920053 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.920062 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.920078 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:16 crc kubenswrapper[4773]: I0122 11:56:16.920089 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:16Z","lastTransitionTime":"2026-01-22T11:56:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.022653 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.022697 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.022705 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.022721 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.022732 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:17Z","lastTransitionTime":"2026-01-22T11:56:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.124304 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.124345 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.124355 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.124369 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.124381 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:17Z","lastTransitionTime":"2026-01-22T11:56:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.201964 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovnkube-controller/3.log" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.204919 4773 scope.go:117] "RemoveContainer" containerID="9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6" Jan 22 11:56:17 crc kubenswrapper[4773]: E0122 11:56:17.205063 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-pplsq_openshift-ovn-kubernetes(7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.215664 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-shp5z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"656fa143-a073-42b1-93cf-e093ff7c285c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://905608d4d59e9376540d2c1affa8c7c0e0416e92d98e6e99f4c3bd845be860aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lznvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:10Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-shp5z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:17Z is after 
2025-08-24T17:21:41Z" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.226509 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.226555 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.226566 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.226584 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.226603 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:17Z","lastTransitionTime":"2026-01-22T11:56:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.227546 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6349121-8063-49ee-91ce-cdb8ba45ffdb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75f708c0004ed30f729db1f335241b3005caed61f0d6dab2ac61685f14c189c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dbf974e110262739b5ac93b528d8acd1f01163d825c0351bf4882e36548efdb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-synce
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea5835dddfbe2535ef6ec62405f9f325318d3951aaec646c967edbe896e8f827\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b6b68ea1d6507f13f5aad483ade14518e34036a6f2f061a1cfa016a41bb91e94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.240568 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b71e36d95ff388b827f8d20358b4774c6057ef5886e86c5c3cd138c0e699e38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72d72c57ed1d47bb9d7952ee03ca25c78c36afe017e8ed5cd0a06dbfc926a3dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.255432 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.269092 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsndt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"73fd13f5-159b-444d-9d03-1e5fdd943673\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:56:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64176368ef89ead7cca0621b251ccf3db24dbb357359840e82113f80f8d4b7cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:56:00Z\\\",\\\"message\\\":\\\"2026-01-22T11:55:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7a3a9920-101a-4d8f-845e-22899bfefe13\\\\n2026-01-22T11:55:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7a3a9920-101a-4d8f-845e-22899bfefe13 to /host/opt/cni/bin/\\\\n2026-01-22T11:55:15Z [verbose] multus-daemon started\\\\n2026-01-22T11:55:15Z [verbose] Readiness Indicator file check\\\\n2026-01-22T11:56:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:56:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jhmh2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsndt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.280886 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-258ll" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"db4bd0eb-fc9a-4db0-b1fb-7f01c9f4732c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe85400fea2feedbbd169e004ae0a1887ec946004d369096bae41b47e3451737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nhfhr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:14Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-258ll\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.300279 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T11:56:16Z\\\",\\\"message\\\":\\\"56:15.859974 6895 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0122 11:56:15.859991 6895 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI0122 11:56:15.859995 6895 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0122 11:56:15.859999 6895 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nF0122 11:56:15.859868 6895 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:15Z is after 2025-08-24T17:2\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:56:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-pplsq_openshift-ovn-kubernetes(7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4szgh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-pplsq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.314442 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d47793ffbfaa22cddcfe0e2298d893a602f0ee71261843ee47d25db914c257b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.326277 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.329164 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.329219 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.329228 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.329242 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.329252 4773 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:17Z","lastTransitionTime":"2026-01-22T11:56:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.359978 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.379560 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5a0935c-0094-42bc-a9e7-bf3fd046e23d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bb4b53ec8478921e15c40420a6140fce5bf29cda39aadfc34d4ef1037c77e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7lphw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-hhxm5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.392342 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2c33aa-28a3-4929-9cd5-d7022c5f0eb8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d183042e0709480c05e56952a6021a8d816c3c4136509ae049ef868457db54\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ddcc51dc7dfeb1f18990d5a1daa85fa7ef773f90b28fc012ac683871d66a634\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0
b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kmcm5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8xrc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.402645 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2bktm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-5tqwr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.413636 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b887083d-0660-48e3-b598-50b91accaba8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02f3506aac4e8c37f89ec872d00d23ddd607f4c68dff69be58cfa6f25f1fe630\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8430203f1352a74bf737881a329f4258d44c1ac3c842a1db5596ce8cebb53ed6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71f85ca61530c37f00b61991ad7ecc095ee06961b71dc26366e1233aabb19ed3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.426545 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8914af7c-cff6-4066-ba3f-47ff0d1676f3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c3bc0732e748f8a71ec695e3bf7a436347f7f2de2c14233295e157d42182295\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2d37afbe5a7d81a05ce577e8a66d1de8b6b4d34328073e97fcfaec06f35297\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318
bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1a2d37afbe5a7d81a05ce577e8a66d1de8b6b4d34328073e97fcfaec06f35297\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.431751 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.431785 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.431796 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.431812 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.431820 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:17Z","lastTransitionTime":"2026-01-22T11:56:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.438483 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"839760ed-cfeb-44fa-b15d-0a8c26187acc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:54:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T11:55:10Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 11:55:05.260470 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 11:55:05.266330 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3093590082/tls.crt::/tmp/serving-cert-3093590082/tls.key\\\\\\\"\\\\nI0122 11:55:10.650228 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 11:55:10.653637 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 11:55:10.653657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 11:55:10.653678 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 11:55:10.653684 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 11:55:10.660562 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 11:55:10.660586 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0122 11:55:10.660587 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0122 11:55:10.660591 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 11:55:10.660617 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 11:55:10.660620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 11:55:10.660623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 11:55:10.660627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0122 11:55:10.663432 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:54:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:54:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:54:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:54:52Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.448898 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3db94a65252a2df482500d5f648ac8807d2144ce028102806f094317b59ec959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.460511 4773 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9bldd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f173bdf-8981-4274-8d1b-ec68a44fefa7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T11:55:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f285de0ae8e337cd9deef2429cd811319248cfaa25ace978110624edfa6eec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T11:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://348714dbb116e558b4b270f199f015f2c4be92d4b648a7973d6be518b3823285\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b4c7882a5de2b0b0620ba2b56f7243852fe2df137705a4dc5757311f5a39d7b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e49cc80d576f62a431f6a8dba5d1d91f0b19f00700c52ce662ac30045b070733\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97f483f2369f1bf60f9f954e17d39f94a0aa06b3effafbddcb90fcd3d4fe518e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d6fdb1247a1c1bf64e1c1783a581b54df760b9cbe5029c5aed256b9813e5e1dc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da1b8bb1f38c705a13ecbbfd8fd43d86769c516ad20502dacda49a499cabb194\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T11:55:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T11:55:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nfndz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T11:55:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9bldd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T11:56:17Z is after 2025-08-24T17:21:41Z" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.534574 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.534603 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:17 crc 
kubenswrapper[4773]: I0122 11:56:17.534623 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.534637 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.534647 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:17Z","lastTransitionTime":"2026-01-22T11:56:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.637494 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.637558 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.637578 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.637600 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.637617 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:17Z","lastTransitionTime":"2026-01-22T11:56:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.657100 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:17 crc kubenswrapper[4773]: E0122 11:56:17.657226 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.691705 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 13:40:41.329129779 +0000 UTC Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.740463 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.740594 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.740628 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.740656 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.740678 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:17Z","lastTransitionTime":"2026-01-22T11:56:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.843923 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.843977 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.843999 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.844025 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:17 crc kubenswrapper[4773]: I0122 11:56:17.844046 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:17Z","lastTransitionTime":"2026-01-22T11:56:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.658155 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.658260 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 11:56:18 crc kubenswrapper[4773]: E0122 11:56:18.658415 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.658503 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 11:56:18 crc kubenswrapper[4773]: E0122 11:56:18.658742 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 11:56:18 crc kubenswrapper[4773]: E0122 11:56:18.658915 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
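Each "Error syncing pod, skipping" entry above is a pod worker declining to set up a pod that needs the cluster network while the runtime network is down; host-network pods (the static control-plane pods) are exempt, which is why they keep starting in the entries further below. A minimal sketch of that gate, with Pod and syncPod as hypothetical stand-ins for the kubelet's internal types, not its real API:

package main

import (
	"errors"
	"fmt"
)

// Pod is a stripped-down stand-in for the kubelet's pod object.
type Pod struct {
	Name        string
	HostNetwork bool
}

var errNetworkNotReady = errors.New("network is not ready: container runtime network not ready: NetworkReady=false")

// syncPod mirrors the gate visible in the log: pods that rely on the
// pod network are skipped until the CNI plugin is ready, while
// host-network pods proceed. Illustrative only.
func syncPod(p Pod, networkReady bool) error {
	if !networkReady && !p.HostNetwork {
		return errNetworkNotReady
	}
	// ... create sandbox, start containers ...
	return nil
}

func main() {
	pods := []Pod{
		{Name: "openshift-multus/network-metrics-daemon-5tqwr", HostNetwork: false},
		{Name: "openshift-kube-apiserver/kube-apiserver-crc", HostNetwork: true},
	}
	for _, p := range pods {
		if err := syncPod(p, false); err != nil {
			fmt.Printf("Error syncing pod, skipping: %v pod=%q\n", err, p.Name)
			continue
		}
		fmt.Printf("synced pod=%q\n", p.Name)
	}
}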
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.666829 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.666877 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.666894 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.666918 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.666933 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:18Z","lastTransitionTime":"2026-01-22T11:56:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.692165 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 17:51:39.05029865 +0000 UTC Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.769777 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.769815 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.769824 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.769839 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.769849 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:18Z","lastTransitionTime":"2026-01-22T11:56:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.872788 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.872851 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.872874 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.872899 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.872914 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:18Z","lastTransitionTime":"2026-01-22T11:56:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.975602 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.975660 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.975672 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.975689 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:18 crc kubenswrapper[4773]: I0122 11:56:18.975701 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:18Z","lastTransitionTime":"2026-01-22T11:56:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.077572 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.077623 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.077632 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.077646 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.077655 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:19Z","lastTransitionTime":"2026-01-22T11:56:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.179435 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.179505 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.179539 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.179568 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.179588 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:19Z","lastTransitionTime":"2026-01-22T11:56:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.281522 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.281554 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.281561 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.281575 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.281586 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:19Z","lastTransitionTime":"2026-01-22T11:56:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.384094 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.384125 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.384132 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.384147 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.384157 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:19Z","lastTransitionTime":"2026-01-22T11:56:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
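The kubelet-serving certificate line recurs about once per second with the same expiry but a different rotation deadline: each time the certificate manager evaluates rotation it re-derives a randomly jittered deadline inside the 70-90% band of the certificate's validity, which is why the date bounces between mid-November and late December. A sketch of that computation; the 0.7 + 0.2*rand factor follows my reading of client-go's certificate manager and the 90-day lifetime is assumed for the demo, so treat both as assumptions:

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a uniformly random point in the 70%-90% band
// of the certificate's lifetime, mimicking client-go's certificate
// manager. The 0.7/0.2 constants are an assumption from upstream code.
func rotationDeadline(notBefore, notAfter time.Time, r *rand.Rand) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*r.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	r := rand.New(rand.NewSource(1))
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC) // expiry from the log
	notBefore := notAfter.Add(-90 * 24 * time.Hour)           // assumed 90-day lifetime
	for i := 0; i < 3; i++ {
		// Each evaluation yields a fresh deadline, as in the log lines.
		fmt.Println("rotation deadline is", rotationDeadline(notBefore, notAfter, r))
	}
}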
Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.657877 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:56:19 crc kubenswrapper[4773]: E0122 11:56:19.658015 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6"
Jan 22 11:56:19 crc kubenswrapper[4773]: I0122 11:56:19.692261 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 22:52:20.116145228 +0000 UTC
Jan 22 11:56:20 crc kubenswrapper[4773]: I0122 11:56:20.657260 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 11:56:20 crc kubenswrapper[4773]: I0122 11:56:20.657402 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 11:56:20 crc kubenswrapper[4773]: I0122 11:56:20.657524 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 11:56:20 crc kubenswrapper[4773]: E0122 11:56:20.657642 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 11:56:20 crc kubenswrapper[4773]: E0122 11:56:20.657807 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 11:56:20 crc kubenswrapper[4773]: E0122 11:56:20.657862 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 11:56:20 crc kubenswrapper[4773]: I0122 11:56:20.693395 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 16:13:42.385644818 +0000 UTC
Jan 22 11:56:21 crc kubenswrapper[4773]: I0122 11:56:21.656889 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:56:21 crc kubenswrapper[4773]: E0122 11:56:21.656995 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6"
Jan 22 11:56:21 crc kubenswrapper[4773]: I0122 11:56:21.694013 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 15:09:28.231997146 +0000 UTC
Has your network provider started?"} Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.576615 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.576710 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.576735 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.576762 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.576790 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:22Z","lastTransitionTime":"2026-01-22T11:56:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.657527 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:22 crc kubenswrapper[4773]: E0122 11:56:22.657674 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.657534 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.657741 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:22 crc kubenswrapper[4773]: E0122 11:56:22.657809 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:22 crc kubenswrapper[4773]: E0122 11:56:22.657912 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.682513 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.682549 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.682558 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.682574 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.682586 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:22Z","lastTransitionTime":"2026-01-22T11:56:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.694194 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=36.694164259 podStartE2EDuration="36.694164259s" podCreationTimestamp="2026-01-22 11:55:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:56:22.67665767 +0000 UTC m=+90.254773495" watchObservedRunningTime="2026-01-22 11:56:22.694164259 +0000 UTC m=+90.272280104" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.694364 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 03:58:48.377090873 +0000 UTC Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.737684 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-tsndt" podStartSLOduration=71.737660623 podStartE2EDuration="1m11.737660623s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:56:22.723071275 +0000 UTC m=+90.301187100" watchObservedRunningTime="2026-01-22 11:56:22.737660623 +0000 UTC m=+90.315776458" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.756773 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-258ll" podStartSLOduration=71.756749301 podStartE2EDuration="1m11.756749301s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:56:22.738374315 +0000 UTC m=+90.316490150" watchObservedRunningTime="2026-01-22 11:56:22.756749301 +0000 UTC m=+90.334865126" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.784860 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.784901 4773 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.784914 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.784932 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.784942 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:22Z","lastTransitionTime":"2026-01-22T11:56:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.825365 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podStartSLOduration=71.825345492 podStartE2EDuration="1m11.825345492s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:56:22.799498342 +0000 UTC m=+90.377614187" watchObservedRunningTime="2026-01-22 11:56:22.825345492 +0000 UTC m=+90.403461317" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.853373 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=65.853353191 podStartE2EDuration="1m5.853353191s" podCreationTimestamp="2026-01-22 11:55:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:56:22.842051226 +0000 UTC m=+90.420167051" watchObservedRunningTime="2026-01-22 11:56:22.853353191 +0000 UTC m=+90.431469016" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.853554 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=27.853547737 podStartE2EDuration="27.853547737s" podCreationTimestamp="2026-01-22 11:55:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:56:22.852349929 +0000 UTC m=+90.430465754" watchObservedRunningTime="2026-01-22 11:56:22.853547737 +0000 UTC m=+90.431663552" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.886008 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=72.885993334 podStartE2EDuration="1m12.885993334s" podCreationTimestamp="2026-01-22 11:55:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:56:22.870311872 +0000 UTC m=+90.448427717" watchObservedRunningTime="2026-01-22 11:56:22.885993334 +0000 UTC m=+90.464109159" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.887275 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.887350 4773 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.887366 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.887385 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.887397 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:22Z","lastTransitionTime":"2026-01-22T11:56:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.904302 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-9bldd" podStartSLOduration=71.904272607 podStartE2EDuration="1m11.904272607s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:56:22.903212264 +0000 UTC m=+90.481328099" watchObservedRunningTime="2026-01-22 11:56:22.904272607 +0000 UTC m=+90.482388432" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.917303 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8xrc6" podStartSLOduration=70.917274915 podStartE2EDuration="1m10.917274915s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:56:22.916826051 +0000 UTC m=+90.494941886" watchObservedRunningTime="2026-01-22 11:56:22.917274915 +0000 UTC m=+90.495390740" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.990031 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.990083 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.990093 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.990108 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:22 crc kubenswrapper[4773]: I0122 11:56:22.990118 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:22Z","lastTransitionTime":"2026-01-22T11:56:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.091923 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.091993 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.092002 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.092016 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.092026 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:23Z","lastTransitionTime":"2026-01-22T11:56:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.195219 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.195276 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.195313 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.195336 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.195351 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:23Z","lastTransitionTime":"2026-01-22T11:56:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.297407 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.297655 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.297727 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.297802 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.297869 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:23Z","lastTransitionTime":"2026-01-22T11:56:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.400118 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.400489 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.400634 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.400782 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.400889 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:23Z","lastTransitionTime":"2026-01-22T11:56:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.503688 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.503735 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.503746 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.503763 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.503777 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:23Z","lastTransitionTime":"2026-01-22T11:56:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.605936 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.605970 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.605981 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.605995 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.606005 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:23Z","lastTransitionTime":"2026-01-22T11:56:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
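Each setters.go entry above records the same Ready condition object being rebuilt on every status sync (roughly every 100ms here) with a fresh heartbeat time. A minimal sketch, not the kubelet's actual setters.go code, of the condition these lines serialize:

package main

import (
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// notReadyCondition builds a Ready=False node condition with reason
// KubeletNotReady, carrying the runtime's NetworkReady error as the message,
// mirroring the condition={...} payload in the log.
func notReadyCondition(now time.Time, runtimeErr error) corev1.NodeCondition {
	return corev1.NodeCondition{
		Type:               corev1.NodeReady,
		Status:             corev1.ConditionFalse,
		LastHeartbeatTime:  metav1.NewTime(now),
		LastTransitionTime: metav1.NewTime(now),
		Reason:             "KubeletNotReady",
		Message:            runtimeErr.Error(),
	}
}

func main() {
	err := fmt.Errorf("container runtime network not ready: NetworkReady=false")
	fmt.Printf("%+v\n", notReadyCondition(time.Now(), err))
}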
Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.658502 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:56:23 crc kubenswrapper[4773]: E0122 11:56:23.658689 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6"
Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.695178 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 05:34:37.50593753 +0000 UTC
Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.708124 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.708409 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.708439 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.708461 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.708476 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:23Z","lastTransitionTime":"2026-01-22T11:56:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.810399 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.810451 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.810464 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.810482 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.810493 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:23Z","lastTransitionTime":"2026-01-22T11:56:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.912983 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.913037 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.913052 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.913077 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:23 crc kubenswrapper[4773]: I0122 11:56:23.913094 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:23Z","lastTransitionTime":"2026-01-22T11:56:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.015986 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.016024 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.016034 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.016050 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.016060 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:24Z","lastTransitionTime":"2026-01-22T11:56:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.118748 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.118809 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.118832 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.118860 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.118882 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:24Z","lastTransitionTime":"2026-01-22T11:56:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.221089 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.221121 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.221131 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.221144 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.221154 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:24Z","lastTransitionTime":"2026-01-22T11:56:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.323119 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.323156 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.323167 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.323184 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.323196 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:24Z","lastTransitionTime":"2026-01-22T11:56:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.425114 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.425149 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.425157 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.425170 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.425180 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:24Z","lastTransitionTime":"2026-01-22T11:56:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.527451 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.527529 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.527553 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.527586 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.527611 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:24Z","lastTransitionTime":"2026-01-22T11:56:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.630706 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.630829 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.630864 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.630891 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.630916 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:24Z","lastTransitionTime":"2026-01-22T11:56:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.657459 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 11:56:24 crc kubenswrapper[4773]: E0122 11:56:24.657687 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.657736 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.657486 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 11:56:24 crc kubenswrapper[4773]: E0122 11:56:24.657934 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 11:56:24 crc kubenswrapper[4773]: E0122 11:56:24.658132 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.695546 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 17:46:07.673950605 +0000 UTC
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.733928 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.734002 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.734035 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.734063 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.734084 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:24Z","lastTransitionTime":"2026-01-22T11:56:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
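Note how the certificate_manager lines keep the same expiration (2026-02-24 05:53:03) but report a different rotation deadline each second (2025-11-14, 2025-11-07, 2026-01-06 so far). That is expected: client-go's certificate manager re-jitters the deadline on every evaluation, picking a point at roughly 70-90% of the certificate's validity window. A hedged approximation of that behavior, not the library's exact code; the issuance time below is assumed since the log does not show it:

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline returns a jittered point in the 70-90% band of the
// certificate's lifetime, so repeated calls yield different deadlines while
// the expiration stays fixed, as in the log.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z")
	notBefore := notAfter.Add(-90 * 24 * time.Hour) // assumed issuance, not from the log
	for i := 0; i < 3; i++ {
		fmt.Println(rotationDeadline(notBefore, notAfter))
	}
}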
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.837062 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.837109 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.837125 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.837147 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.837164 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:24Z","lastTransitionTime":"2026-01-22T11:56:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.940151 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.940208 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.940226 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.940249 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:24 crc kubenswrapper[4773]: I0122 11:56:24.940268 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:24Z","lastTransitionTime":"2026-01-22T11:56:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.043278 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.043375 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.043388 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.043419 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.043436 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:25Z","lastTransitionTime":"2026-01-22T11:56:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.146511 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.146567 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.146581 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.146600 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.146612 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:25Z","lastTransitionTime":"2026-01-22T11:56:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.250597 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.250658 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.250678 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.250707 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.250734 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:25Z","lastTransitionTime":"2026-01-22T11:56:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.353299 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.353335 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.353367 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.353383 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.353393 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:25Z","lastTransitionTime":"2026-01-22T11:56:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.455751 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.456071 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.456221 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.456413 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.456558 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:25Z","lastTransitionTime":"2026-01-22T11:56:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.531315 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.531567 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.531792 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.531976 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.532153 4773 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T11:56:25Z","lastTransitionTime":"2026-01-22T11:56:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.573482 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-shp5z" podStartSLOduration=75.57345797 podStartE2EDuration="1m15.57345797s" podCreationTimestamp="2026-01-22 11:55:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:56:22.936125216 +0000 UTC m=+90.514241061" watchObservedRunningTime="2026-01-22 11:56:25.57345797 +0000 UTC m=+93.151573825"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.573782 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq"]
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.574275 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq"
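The "SyncLoop ADD source=api" entry is the kubelet reacting to a pod add delivered by its apiserver watch, and the reflector lines that follow show list/watch caches being filled for the secrets and configmaps that pod mounts. A generic client-go sketch of that list/watch/handler pattern, for orientation only; the kubelet's internal pod source is wired differently:

package main

import (
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)

	factory := informers.NewSharedInformerFactory(cs, 10*time.Minute)
	podInformer := factory.Core().V1().Pods().Informer()
	podInformer.AddEventHandler(cache.ResourceEventHandlerFuncs{
		// Analogous to the SyncLoop's ADD branch in the log.
		AddFunc: func(obj interface{}) {
			pod := obj.(*corev1.Pod)
			fmt.Printf("ADD %s/%s\n", pod.Namespace, pod.Name)
		},
	})

	stop := make(chan struct{})
	factory.Start(stop)
	factory.WaitForCacheSync(stop) // the "Caches populated" moment
	select {}
}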
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq" Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.575634 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.576100 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.577182 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.578328 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.657743 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:25 crc kubenswrapper[4773]: E0122 11:56:25.657921 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.696210 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 07:51:12.868322064 +0000 UTC Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.696351 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.706683 4773 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.728509 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8a5047ad-0733-4e2a-aa4c-8831dba9ab7c-service-ca\") pod \"cluster-version-operator-5c965bbfc6-pqjlq\" (UID: \"8a5047ad-0733-4e2a-aa4c-8831dba9ab7c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq" Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.728723 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8a5047ad-0733-4e2a-aa4c-8831dba9ab7c-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-pqjlq\" (UID: \"8a5047ad-0733-4e2a-aa4c-8831dba9ab7c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq" Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.728827 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/8a5047ad-0733-4e2a-aa4c-8831dba9ab7c-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-pqjlq\" (UID: \"8a5047ad-0733-4e2a-aa4c-8831dba9ab7c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq" Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.728929 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/8a5047ad-0733-4e2a-aa4c-8831dba9ab7c-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-pqjlq\" (UID: \"8a5047ad-0733-4e2a-aa4c-8831dba9ab7c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq" Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.729039 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a5047ad-0733-4e2a-aa4c-8831dba9ab7c-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-pqjlq\" (UID: \"8a5047ad-0733-4e2a-aa4c-8831dba9ab7c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq" Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.830382 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8a5047ad-0733-4e2a-aa4c-8831dba9ab7c-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-pqjlq\" (UID: \"8a5047ad-0733-4e2a-aa4c-8831dba9ab7c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq" Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.830435 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8a5047ad-0733-4e2a-aa4c-8831dba9ab7c-service-ca\") pod \"cluster-version-operator-5c965bbfc6-pqjlq\" (UID: \"8a5047ad-0733-4e2a-aa4c-8831dba9ab7c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq" Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.830463 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/8a5047ad-0733-4e2a-aa4c-8831dba9ab7c-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-pqjlq\" (UID: \"8a5047ad-0733-4e2a-aa4c-8831dba9ab7c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq" Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.830496 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/8a5047ad-0733-4e2a-aa4c-8831dba9ab7c-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-pqjlq\" (UID: \"8a5047ad-0733-4e2a-aa4c-8831dba9ab7c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq" Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.830527 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a5047ad-0733-4e2a-aa4c-8831dba9ab7c-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-pqjlq\" (UID: \"8a5047ad-0733-4e2a-aa4c-8831dba9ab7c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq" Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.831080 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/8a5047ad-0733-4e2a-aa4c-8831dba9ab7c-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-pqjlq\" (UID: \"8a5047ad-0733-4e2a-aa4c-8831dba9ab7c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq" Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.831171 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" 
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.831925 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8a5047ad-0733-4e2a-aa4c-8831dba9ab7c-service-ca\") pod \"cluster-version-operator-5c965bbfc6-pqjlq\" (UID: \"8a5047ad-0733-4e2a-aa4c-8831dba9ab7c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.836114 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a5047ad-0733-4e2a-aa4c-8831dba9ab7c-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-pqjlq\" (UID: \"8a5047ad-0733-4e2a-aa4c-8831dba9ab7c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.852729 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8a5047ad-0733-4e2a-aa4c-8831dba9ab7c-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-pqjlq\" (UID: \"8a5047ad-0733-4e2a-aa4c-8831dba9ab7c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq"
Jan 22 11:56:25 crc kubenswrapper[4773]: I0122 11:56:25.891125 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq"
Jan 22 11:56:25 crc kubenswrapper[4773]: W0122 11:56:25.910182 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a5047ad_0733_4e2a_aa4c_8831dba9ab7c.slice/crio-dd46aed8c5aa6279d1cb349fecdc11b7124e0acdfc13005e29a1fcfcd5aee103 WatchSource:0}: Error finding container dd46aed8c5aa6279d1cb349fecdc11b7124e0acdfc13005e29a1fcfcd5aee103: Status 404 returned error can't find the container with id dd46aed8c5aa6279d1cb349fecdc11b7124e0acdfc13005e29a1fcfcd5aee103
Jan 22 11:56:26 crc kubenswrapper[4773]: I0122 11:56:26.239005 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq" event={"ID":"8a5047ad-0733-4e2a-aa4c-8831dba9ab7c","Type":"ContainerStarted","Data":"9becd04a153637ade6b5a9107f42a22bb42ee66916a8fd2e92a7502ac70ef23d"}
Jan 22 11:56:26 crc kubenswrapper[4773]: I0122 11:56:26.239071 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq" event={"ID":"8a5047ad-0733-4e2a-aa4c-8831dba9ab7c","Type":"ContainerStarted","Data":"dd46aed8c5aa6279d1cb349fecdc11b7124e0acdfc13005e29a1fcfcd5aee103"}
Jan 22 11:56:26 crc kubenswrapper[4773]: I0122 11:56:26.255341 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-pqjlq" podStartSLOduration=75.255323312 podStartE2EDuration="1m15.255323312s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:56:26.25461027 +0000 UTC m=+93.832726115" watchObservedRunningTime="2026-01-22 11:56:26.255323312 +0000 UTC m=+93.833439137"
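The W0122 line is a benign race: cAdvisor saw the new crio-dd46aed8... cgroup before the runtime could resolve the container, hence the 404. The "SyncLoop (PLEG)" lines that follow are the pod lifecycle event generator diffing container state between relists and emitting ContainerStarted for the sandbox and the operator container. A toy version of that diff, not the kubelet's PLEG implementation:

package main

import "fmt"

// plegEvents compares two container-state snapshots and reports each
// container that newly reached "running", mirroring the ContainerStarted
// events in the log.
func plegEvents(old, cur map[string]string) []string {
	var events []string
	for id, state := range cur {
		if state == "running" && old[id] != "running" {
			events = append(events, "ContainerStarted "+id)
		}
	}
	return events
}

func main() {
	old := map[string]string{}
	cur := map[string]string{
		"dd46aed8c5aa": "running", // sandbox; ID shortened for illustration
		"9becd04a1536": "running", // cluster-version-operator container
	}
	for _, e := range plegEvents(old, cur) {
		fmt.Println(e)
	}
}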
Jan 22 11:56:26 crc kubenswrapper[4773]: I0122 11:56:26.657697 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 11:56:26 crc kubenswrapper[4773]: I0122 11:56:26.657784 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 11:56:26 crc kubenswrapper[4773]: I0122 11:56:26.657724 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 11:56:26 crc kubenswrapper[4773]: E0122 11:56:26.657929 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 11:56:26 crc kubenswrapper[4773]: E0122 11:56:26.658042 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 11:56:26 crc kubenswrapper[4773]: E0122 11:56:26.658150 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 11:56:27 crc kubenswrapper[4773]: I0122 11:56:27.657455 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:56:27 crc kubenswrapper[4773]: E0122 11:56:27.657692 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6"
Jan 22 11:56:28 crc kubenswrapper[4773]: I0122 11:56:28.657266 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 11:56:28 crc kubenswrapper[4773]: I0122 11:56:28.657328 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 11:56:28 crc kubenswrapper[4773]: I0122 11:56:28.657348 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 11:56:28 crc kubenswrapper[4773]: E0122 11:56:28.657460 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 11:56:28 crc kubenswrapper[4773]: E0122 11:56:28.657557 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 11:56:28 crc kubenswrapper[4773]: E0122 11:56:28.657624 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 11:56:29 crc kubenswrapper[4773]: I0122 11:56:29.267733 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs\") pod \"network-metrics-daemon-5tqwr\" (UID: \"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\") " pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:56:29 crc kubenswrapper[4773]: E0122 11:56:29.267899 4773 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 22 11:56:29 crc kubenswrapper[4773]: E0122 11:56:29.267969 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs podName:c1ed4b8d-def5-474b-8629-cd0bae7e49a6 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:33.267948024 +0000 UTC m=+160.846063849 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs") pod "network-metrics-daemon-5tqwr" (UID: "c1ed4b8d-def5-474b-8629-cd0bae7e49a6") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 22 11:56:29 crc kubenswrapper[4773]: I0122 11:56:29.658069 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:56:29 crc kubenswrapper[4773]: E0122 11:56:29.658339 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6"
pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:30 crc kubenswrapper[4773]: I0122 11:56:30.657994 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:30 crc kubenswrapper[4773]: I0122 11:56:30.658061 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:30 crc kubenswrapper[4773]: I0122 11:56:30.658126 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:30 crc kubenswrapper[4773]: E0122 11:56:30.658323 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:30 crc kubenswrapper[4773]: E0122 11:56:30.658429 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:30 crc kubenswrapper[4773]: E0122 11:56:30.658550 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:30 crc kubenswrapper[4773]: I0122 11:56:30.659668 4773 scope.go:117] "RemoveContainer" containerID="9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6" Jan 22 11:56:30 crc kubenswrapper[4773]: E0122 11:56:30.659912 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-pplsq_openshift-ovn-kubernetes(7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" Jan 22 11:56:31 crc kubenswrapper[4773]: I0122 11:56:31.657663 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:31 crc kubenswrapper[4773]: E0122 11:56:31.657793 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:32 crc kubenswrapper[4773]: I0122 11:56:32.658574 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:32 crc kubenswrapper[4773]: I0122 11:56:32.658574 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:32 crc kubenswrapper[4773]: E0122 11:56:32.659637 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:32 crc kubenswrapper[4773]: I0122 11:56:32.659649 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:32 crc kubenswrapper[4773]: E0122 11:56:32.659774 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:32 crc kubenswrapper[4773]: E0122 11:56:32.659829 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:32 crc kubenswrapper[4773]: I0122 11:56:32.676854 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jan 22 11:56:33 crc kubenswrapper[4773]: I0122 11:56:33.657627 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:33 crc kubenswrapper[4773]: E0122 11:56:33.657734 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:34 crc kubenswrapper[4773]: I0122 11:56:34.657233 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:34 crc kubenswrapper[4773]: I0122 11:56:34.657355 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:34 crc kubenswrapper[4773]: I0122 11:56:34.657355 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:34 crc kubenswrapper[4773]: E0122 11:56:34.657491 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:34 crc kubenswrapper[4773]: E0122 11:56:34.657934 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:34 crc kubenswrapper[4773]: E0122 11:56:34.658189 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:35 crc kubenswrapper[4773]: I0122 11:56:35.657120 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:35 crc kubenswrapper[4773]: E0122 11:56:35.657399 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:36 crc kubenswrapper[4773]: I0122 11:56:36.657000 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:36 crc kubenswrapper[4773]: E0122 11:56:36.657180 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:36 crc kubenswrapper[4773]: I0122 11:56:36.657014 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:36 crc kubenswrapper[4773]: I0122 11:56:36.657242 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:36 crc kubenswrapper[4773]: E0122 11:56:36.657438 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:36 crc kubenswrapper[4773]: E0122 11:56:36.657533 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:37 crc kubenswrapper[4773]: I0122 11:56:37.657693 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:37 crc kubenswrapper[4773]: E0122 11:56:37.657879 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:38 crc kubenswrapper[4773]: I0122 11:56:38.658073 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:38 crc kubenswrapper[4773]: I0122 11:56:38.658105 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:38 crc kubenswrapper[4773]: E0122 11:56:38.659687 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:38 crc kubenswrapper[4773]: E0122 11:56:38.659121 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:38 crc kubenswrapper[4773]: I0122 11:56:38.658270 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:38 crc kubenswrapper[4773]: E0122 11:56:38.659774 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:39 crc kubenswrapper[4773]: I0122 11:56:39.657689 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:39 crc kubenswrapper[4773]: E0122 11:56:39.657894 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:40 crc kubenswrapper[4773]: I0122 11:56:40.657264 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:40 crc kubenswrapper[4773]: I0122 11:56:40.657345 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:40 crc kubenswrapper[4773]: E0122 11:56:40.657395 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:40 crc kubenswrapper[4773]: E0122 11:56:40.657489 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:40 crc kubenswrapper[4773]: I0122 11:56:40.657526 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:40 crc kubenswrapper[4773]: E0122 11:56:40.657568 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:41 crc kubenswrapper[4773]: I0122 11:56:41.657590 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:41 crc kubenswrapper[4773]: E0122 11:56:41.657728 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:42 crc kubenswrapper[4773]: I0122 11:56:42.657895 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:42 crc kubenswrapper[4773]: I0122 11:56:42.659141 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:42 crc kubenswrapper[4773]: E0122 11:56:42.659488 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:42 crc kubenswrapper[4773]: E0122 11:56:42.659803 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:42 crc kubenswrapper[4773]: I0122 11:56:42.659984 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:42 crc kubenswrapper[4773]: E0122 11:56:42.660249 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:42 crc kubenswrapper[4773]: I0122 11:56:42.660615 4773 scope.go:117] "RemoveContainer" containerID="9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6" Jan 22 11:56:42 crc kubenswrapper[4773]: E0122 11:56:42.660813 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-pplsq_openshift-ovn-kubernetes(7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5)\"" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" Jan 22 11:56:42 crc kubenswrapper[4773]: I0122 11:56:42.689939 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=10.689759062 podStartE2EDuration="10.689759062s" podCreationTimestamp="2026-01-22 11:56:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:56:42.684453515 +0000 UTC m=+110.262569430" watchObservedRunningTime="2026-01-22 11:56:42.689759062 +0000 UTC m=+110.267874927" Jan 22 11:56:43 crc kubenswrapper[4773]: I0122 11:56:43.657862 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:43 crc kubenswrapper[4773]: E0122 11:56:43.658021 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:44 crc kubenswrapper[4773]: I0122 11:56:44.658135 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:44 crc kubenswrapper[4773]: I0122 11:56:44.658236 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:44 crc kubenswrapper[4773]: E0122 11:56:44.658388 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:44 crc kubenswrapper[4773]: I0122 11:56:44.658413 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:44 crc kubenswrapper[4773]: E0122 11:56:44.658582 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:44 crc kubenswrapper[4773]: E0122 11:56:44.658736 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:45 crc kubenswrapper[4773]: I0122 11:56:45.657053 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:45 crc kubenswrapper[4773]: E0122 11:56:45.657530 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:46 crc kubenswrapper[4773]: I0122 11:56:46.657576 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:46 crc kubenswrapper[4773]: I0122 11:56:46.657646 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:46 crc kubenswrapper[4773]: I0122 11:56:46.657661 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:46 crc kubenswrapper[4773]: E0122 11:56:46.657769 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:46 crc kubenswrapper[4773]: E0122 11:56:46.657822 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:46 crc kubenswrapper[4773]: E0122 11:56:46.657895 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:47 crc kubenswrapper[4773]: I0122 11:56:47.300921 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tsndt_73fd13f5-159b-444d-9d03-1e5fdd943673/kube-multus/1.log" Jan 22 11:56:47 crc kubenswrapper[4773]: I0122 11:56:47.301436 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tsndt_73fd13f5-159b-444d-9d03-1e5fdd943673/kube-multus/0.log" Jan 22 11:56:47 crc kubenswrapper[4773]: I0122 11:56:47.301499 4773 generic.go:334] "Generic (PLEG): container finished" podID="73fd13f5-159b-444d-9d03-1e5fdd943673" containerID="64176368ef89ead7cca0621b251ccf3db24dbb357359840e82113f80f8d4b7cd" exitCode=1 Jan 22 11:56:47 crc kubenswrapper[4773]: I0122 11:56:47.301534 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tsndt" event={"ID":"73fd13f5-159b-444d-9d03-1e5fdd943673","Type":"ContainerDied","Data":"64176368ef89ead7cca0621b251ccf3db24dbb357359840e82113f80f8d4b7cd"} Jan 22 11:56:47 crc kubenswrapper[4773]: I0122 11:56:47.301580 4773 scope.go:117] "RemoveContainer" containerID="9f09dd7b6a385cdb38454a75c8210faf69e5ad39355e690186c5a44cb9662aab" Jan 22 11:56:47 crc kubenswrapper[4773]: I0122 11:56:47.302861 4773 scope.go:117] "RemoveContainer" containerID="64176368ef89ead7cca0621b251ccf3db24dbb357359840e82113f80f8d4b7cd" Jan 22 11:56:47 crc kubenswrapper[4773]: E0122 11:56:47.303315 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-tsndt_openshift-multus(73fd13f5-159b-444d-9d03-1e5fdd943673)\"" pod="openshift-multus/multus-tsndt" podUID="73fd13f5-159b-444d-9d03-1e5fdd943673" Jan 22 11:56:47 crc kubenswrapper[4773]: I0122 11:56:47.657989 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:47 crc kubenswrapper[4773]: E0122 11:56:47.658148 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:48 crc kubenswrapper[4773]: I0122 11:56:48.305410 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tsndt_73fd13f5-159b-444d-9d03-1e5fdd943673/kube-multus/1.log" Jan 22 11:56:48 crc kubenswrapper[4773]: I0122 11:56:48.657681 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:48 crc kubenswrapper[4773]: I0122 11:56:48.657728 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:48 crc kubenswrapper[4773]: I0122 11:56:48.657728 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:48 crc kubenswrapper[4773]: E0122 11:56:48.657847 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:48 crc kubenswrapper[4773]: E0122 11:56:48.657948 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:48 crc kubenswrapper[4773]: E0122 11:56:48.658045 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:49 crc kubenswrapper[4773]: I0122 11:56:49.657190 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:49 crc kubenswrapper[4773]: E0122 11:56:49.657597 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:50 crc kubenswrapper[4773]: I0122 11:56:50.658056 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:50 crc kubenswrapper[4773]: I0122 11:56:50.658090 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:50 crc kubenswrapper[4773]: I0122 11:56:50.658323 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:50 crc kubenswrapper[4773]: E0122 11:56:50.658319 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:50 crc kubenswrapper[4773]: E0122 11:56:50.658507 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:50 crc kubenswrapper[4773]: E0122 11:56:50.658726 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:51 crc kubenswrapper[4773]: I0122 11:56:51.657201 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:51 crc kubenswrapper[4773]: E0122 11:56:51.657446 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:52 crc kubenswrapper[4773]: I0122 11:56:52.657268 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:52 crc kubenswrapper[4773]: I0122 11:56:52.657276 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:52 crc kubenswrapper[4773]: I0122 11:56:52.657366 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:52 crc kubenswrapper[4773]: E0122 11:56:52.658200 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:52 crc kubenswrapper[4773]: E0122 11:56:52.658275 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:52 crc kubenswrapper[4773]: E0122 11:56:52.658353 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:52 crc kubenswrapper[4773]: E0122 11:56:52.704845 4773 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 22 11:56:52 crc kubenswrapper[4773]: E0122 11:56:52.749126 4773 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 22 11:56:53 crc kubenswrapper[4773]: I0122 11:56:53.657221 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:53 crc kubenswrapper[4773]: E0122 11:56:53.657743 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:54 crc kubenswrapper[4773]: I0122 11:56:54.657757 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:54 crc kubenswrapper[4773]: I0122 11:56:54.657757 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:54 crc kubenswrapper[4773]: E0122 11:56:54.657917 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:54 crc kubenswrapper[4773]: E0122 11:56:54.657977 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:54 crc kubenswrapper[4773]: I0122 11:56:54.657781 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:54 crc kubenswrapper[4773]: E0122 11:56:54.658053 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:55 crc kubenswrapper[4773]: I0122 11:56:55.657429 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:55 crc kubenswrapper[4773]: E0122 11:56:55.657602 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:56 crc kubenswrapper[4773]: I0122 11:56:56.657738 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:56 crc kubenswrapper[4773]: I0122 11:56:56.657791 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:56 crc kubenswrapper[4773]: E0122 11:56:56.658038 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:56 crc kubenswrapper[4773]: I0122 11:56:56.658050 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:56 crc kubenswrapper[4773]: E0122 11:56:56.658155 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:56 crc kubenswrapper[4773]: E0122 11:56:56.658192 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:56 crc kubenswrapper[4773]: I0122 11:56:56.658876 4773 scope.go:117] "RemoveContainer" containerID="9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6" Jan 22 11:56:57 crc kubenswrapper[4773]: I0122 11:56:57.334191 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovnkube-controller/3.log" Jan 22 11:56:57 crc kubenswrapper[4773]: I0122 11:56:57.336631 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerStarted","Data":"a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5"} Jan 22 11:56:57 crc kubenswrapper[4773]: I0122 11:56:57.337332 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:56:57 crc kubenswrapper[4773]: I0122 11:56:57.362795 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" podStartSLOduration=106.36277552 podStartE2EDuration="1m46.36277552s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:56:57.360830549 +0000 UTC m=+124.938946374" watchObservedRunningTime="2026-01-22 11:56:57.36277552 +0000 UTC m=+124.940891365" Jan 22 11:56:57 crc kubenswrapper[4773]: I0122 11:56:57.499768 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-5tqwr"] Jan 22 11:56:57 crc kubenswrapper[4773]: I0122 11:56:57.499879 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:57 crc kubenswrapper[4773]: E0122 11:56:57.499977 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:57 crc kubenswrapper[4773]: E0122 11:56:57.752268 4773 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 22 11:56:58 crc kubenswrapper[4773]: I0122 11:56:58.657443 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:56:58 crc kubenswrapper[4773]: I0122 11:56:58.657509 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:56:58 crc kubenswrapper[4773]: E0122 11:56:58.657588 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:56:58 crc kubenswrapper[4773]: E0122 11:56:58.657657 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:56:58 crc kubenswrapper[4773]: I0122 11:56:58.657623 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:56:58 crc kubenswrapper[4773]: E0122 11:56:58.658049 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:56:59 crc kubenswrapper[4773]: I0122 11:56:59.658042 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:56:59 crc kubenswrapper[4773]: E0122 11:56:59.658187 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:56:59 crc kubenswrapper[4773]: I0122 11:56:59.658475 4773 scope.go:117] "RemoveContainer" containerID="64176368ef89ead7cca0621b251ccf3db24dbb357359840e82113f80f8d4b7cd" Jan 22 11:57:00 crc kubenswrapper[4773]: I0122 11:57:00.351702 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tsndt_73fd13f5-159b-444d-9d03-1e5fdd943673/kube-multus/1.log" Jan 22 11:57:00 crc kubenswrapper[4773]: I0122 11:57:00.352121 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tsndt" event={"ID":"73fd13f5-159b-444d-9d03-1e5fdd943673","Type":"ContainerStarted","Data":"1e265729999b7277b3616772c91c49a0387aee1dc557febdfe1ffcbd6f8e4f5a"} Jan 22 11:57:00 crc kubenswrapper[4773]: I0122 11:57:00.657442 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:57:00 crc kubenswrapper[4773]: I0122 11:57:00.657483 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:57:00 crc kubenswrapper[4773]: E0122 11:57:00.657658 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:57:00 crc kubenswrapper[4773]: I0122 11:57:00.657747 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:57:00 crc kubenswrapper[4773]: E0122 11:57:00.657895 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:57:00 crc kubenswrapper[4773]: E0122 11:57:00.657985 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:57:01 crc kubenswrapper[4773]: I0122 11:57:01.338754 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 11:57:01 crc kubenswrapper[4773]: I0122 11:57:01.656956 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:57:01 crc kubenswrapper[4773]: E0122 11:57:01.657096 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-5tqwr" podUID="c1ed4b8d-def5-474b-8629-cd0bae7e49a6" Jan 22 11:57:02 crc kubenswrapper[4773]: I0122 11:57:02.657453 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:57:02 crc kubenswrapper[4773]: I0122 11:57:02.657580 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:57:02 crc kubenswrapper[4773]: E0122 11:57:02.658596 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 11:57:02 crc kubenswrapper[4773]: I0122 11:57:02.658647 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:57:02 crc kubenswrapper[4773]: E0122 11:57:02.658702 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 11:57:02 crc kubenswrapper[4773]: E0122 11:57:02.658816 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 11:57:03 crc kubenswrapper[4773]: I0122 11:57:03.656987 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr" Jan 22 11:57:03 crc kubenswrapper[4773]: I0122 11:57:03.659185 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 22 11:57:03 crc kubenswrapper[4773]: I0122 11:57:03.660479 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 22 11:57:04 crc kubenswrapper[4773]: I0122 11:57:04.657589 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:57:04 crc kubenswrapper[4773]: I0122 11:57:04.657634 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:57:04 crc kubenswrapper[4773]: I0122 11:57:04.657589 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:57:04 crc kubenswrapper[4773]: I0122 11:57:04.661422 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 22 11:57:04 crc kubenswrapper[4773]: I0122 11:57:04.661531 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 22 11:57:04 crc kubenswrapper[4773]: I0122 11:57:04.661624 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 22 11:57:04 crc kubenswrapper[4773]: I0122 11:57:04.661638 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.514005 4773 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.562664 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-xqd5z"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.563239 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.564458 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-nwwct"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.564982 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.567748 4773 reflector.go:561] object-"openshift-apiserver"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.567800 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.567854 4773 reflector.go:561] object-"openshift-apiserver"/"etcd-serving-ca": failed to list *v1.ConfigMap: configmaps "etcd-serving-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.567873 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"etcd-serving-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"etcd-serving-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.567933 4773 reflector.go:561] object-"openshift-apiserver"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.567951 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.568011 4773 reflector.go:561] object-"openshift-apiserver"/"trusted-ca-bundle": failed to list *v1.ConfigMap: configmaps "trusted-ca-bundle" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.568026 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"trusted-ca-bundle\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"trusted-ca-bundle\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.568084 4773 reflector.go:561] 
object-"openshift-apiserver"/"etcd-client": failed to list *v1.Secret: secrets "etcd-client" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.568101 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"etcd-client\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"etcd-client\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.568146 4773 reflector.go:561] object-"openshift-apiserver"/"audit-1": failed to list *v1.ConfigMap: configmaps "audit-1" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.568164 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"audit-1\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"audit-1\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.568512 4773 reflector.go:561] object-"openshift-apiserver"/"image-import-ca": failed to list *v1.ConfigMap: configmaps "image-import-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.568549 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"image-import-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"image-import-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.568565 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62"] Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.568995 4773 reflector.go:561] object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff": failed to list *v1.Secret: secrets "openshift-apiserver-sa-dockercfg-djjff" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.569015 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"openshift-apiserver-sa-dockercfg-djjff\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-apiserver-sa-dockercfg-djjff\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 
11:57:06.569052 4773 reflector.go:561] object-"openshift-authentication"/"v4-0-config-user-template-error": failed to list *v1.Secret: secrets "v4-0-config-user-template-error" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.569065 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-user-template-error\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-user-template-error\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.569090 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.569566 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-r5t5b"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.570339 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.570413 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-jm24g"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.570801 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.573471 4773 reflector.go:561] object-"openshift-authentication"/"v4-0-config-user-template-provider-selection": failed to list *v1.Secret: secrets "v4-0-config-user-template-provider-selection" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.573513 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-user-template-provider-selection\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-user-template-provider-selection\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.578119 4773 reflector.go:561] object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template": failed to list *v1.Secret: secrets "v4-0-config-system-ocp-branding-template" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.578211 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-ocp-branding-template\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-system-ocp-branding-template\" 
is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.578299 4773 reflector.go:561] object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data": failed to list *v1.Secret: secrets "v4-0-config-user-idp-0-file-data" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.578335 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-user-idp-0-file-data\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-user-idp-0-file-data\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.578548 4773 reflector.go:561] object-"openshift-apiserver"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.578588 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.578704 4773 reflector.go:561] object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc": failed to list *v1.Secret: secrets "oauth-openshift-dockercfg-znhcc" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.578723 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"oauth-openshift-dockercfg-znhcc\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"oauth-openshift-dockercfg-znhcc\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.578762 4773 reflector.go:561] object-"openshift-authentication"/"v4-0-config-system-router-certs": failed to list *v1.Secret: secrets "v4-0-config-system-router-certs" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.578775 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-router-certs\": Failed to watch *v1.Secret: failed 
to list *v1.Secret: secrets \"v4-0-config-system-router-certs\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.578819 4773 reflector.go:561] object-"openshift-authentication"/"v4-0-config-system-serving-cert": failed to list *v1.Secret: secrets "v4-0-config-system-serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.578843 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-system-serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.578907 4773 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config": failed to list *v1.ConfigMap: configmaps "openshift-apiserver-operator-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.578924 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"openshift-apiserver-operator-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-apiserver-operator-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.579057 4773 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-images": failed to list *v1.ConfigMap: configmaps "machine-api-operator-images" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.579073 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-images\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"machine-api-operator-images\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.579127 4773 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv": failed to list *v1.Secret: secrets "openshift-apiserver-operator-dockercfg-xtcjv" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 
11:57:06.579150 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"openshift-apiserver-operator-dockercfg-xtcjv\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-apiserver-operator-dockercfg-xtcjv\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.579326 4773 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.579350 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.579474 4773 reflector.go:561] object-"openshift-authentication"/"v4-0-config-system-session": failed to list *v1.Secret: secrets "v4-0-config-system-session" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.579495 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-session\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-system-session\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.579590 4773 reflector.go:561] object-"openshift-controller-manager"/"openshift-global-ca": failed to list *v1.ConfigMap: configmaps "openshift-global-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.579610 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-global-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-global-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.579645 4773 reflector.go:561] object-"openshift-apiserver"/"config": failed to list *v1.ConfigMap: configmaps "config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object 
Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.579656 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.579687 4773 reflector.go:561] object-"openshift-apiserver"/"encryption-config-1": failed to list *v1.Secret: secrets "encryption-config-1" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.579698 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"encryption-config-1\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"encryption-config-1\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.579721 4773 reflector.go:561] object-"openshift-authentication"/"v4-0-config-user-template-login": failed to list *v1.Secret: secrets "v4-0-config-user-template-login" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.579742 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-user-template-login\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-user-template-login\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.579727 4773 reflector.go:561] object-"openshift-controller-manager"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.579764 4773 reflector.go:561] object-"openshift-apiserver-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.579770 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.579775 
4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.579817 4773 reflector.go:561] object-"openshift-authentication"/"v4-0-config-system-service-ca": failed to list *v1.ConfigMap: configmaps "v4-0-config-system-service-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.579827 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-service-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"v4-0-config-system-service-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.579859 4773 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-tls": failed to list *v1.Secret: secrets "machine-api-operator-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.579872 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-api-operator-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.579863 4773 reflector.go:561] object-"openshift-authentication"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.579891 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.579894 4773 reflector.go:561] object-"openshift-controller-manager"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.579910 4773 
reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.579926 4773 reflector.go:561] object-"openshift-controller-manager"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.579944 4773 reflector.go:561] object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle": failed to list *v1.ConfigMap: configmaps "v4-0-config-system-trusted-ca-bundle" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.579944 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.579954 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-trusted-ca-bundle\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"v4-0-config-system-trusted-ca-bundle\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.579985 4773 reflector.go:561] object-"openshift-controller-manager"/"config": failed to list *v1.ConfigMap: configmaps "config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.580007 4773 reflector.go:561] object-"openshift-machine-api"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.580018 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 
11:57:06.579989 4773 reflector.go:561] object-"openshift-authentication"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.580038 4773 reflector.go:561] object-"openshift-machine-api"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.580054 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.580062 4773 reflector.go:561] object-"openshift-machine-api"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.580073 4773 reflector.go:561] object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c": failed to list *v1.Secret: secrets "openshift-controller-manager-sa-dockercfg-msq4c" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.580084 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.580086 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-controller-manager-sa-dockercfg-msq4c\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-controller-manager-sa-dockercfg-msq4c\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.580003 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" 
logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.580135 4773 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7": failed to list *v1.Secret: secrets "machine-api-operator-dockercfg-mfbb7" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.580138 4773 reflector.go:561] object-"openshift-authentication"/"v4-0-config-system-cliconfig": failed to list *v1.ConfigMap: configmaps "v4-0-config-system-cliconfig" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.580151 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-dockercfg-mfbb7\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-api-operator-dockercfg-mfbb7\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.580153 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-cliconfig\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"v4-0-config-system-cliconfig\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.580037 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.580068 4773 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert": failed to list *v1.Secret: secrets "openshift-apiserver-operator-serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.580182 4773 reflector.go:561] object-"openshift-controller-manager"/"client-ca": failed to list *v1.ConfigMap: configmaps "client-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.580190 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"openshift-apiserver-operator-serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-apiserver-operator-serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource 
\"secrets\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.580192 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"client-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"client-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: W0122 11:57:06.580236 4773 reflector.go:561] object-"openshift-authentication"/"audit": failed to list *v1.ConfigMap: configmaps "audit" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Jan 22 11:57:06 crc kubenswrapper[4773]: E0122 11:57:06.580251 4773 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"audit\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"audit\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.588353 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.589072 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.594331 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.594502 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.594662 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-lrxk6"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.594741 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.594910 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.595052 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.595167 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-lrxk6" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.595193 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.595371 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.595731 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.595775 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.598249 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rpgsr"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.598798 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.601927 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-29862"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.602537 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-29862" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.605375 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.606258 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.606477 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.606641 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.606943 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.607093 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.607155 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.607308 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.607393 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.607397 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.607474 4773 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.607556 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.607632 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.610243 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.610405 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.610589 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.611075 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-vnsx2"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.611379 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8fll2"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.611692 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.611804 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.611842 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.611848 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8fll2" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.611911 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-vnsx2" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.611980 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.612101 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-trjxm"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.612745 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-trjxm" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.613075 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-pd2kc"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.613575 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pd2kc" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.613888 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.614184 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.615933 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-zmgk2"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.616265 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-zmgk2" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.618175 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6h722"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.626935 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-r844w"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.628432 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-r844w" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.632435 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.633219 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.634011 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6h722" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.636530 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.641466 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.641533 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.641556 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.641668 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.641676 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.641699 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-dzt2p"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.646512 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.647127 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-fwt28"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.647601 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.647907 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.648127 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.649565 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.649738 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.649908 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.650945 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.651189 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.651366 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.651517 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.651666 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.651815 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.656013 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.656268 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.656499 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.656512 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.656630 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.656681 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.661774 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.661952 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.661976 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 
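Because klog timestamps carry microseconds, the gap between a pod's "SyncLoop ADD" and the kubelet noticing it needs a sandbox can be measured directly from entries like the ones above. A rough sketch; the pinned year 2026 is an assumption (klog headers omit the year), and only single-pod ADD lists are handled, which is all this section contains:

import re
import sys
from datetime import datetime

# klog header, e.g. "I0122 11:57:06.562664" (month+day, then time).
HDR = re.compile(r'[IWEF](\d{4}) (\d{2}:\d{2}:\d{2}\.\d{6})')
ADD = re.compile(r'"SyncLoop ADD" source="api" pods=\["([^"]+)"\]')
SANDBOX = re.compile(r'"No sandbox for pod can be found\. Need to start a new one" pod="([^"]+)"')

def ts(line):
    m = HDR.search(line)
    # The year is not in the log; 2026 is pinned purely to enable arithmetic.
    return datetime.strptime(f"2026{m.group(1)} {m.group(2)}", "%Y%m%d %H:%M:%S.%f") if m else None

added = {}
for line in open(sys.argv[1], encoding="utf-8", errors="replace"):
    if m := ADD.search(line):
        added.setdefault(m.group(1), ts(line))
    elif (m := SANDBOX.search(line)) and m.group(1) in added:
        delta = ts(line) - added.pop(m.group(1))
        print(f"{delta.total_seconds() * 1000:8.1f} ms  {m.group(1)}")

In the burst above the ADD-to-sandbox-decision gap is typically well under a millisecond, so anything dramatically larger would point at kubelet sync-loop backpressure rather than scheduling.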
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.665862 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/9ddb15eb-ebaf-4cb3-90b6-6156ffd665af-machine-approver-tls\") pod \"machine-approver-56656f9798-pd2kc\" (UID: \"9ddb15eb-ebaf-4cb3-90b6-6156ffd665af\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pd2kc"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.665906 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6940aa57-4e2c-41c6-a4e0-64081d1208c9-metrics-tls\") pod \"dns-operator-744455d44c-r844w\" (UID: \"6940aa57-4e2c-41c6-a4e0-64081d1208c9\") " pod="openshift-dns-operator/dns-operator-744455d44c-r844w"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.665929 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.665949 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-console-config\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.665984 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/1a76da48-177b-429e-a136-d78afeae02aa-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-jm24g\" (UID: \"1a76da48-177b-429e-a136-d78afeae02aa\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666010 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666030 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-config\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666045 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sg6pk\" (UniqueName: \"kubernetes.io/projected/143cf6af-d9df-44fc-8281-17ee0a86807c-kube-api-access-sg6pk\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666063 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-oauth-serving-cert\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666110 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwmg2\" (UniqueName: \"kubernetes.io/projected/943bb747-f86c-4422-b99f-97a7fdb07b87-kube-api-access-gwmg2\") pod \"downloads-7954f5f757-trjxm\" (UID: \"943bb747-f86c-4422-b99f-97a7fdb07b87\") " pod="openshift-console/downloads-7954f5f757-trjxm"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666134 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-829c9\" (UniqueName: \"kubernetes.io/projected/bfd1a7c2-cdbc-4c02-97b4-6a09476397b8-kube-api-access-829c9\") pod \"openshift-controller-manager-operator-756b6f6bc6-6h722\" (UID: \"bfd1a7c2-cdbc-4c02-97b4-6a09476397b8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6h722"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666153 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/47082a42-2a2d-4d25-bbbe-ee688cd9599a-trusted-ca\") pod \"console-operator-58897d9998-vnsx2\" (UID: \"47082a42-2a2d-4d25-bbbe-ee688cd9599a\") " pod="openshift-console-operator/console-operator-58897d9998-vnsx2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666169 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-client-ca\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666198 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/77e4f557-fa21-4034-9a3f-f347f3c219e9-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-gqc2d\" (UID: \"77e4f557-fa21-4034-9a3f-f347f3c219e9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666228 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xv2v\" (UniqueName: \"kubernetes.io/projected/4db3dc80-10df-4d72-925f-ab5c927bc6ef-kube-api-access-5xv2v\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666256 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d12eac55-c003-4ed2-9b5f-aec3270b23ed-encryption-config\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666275 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vscf\" (UniqueName: \"kubernetes.io/projected/6940aa57-4e2c-41c6-a4e0-64081d1208c9-kube-api-access-5vscf\") pod \"dns-operator-744455d44c-r844w\" (UID: \"6940aa57-4e2c-41c6-a4e0-64081d1208c9\") " pod="openshift-dns-operator/dns-operator-744455d44c-r844w"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666325 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gn2jj\" (UniqueName: \"kubernetes.io/projected/ea377cf5-fbd8-462d-bfea-dd3aca3da018-kube-api-access-gn2jj\") pod \"route-controller-manager-6576b87f9c-5jllv\" (UID: \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666354 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/57270c31-9344-4b7a-9636-119edbf10dbc-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8fll2\" (UID: \"57270c31-9344-4b7a-9636-119edbf10dbc\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8fll2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666379 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9d85\" (UniqueName: \"kubernetes.io/projected/622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c-kube-api-access-s9d85\") pod \"authentication-operator-69f744f599-rpgsr\" (UID: \"622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666400 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d12eac55-c003-4ed2-9b5f-aec3270b23ed-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666416 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9ddb15eb-ebaf-4cb3-90b6-6156ffd665af-auth-proxy-config\") pod \"machine-approver-56656f9798-pd2kc\" (UID: \"9ddb15eb-ebaf-4cb3-90b6-6156ffd665af\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pd2kc"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666433 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666449 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/80b4c3da-59cb-4154-ade0-92772eb0fe8a-service-ca-bundle\") pod \"router-default-5444994796-zmgk2\" (UID: \"80b4c3da-59cb-4154-ade0-92772eb0fe8a\") " pod="openshift-ingress/router-default-5444994796-zmgk2" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666464 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-image-import-ca\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666480 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcb4689a-1339-43ea-b525-787e4a35e05c-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-xnb62\" (UID: \"dcb4689a-1339-43ea-b525-787e4a35e05c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666499 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/5e69f07b-6904-4637-9b6a-95d9714f036a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-29862\" (UID: \"5e69f07b-6904-4637-9b6a-95d9714f036a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-29862" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666513 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666529 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzf74\" (UniqueName: \"kubernetes.io/projected/a3b03ed7-b33f-411b-aa66-d06360af63d1-kube-api-access-pzf74\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666546 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d12eac55-c003-4ed2-9b5f-aec3270b23ed-etcd-client\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666560 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-console-serving-cert\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666576 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea377cf5-fbd8-462d-bfea-dd3aca3da018-client-ca\") pod 
\"route-controller-manager-6576b87f9c-5jllv\" (UID: \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666591 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/77e4f557-fa21-4034-9a3f-f347f3c219e9-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-gqc2d\" (UID: \"77e4f557-fa21-4034-9a3f-f347f3c219e9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666607 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bfd1a7c2-cdbc-4c02-97b4-6a09476397b8-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-6h722\" (UID: \"bfd1a7c2-cdbc-4c02-97b4-6a09476397b8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6h722" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666622 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfd1a7c2-cdbc-4c02-97b4-6a09476397b8-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-6h722\" (UID: \"bfd1a7c2-cdbc-4c02-97b4-6a09476397b8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6h722" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666640 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-audit\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666681 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c-service-ca-bundle\") pod \"authentication-operator-69f744f599-rpgsr\" (UID: \"622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666704 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57bqd\" (UniqueName: \"kubernetes.io/projected/1a76da48-177b-429e-a136-d78afeae02aa-kube-api-access-57bqd\") pod \"machine-api-operator-5694c8668f-jm24g\" (UID: \"1a76da48-177b-429e-a136-d78afeae02aa\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666720 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d12eac55-c003-4ed2-9b5f-aec3270b23ed-audit-dir\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666735 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c-config\") pod \"authentication-operator-69f744f599-rpgsr\" (UID: \"622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666749 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxh2b\" (UniqueName: \"kubernetes.io/projected/dcb4689a-1339-43ea-b525-787e4a35e05c-kube-api-access-dxh2b\") pod \"openshift-apiserver-operator-796bbdcf4f-xnb62\" (UID: \"dcb4689a-1339-43ea-b525-787e4a35e05c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666766 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-config\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666783 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d12eac55-c003-4ed2-9b5f-aec3270b23ed-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666800 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-console-oauth-config\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666819 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-encryption-config\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666843 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcb4689a-1339-43ea-b525-787e4a35e05c-config\") pod \"openshift-apiserver-operator-796bbdcf4f-xnb62\" (UID: \"dcb4689a-1339-43ea-b525-787e4a35e05c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666868 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/80b4c3da-59cb-4154-ade0-92772eb0fe8a-stats-auth\") pod \"router-default-5444994796-zmgk2\" (UID: \"80b4c3da-59cb-4154-ade0-92772eb0fe8a\") " pod="openshift-ingress/router-default-5444994796-zmgk2" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666890 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-trusted-ca-bundle\") pod \"console-f9d7485db-lrxk6\" 
(UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666910 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-etcd-serving-ca\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666944 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d12eac55-c003-4ed2-9b5f-aec3270b23ed-audit-policies\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666966 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwhsf\" (UniqueName: \"kubernetes.io/projected/d12eac55-c003-4ed2-9b5f-aec3270b23ed-kube-api-access-wwhsf\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.666987 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667008 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4jbm\" (UniqueName: \"kubernetes.io/projected/57270c31-9344-4b7a-9636-119edbf10dbc-kube-api-access-d4jbm\") pod \"openshift-config-operator-7777fb866f-8fll2\" (UID: \"57270c31-9344-4b7a-9636-119edbf10dbc\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8fll2" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667032 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667055 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80b4c3da-59cb-4154-ade0-92772eb0fe8a-metrics-certs\") pod \"router-default-5444994796-zmgk2\" (UID: \"80b4c3da-59cb-4154-ade0-92772eb0fe8a\") " pod="openshift-ingress/router-default-5444994796-zmgk2" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667080 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: 
\"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667105 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1a76da48-177b-429e-a136-d78afeae02aa-images\") pod \"machine-api-operator-5694c8668f-jm24g\" (UID: \"1a76da48-177b-429e-a136-d78afeae02aa\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667126 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lg8q5\" (UniqueName: \"kubernetes.io/projected/77e4f557-fa21-4034-9a3f-f347f3c219e9-kube-api-access-lg8q5\") pod \"cluster-image-registry-operator-dc59b4c8b-gqc2d\" (UID: \"77e4f557-fa21-4034-9a3f-f347f3c219e9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667151 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667173 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d12eac55-c003-4ed2-9b5f-aec3270b23ed-serving-cert\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667194 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a76da48-177b-429e-a136-d78afeae02aa-config\") pod \"machine-api-operator-5694c8668f-jm24g\" (UID: \"1a76da48-177b-429e-a136-d78afeae02aa\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667214 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667236 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea377cf5-fbd8-462d-bfea-dd3aca3da018-config\") pod \"route-controller-manager-6576b87f9c-5jllv\" (UID: \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667257 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ddb15eb-ebaf-4cb3-90b6-6156ffd665af-config\") pod \"machine-approver-56656f9798-pd2kc\" (UID: 
\"9ddb15eb-ebaf-4cb3-90b6-6156ffd665af\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pd2kc" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667311 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4sw9\" (UniqueName: \"kubernetes.io/projected/47082a42-2a2d-4d25-bbbe-ee688cd9599a-kube-api-access-s4sw9\") pod \"console-operator-58897d9998-vnsx2\" (UID: \"47082a42-2a2d-4d25-bbbe-ee688cd9599a\") " pod="openshift-console-operator/console-operator-58897d9998-vnsx2" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667336 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rpgsr\" (UID: \"622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667358 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6z9vs\" (UniqueName: \"kubernetes.io/projected/5e69f07b-6904-4637-9b6a-95d9714f036a-kube-api-access-6z9vs\") pod \"cluster-samples-operator-665b6dd947-29862\" (UID: \"5e69f07b-6904-4637-9b6a-95d9714f036a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-29862" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667383 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/77e4f557-fa21-4034-9a3f-f347f3c219e9-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-gqc2d\" (UID: \"77e4f557-fa21-4034-9a3f-f347f3c219e9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667404 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a3b03ed7-b33f-411b-aa66-d06360af63d1-node-pullsecrets\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667423 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6cg7\" (UniqueName: \"kubernetes.io/projected/80b4c3da-59cb-4154-ade0-92772eb0fe8a-kube-api-access-b6cg7\") pod \"router-default-5444994796-zmgk2\" (UID: \"80b4c3da-59cb-4154-ade0-92772eb0fe8a\") " pod="openshift-ingress/router-default-5444994796-zmgk2" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667446 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c-serving-cert\") pod \"authentication-operator-69f744f599-rpgsr\" (UID: \"622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667463 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/143cf6af-d9df-44fc-8281-17ee0a86807c-serving-cert\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667477 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-audit-policies\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667492 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667521 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/57270c31-9344-4b7a-9636-119edbf10dbc-serving-cert\") pod \"openshift-config-operator-7777fb866f-8fll2\" (UID: \"57270c31-9344-4b7a-9636-119edbf10dbc\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8fll2" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667537 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-service-ca\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667551 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7t98h\" (UniqueName: \"kubernetes.io/projected/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-kube-api-access-7t98h\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667568 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/47082a42-2a2d-4d25-bbbe-ee688cd9599a-serving-cert\") pod \"console-operator-58897d9998-vnsx2\" (UID: \"47082a42-2a2d-4d25-bbbe-ee688cd9599a\") " pod="openshift-console-operator/console-operator-58897d9998-vnsx2" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667582 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4db3dc80-10df-4d72-925f-ab5c927bc6ef-audit-dir\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667597 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-trusted-ca-bundle\") pod 
\"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667613 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea377cf5-fbd8-462d-bfea-dd3aca3da018-serving-cert\") pod \"route-controller-manager-6576b87f9c-5jllv\" (UID: \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667626 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a3b03ed7-b33f-411b-aa66-d06360af63d1-audit-dir\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667643 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667671 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-serving-cert\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667694 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/80b4c3da-59cb-4154-ade0-92772eb0fe8a-default-certificate\") pod \"router-default-5444994796-zmgk2\" (UID: \"80b4c3da-59cb-4154-ade0-92772eb0fe8a\") " pod="openshift-ingress/router-default-5444994796-zmgk2" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667714 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-etcd-client\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667732 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47082a42-2a2d-4d25-bbbe-ee688cd9599a-config\") pod \"console-operator-58897d9998-vnsx2\" (UID: \"47082a42-2a2d-4d25-bbbe-ee688cd9599a\") " pod="openshift-console-operator/console-operator-58897d9998-vnsx2" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667748 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvrh4\" (UniqueName: \"kubernetes.io/projected/9ddb15eb-ebaf-4cb3-90b6-6156ffd665af-kube-api-access-vvrh4\") pod \"machine-approver-56656f9798-pd2kc\" (UID: \"9ddb15eb-ebaf-4cb3-90b6-6156ffd665af\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pd2kc" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.667767 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.671849 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.673850 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.673862 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.674895 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmqk7"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.675410 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cqcxg"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.675680 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-zrkcm"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.676029 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.676185 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zrkcm" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.676223 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cqcxg" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.677028 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.677601 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bw97w"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.678147 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bw97w" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.678542 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmqk7" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.678962 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.679520 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.679745 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkth4"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.680392 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkth4" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.681906 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2hwmg"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.682426 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2hwmg" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.683103 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.684357 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.684556 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.684800 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.686049 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.687649 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tnzhm"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.688162 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.688635 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-h7fz7"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.689267 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-h7fz7" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.691145 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tqwkj"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.691937 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tqwkj" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.705468 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-jc2cw"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.709626 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.713051 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-mttmk"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.714084 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-mttmk" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.715092 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jc2cw" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.726389 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.729343 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.730039 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-r7gll"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.730764 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-r7gll" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.731010 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.731981 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.734998 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.736905 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-5ktrv"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.737742 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-5ktrv" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.738898 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-nwwct"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.741264 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-r844w"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.743061 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.744395 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.745864 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.746195 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-zrkcm"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.747685 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-trjxm"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.749046 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.751151 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.752202 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rpgsr"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.753354 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-jm24g"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.762171 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-dzt2p"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.763752 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.764969 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.765622 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8fll2"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.765774 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-lrxk6"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.768803 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ddb15eb-ebaf-4cb3-90b6-6156ffd665af-config\") pod \"machine-approver-56656f9798-pd2kc\" (UID: \"9ddb15eb-ebaf-4cb3-90b6-6156ffd665af\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pd2kc" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.768862 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a8dcf163-8c98-4db0-95e5-1d96e45a619c-config-volume\") pod \"collect-profiles-29484705-kmcsc\" (UID: \"a8dcf163-8c98-4db0-95e5-1d96e45a619c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.768904 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6z9vs\" (UniqueName: \"kubernetes.io/projected/5e69f07b-6904-4637-9b6a-95d9714f036a-kube-api-access-6z9vs\") pod \"cluster-samples-operator-665b6dd947-29862\" (UID: \"5e69f07b-6904-4637-9b6a-95d9714f036a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-29862" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.768930 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a3b03ed7-b33f-411b-aa66-d06360af63d1-node-pullsecrets\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.768959 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rpgsr\" (UID: \"622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.768980 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c-serving-cert\") pod \"authentication-operator-69f744f599-rpgsr\" (UID: \"622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769008 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bw97w"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769024 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/143cf6af-d9df-44fc-8281-17ee0a86807c-serving-cert\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769162 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-audit-policies\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769204 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:06 crc 
kubenswrapper[4773]: I0122 11:57:06.769240 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4b2948a0-e876-4813-8f06-b5a82d35b47a-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2fmtb\" (UID: \"4b2948a0-e876-4813-8f06-b5a82d35b47a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769265 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c65efd55-e31e-4f78-9c88-89e9c5d35df8-srv-cert\") pod \"catalog-operator-68c6474976-2rj56\" (UID: \"c65efd55-e31e-4f78-9c88-89e9c5d35df8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769308 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-service-ca\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769321 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a3b03ed7-b33f-411b-aa66-d06360af63d1-node-pullsecrets\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769337 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7t98h\" (UniqueName: \"kubernetes.io/projected/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-kube-api-access-7t98h\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769362 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5mtk\" (UniqueName: \"kubernetes.io/projected/e0dc1dc8-96fa-4338-b5c9-a6a85fe40ba1-kube-api-access-x5mtk\") pod \"migrator-59844c95c7-zrkcm\" (UID: \"e0dc1dc8-96fa-4338-b5c9-a6a85fe40ba1\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zrkcm"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769385 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kf26\" (UniqueName: \"kubernetes.io/projected/866b7e00-9f0a-4b4d-9b5b-91457bcd2409-kube-api-access-8kf26\") pod \"dns-default-5ktrv\" (UID: \"866b7e00-9f0a-4b4d-9b5b-91457bcd2409\") " pod="openshift-dns/dns-default-5ktrv"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769407 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4db3dc80-10df-4d72-925f-ab5c927bc6ef-audit-dir\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769431 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea377cf5-fbd8-462d-bfea-dd3aca3da018-serving-cert\") pod \"route-controller-manager-6576b87f9c-5jllv\" (UID: \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769455 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769478 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-serving-cert\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769505 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47082a42-2a2d-4d25-bbbe-ee688cd9599a-config\") pod \"console-operator-58897d9998-vnsx2\" (UID: \"47082a42-2a2d-4d25-bbbe-ee688cd9599a\") " pod="openshift-console-operator/console-operator-58897d9998-vnsx2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769529 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ba3fd722-161e-4047-bd23-6b6d5306a6ee-metrics-tls\") pod \"ingress-operator-5b745b69d9-ls4mr\" (UID: \"ba3fd722-161e-4047-bd23-6b6d5306a6ee\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769570 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-console-config\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769598 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769617 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ddb15eb-ebaf-4cb3-90b6-6156ffd665af-config\") pod \"machine-approver-56656f9798-pd2kc\" (UID: \"9ddb15eb-ebaf-4cb3-90b6-6156ffd665af\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pd2kc"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769625 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd-signing-key\") pod \"service-ca-9c57cc56f-mttmk\" (UID: \"f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd\") " pod="openshift-service-ca/service-ca-9c57cc56f-mttmk"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769696 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-np5qp\" (UniqueName: \"kubernetes.io/projected/6f6256b7-548b-46ff-9c2a-d95240136df2-kube-api-access-np5qp\") pod \"machine-config-controller-84d6567774-jc2cw\" (UID: \"6f6256b7-548b-46ff-9c2a-d95240136df2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jc2cw"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769727 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4db3dc80-10df-4d72-925f-ab5c927bc6ef-audit-dir\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.769730 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-oauth-serving-cert\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.770232 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-xqd5z"]
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.770454 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rpgsr\" (UID: \"622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.770808 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-oauth-serving-cert\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.770832 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47082a42-2a2d-4d25-bbbe-ee688cd9599a-config\") pod \"console-operator-58897d9998-vnsx2\" (UID: \"47082a42-2a2d-4d25-bbbe-ee688cd9599a\") " pod="openshift-console-operator/console-operator-58897d9998-vnsx2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.771255 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-service-ca\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.771616 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-29862"]
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.771695 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-console-config\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.772003 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4b2948a0-e876-4813-8f06-b5a82d35b47a-images\") pod \"machine-config-operator-74547568cd-2fmtb\" (UID: \"4b2948a0-e876-4813-8f06-b5a82d35b47a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.772066 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-client-ca\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.772232 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/77e4f557-fa21-4034-9a3f-f347f3c219e9-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-gqc2d\" (UID: \"77e4f557-fa21-4034-9a3f-f347f3c219e9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.772710 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d12eac55-c003-4ed2-9b5f-aec3270b23ed-encryption-config\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.772773 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gn2jj\" (UniqueName: \"kubernetes.io/projected/ea377cf5-fbd8-462d-bfea-dd3aca3da018-kube-api-access-gn2jj\") pod \"route-controller-manager-6576b87f9c-5jllv\" (UID: \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.772855 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a36fde19-2db8-4b4d-9702-a8a3a20a77e1-config\") pod \"service-ca-operator-777779d784-h7fz7\" (UID: \"a36fde19-2db8-4b4d-9702-a8a3a20a77e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-h7fz7"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.772976 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9d85\" (UniqueName: \"kubernetes.io/projected/622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c-kube-api-access-s9d85\") pod \"authentication-operator-69f744f599-rpgsr\" (UID: \"622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773007 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d12eac55-c003-4ed2-9b5f-aec3270b23ed-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773030 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9ddb15eb-ebaf-4cb3-90b6-6156ffd665af-auth-proxy-config\") pod \"machine-approver-56656f9798-pd2kc\" (UID: \"9ddb15eb-ebaf-4cb3-90b6-6156ffd665af\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pd2kc"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773058 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ba3fd722-161e-4047-bd23-6b6d5306a6ee-trusted-ca\") pod \"ingress-operator-5b745b69d9-ls4mr\" (UID: \"ba3fd722-161e-4047-bd23-6b6d5306a6ee\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773082 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6f6256b7-548b-46ff-9c2a-d95240136df2-proxy-tls\") pod \"machine-config-controller-84d6567774-jc2cw\" (UID: \"6f6256b7-548b-46ff-9c2a-d95240136df2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jc2cw"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773112 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c65efd55-e31e-4f78-9c88-89e9c5d35df8-profile-collector-cert\") pod \"catalog-operator-68c6474976-2rj56\" (UID: \"c65efd55-e31e-4f78-9c88-89e9c5d35df8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773139 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773161 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d12eac55-c003-4ed2-9b5f-aec3270b23ed-etcd-client\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773182 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/5e69f07b-6904-4637-9b6a-95d9714f036a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-29862\" (UID: \"5e69f07b-6904-4637-9b6a-95d9714f036a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-29862"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773205 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea377cf5-fbd8-462d-bfea-dd3aca3da018-client-ca\") pod \"route-controller-manager-6576b87f9c-5jllv\" (UID: \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773231 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-console-serving-cert\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773258 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/77e4f557-fa21-4034-9a3f-f347f3c219e9-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-gqc2d\" (UID: \"77e4f557-fa21-4034-9a3f-f347f3c219e9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773301 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkth4\" (UID: \"6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkth4"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773333 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4b093dc7-d5ee-409a-9c1c-e003686d44e4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tnzhm\" (UID: \"4b093dc7-d5ee-409a-9c1c-e003686d44e4\") " pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773359 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a36fde19-2db8-4b4d-9702-a8a3a20a77e1-serving-cert\") pod \"service-ca-operator-777779d784-h7fz7\" (UID: \"a36fde19-2db8-4b4d-9702-a8a3a20a77e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-h7fz7"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773382 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t24gj\" (UniqueName: \"kubernetes.io/projected/a8dcf163-8c98-4db0-95e5-1d96e45a619c-kube-api-access-t24gj\") pod \"collect-profiles-29484705-kmcsc\" (UID: \"a8dcf163-8c98-4db0-95e5-1d96e45a619c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773410 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c-service-ca-bundle\") pod \"authentication-operator-69f744f599-rpgsr\" (UID: \"622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773429 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d12eac55-c003-4ed2-9b5f-aec3270b23ed-audit-dir\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773447 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxh2b\" (UniqueName: \"kubernetes.io/projected/dcb4689a-1339-43ea-b525-787e4a35e05c-kube-api-access-dxh2b\") pod \"openshift-apiserver-operator-796bbdcf4f-xnb62\" (UID: \"dcb4689a-1339-43ea-b525-787e4a35e05c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773467 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88ff6e28-5128-42f0-8280-79874dd5431e-config\") pod \"etcd-operator-b45778765-fwt28\" (UID: \"88ff6e28-5128-42f0-8280-79874dd5431e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773483 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d12eac55-c003-4ed2-9b5f-aec3270b23ed-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773498 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-console-oauth-config\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773517 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-encryption-config\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773533 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/80b4c3da-59cb-4154-ade0-92772eb0fe8a-stats-auth\") pod \"router-default-5444994796-zmgk2\" (UID: \"80b4c3da-59cb-4154-ade0-92772eb0fe8a\") " pod="openshift-ingress/router-default-5444994796-zmgk2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773550 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-trusted-ca-bundle\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773566 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwgwc\" (UniqueName: \"kubernetes.io/projected/a36fde19-2db8-4b4d-9702-a8a3a20a77e1-kube-api-access-zwgwc\") pod \"service-ca-operator-777779d784-h7fz7\" (UID: \"a36fde19-2db8-4b4d-9702-a8a3a20a77e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-h7fz7"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773587 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d12eac55-c003-4ed2-9b5f-aec3270b23ed-audit-policies\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773606 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwhsf\" (UniqueName: \"kubernetes.io/projected/d12eac55-c003-4ed2-9b5f-aec3270b23ed-kube-api-access-wwhsf\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773622 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773638 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773663 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80b4c3da-59cb-4154-ade0-92772eb0fe8a-metrics-certs\") pod \"router-default-5444994796-zmgk2\" (UID: \"80b4c3da-59cb-4154-ade0-92772eb0fe8a\") " pod="openshift-ingress/router-default-5444994796-zmgk2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773685 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773709 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1a76da48-177b-429e-a136-d78afeae02aa-images\") pod \"machine-api-operator-5694c8668f-jm24g\" (UID: \"1a76da48-177b-429e-a136-d78afeae02aa\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773726 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773726 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d12eac55-c003-4ed2-9b5f-aec3270b23ed-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773748 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/88ff6e28-5128-42f0-8280-79874dd5431e-etcd-ca\") pod \"etcd-operator-b45778765-fwt28\" (UID: \"88ff6e28-5128-42f0-8280-79874dd5431e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773766 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9txxr\" (UniqueName: \"kubernetes.io/projected/88ff6e28-5128-42f0-8280-79874dd5431e-kube-api-access-9txxr\") pod \"etcd-operator-b45778765-fwt28\" (UID: \"88ff6e28-5128-42f0-8280-79874dd5431e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773767 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tqwkj"]
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773951 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x48g2\" (UniqueName: \"kubernetes.io/projected/c65efd55-e31e-4f78-9c88-89e9c5d35df8-kube-api-access-x48g2\") pod \"catalog-operator-68c6474976-2rj56\" (UID: \"c65efd55-e31e-4f78-9c88-89e9c5d35df8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773974 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a76da48-177b-429e-a136-d78afeae02aa-config\") pod \"machine-api-operator-5694c8668f-jm24g\" (UID: \"1a76da48-177b-429e-a136-d78afeae02aa\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.773992 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.774009 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/244704c5-6799-48ab-9c90-8c38ae7f3c5e-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-r7gll\" (UID: \"244704c5-6799-48ab-9c90-8c38ae7f3c5e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-r7gll"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.774032 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4sw9\" (UniqueName: \"kubernetes.io/projected/47082a42-2a2d-4d25-bbbe-ee688cd9599a-kube-api-access-s4sw9\") pod \"console-operator-58897d9998-vnsx2\" (UID: \"47082a42-2a2d-4d25-bbbe-ee688cd9599a\") " pod="openshift-console-operator/console-operator-58897d9998-vnsx2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.774079 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ba3fd722-161e-4047-bd23-6b6d5306a6ee-bound-sa-token\") pod \"ingress-operator-5b745b69d9-ls4mr\" (UID: \"ba3fd722-161e-4047-bd23-6b6d5306a6ee\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.774095 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75f7x\" (UniqueName: \"kubernetes.io/projected/2e31d232-2138-4dd6-9b7d-71f87e414a01-kube-api-access-75f7x\") pod \"control-plane-machine-set-operator-78cbb6b69f-2hwmg\" (UID: \"2e31d232-2138-4dd6-9b7d-71f87e414a01\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2hwmg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.774110 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6f6256b7-548b-46ff-9c2a-d95240136df2-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-jc2cw\" (UID: \"6f6256b7-548b-46ff-9c2a-d95240136df2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jc2cw"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.774130 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/77e4f557-fa21-4034-9a3f-f347f3c219e9-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-gqc2d\" (UID: \"77e4f557-fa21-4034-9a3f-f347f3c219e9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.774149 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6cg7\" (UniqueName: \"kubernetes.io/projected/80b4c3da-59cb-4154-ade0-92772eb0fe8a-kube-api-access-b6cg7\") pod \"router-default-5444994796-zmgk2\" (UID: \"80b4c3da-59cb-4154-ade0-92772eb0fe8a\") " pod="openshift-ingress/router-default-5444994796-zmgk2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.774165 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzvsl\" (UniqueName: \"kubernetes.io/projected/4b093dc7-d5ee-409a-9c1c-e003686d44e4-kube-api-access-pzvsl\") pod \"marketplace-operator-79b997595-tnzhm\" (UID: \"4b093dc7-d5ee-409a-9c1c-e003686d44e4\") " pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.774184 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/866b7e00-9f0a-4b4d-9b5b-91457bcd2409-metrics-tls\") pod \"dns-default-5ktrv\" (UID: \"866b7e00-9f0a-4b4d-9b5b-91457bcd2409\") " pod="openshift-dns/dns-default-5ktrv"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.774202 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/57270c31-9344-4b7a-9636-119edbf10dbc-serving-cert\") pod \"openshift-config-operator-7777fb866f-8fll2\" (UID: \"57270c31-9344-4b7a-9636-119edbf10dbc\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8fll2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.774219 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/47082a42-2a2d-4d25-bbbe-ee688cd9599a-serving-cert\") pod \"console-operator-58897d9998-vnsx2\" (UID: \"47082a42-2a2d-4d25-bbbe-ee688cd9599a\") " pod="openshift-console-operator/console-operator-58897d9998-vnsx2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.774236 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.774251 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a3b03ed7-b33f-411b-aa66-d06360af63d1-audit-dir\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.774606 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tnzhm"]
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.774681 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/77e4f557-fa21-4034-9a3f-f347f3c219e9-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-gqc2d\" (UID: \"77e4f557-fa21-4034-9a3f-f347f3c219e9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.774817 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d12eac55-c003-4ed2-9b5f-aec3270b23ed-audit-dir\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.775182 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c-service-ca-bundle\") pod \"authentication-operator-69f744f599-rpgsr\" (UID: \"622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.775229 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9ddb15eb-ebaf-4cb3-90b6-6156ffd665af-auth-proxy-config\") pod \"machine-approver-56656f9798-pd2kc\" (UID: \"9ddb15eb-ebaf-4cb3-90b6-6156ffd665af\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pd2kc"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.775933 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d12eac55-c003-4ed2-9b5f-aec3270b23ed-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.776238 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-trusted-ca-bundle\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.776505 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d12eac55-c003-4ed2-9b5f-aec3270b23ed-audit-policies\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.776896 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c-serving-cert\") pod \"authentication-operator-69f744f599-rpgsr\" (UID: \"622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.777106 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4"]
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.777144 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a3b03ed7-b33f-411b-aa66-d06360af63d1-audit-dir\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.777171 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/80b4c3da-59cb-4154-ade0-92772eb0fe8a-default-certificate\") pod \"router-default-5444994796-zmgk2\" (UID: \"80b4c3da-59cb-4154-ade0-92772eb0fe8a\") " pod="openshift-ingress/router-default-5444994796-zmgk2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.777355 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea377cf5-fbd8-462d-bfea-dd3aca3da018-client-ca\") pod \"route-controller-manager-6576b87f9c-5jllv\" (UID: \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.777387 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea377cf5-fbd8-462d-bfea-dd3aca3da018-serving-cert\") pod \"route-controller-manager-6576b87f9c-5jllv\" (UID: \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.777448 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkth4\" (UID: \"6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkth4"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.777473 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkth4"]
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.777490 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkth4\" (UID: \"6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkth4"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.777541 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-etcd-client\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.777576 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.777652 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvrh4\" (UniqueName: \"kubernetes.io/projected/9ddb15eb-ebaf-4cb3-90b6-6156ffd665af-kube-api-access-vvrh4\") pod \"machine-approver-56656f9798-pd2kc\" (UID: \"9ddb15eb-ebaf-4cb3-90b6-6156ffd665af\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pd2kc"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.777664 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d12eac55-c003-4ed2-9b5f-aec3270b23ed-encryption-config\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.777680 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6940aa57-4e2c-41c6-a4e0-64081d1208c9-metrics-tls\") pod \"dns-operator-744455d44c-r844w\" (UID: \"6940aa57-4e2c-41c6-a4e0-64081d1208c9\") " pod="openshift-dns-operator/dns-operator-744455d44c-r844w"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.777779 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.777814 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpkw4\" (UniqueName: \"kubernetes.io/projected/244704c5-6799-48ab-9c90-8c38ae7f3c5e-kube-api-access-dpkw4\") pod \"multus-admission-controller-857f4d67dd-r7gll\" (UID: \"244704c5-6799-48ab-9c90-8c38ae7f3c5e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-r7gll"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.777856 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/9ddb15eb-ebaf-4cb3-90b6-6156ffd665af-machine-approver-tls\") pod \"machine-approver-56656f9798-pd2kc\" (UID: \"9ddb15eb-ebaf-4cb3-90b6-6156ffd665af\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pd2kc"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.777918 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/1a76da48-177b-429e-a136-d78afeae02aa-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-jm24g\" (UID: \"1a76da48-177b-429e-a136-d78afeae02aa\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.777979 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zs827\" (UniqueName: \"kubernetes.io/projected/4b2948a0-e876-4813-8f06-b5a82d35b47a-kube-api-access-zs827\") pod \"machine-config-operator-74547568cd-2fmtb\" (UID: \"4b2948a0-e876-4813-8f06-b5a82d35b47a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.778143 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-config\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.778257 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sg6pk\" (UniqueName: \"kubernetes.io/projected/143cf6af-d9df-44fc-8281-17ee0a86807c-kube-api-access-sg6pk\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.778302 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-829c9\" (UniqueName: \"kubernetes.io/projected/bfd1a7c2-cdbc-4c02-97b4-6a09476397b8-kube-api-access-829c9\") pod \"openshift-controller-manager-operator-756b6f6bc6-6h722\" (UID: \"bfd1a7c2-cdbc-4c02-97b4-6a09476397b8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6h722"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.778571 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwmg2\" (UniqueName: \"kubernetes.io/projected/943bb747-f86c-4422-b99f-97a7fdb07b87-kube-api-access-gwmg2\") pod \"downloads-7954f5f757-trjxm\" (UID: \"943bb747-f86c-4422-b99f-97a7fdb07b87\") " pod="openshift-console/downloads-7954f5f757-trjxm"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.778629 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/47082a42-2a2d-4d25-bbbe-ee688cd9599a-trusted-ca\") pod \"console-operator-58897d9998-vnsx2\" (UID: \"47082a42-2a2d-4d25-bbbe-ee688cd9599a\") " pod="openshift-console-operator/console-operator-58897d9998-vnsx2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.778670 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xv2v\" (UniqueName: \"kubernetes.io/projected/4db3dc80-10df-4d72-925f-ab5c927bc6ef-kube-api-access-5xv2v\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.778698 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4b2948a0-e876-4813-8f06-b5a82d35b47a-proxy-tls\") pod \"machine-config-operator-74547568cd-2fmtb\" (UID: \"4b2948a0-e876-4813-8f06-b5a82d35b47a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.778868 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vscf\" (UniqueName: \"kubernetes.io/projected/6940aa57-4e2c-41c6-a4e0-64081d1208c9-kube-api-access-5vscf\") pod \"dns-operator-744455d44c-r844w\" (UID: \"6940aa57-4e2c-41c6-a4e0-64081d1208c9\") " pod="openshift-dns-operator/dns-operator-744455d44c-r844w"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.778881 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80b4c3da-59cb-4154-ade0-92772eb0fe8a-metrics-certs\") pod \"router-default-5444994796-zmgk2\" (UID: \"80b4c3da-59cb-4154-ade0-92772eb0fe8a\") " pod="openshift-ingress/router-default-5444994796-zmgk2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.778910 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/57270c31-9344-4b7a-9636-119edbf10dbc-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8fll2\" (UID: \"57270c31-9344-4b7a-9636-119edbf10dbc\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8fll2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.778981 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd-signing-cabundle\") pod \"service-ca-9c57cc56f-mttmk\" (UID: \"f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd\") " pod="openshift-service-ca/service-ca-9c57cc56f-mttmk"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.779002 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3f045d2d-b976-40bb-a002-6dcdfeef75e4-profile-collector-cert\") pod \"olm-operator-6b444d44fb-s9vvb\" (UID: \"3f045d2d-b976-40bb-a002-6dcdfeef75e4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.779095 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3f045d2d-b976-40bb-a002-6dcdfeef75e4-srv-cert\") pod \"olm-operator-6b444d44fb-s9vvb\" (UID: \"3f045d2d-b976-40bb-a002-6dcdfeef75e4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.779129 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.779166 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/80b4c3da-59cb-4154-ade0-92772eb0fe8a-service-ca-bundle\") pod \"router-default-5444994796-zmgk2\" (UID: \"80b4c3da-59cb-4154-ade0-92772eb0fe8a\") " pod="openshift-ingress/router-default-5444994796-zmgk2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.779188 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/88ff6e28-5128-42f0-8280-79874dd5431e-etcd-service-ca\") pod \"etcd-operator-b45778765-fwt28\" (UID: \"88ff6e28-5128-42f0-8280-79874dd5431e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.779361 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/57270c31-9344-4b7a-9636-119edbf10dbc-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8fll2\" (UID: \"57270c31-9344-4b7a-9636-119edbf10dbc\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8fll2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.779576 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcb4689a-1339-43ea-b525-787e4a35e05c-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-xnb62\" (UID: \"dcb4689a-1339-43ea-b525-787e4a35e05c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.779641 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-image-import-ca\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.779681 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88ff6e28-5128-42f0-8280-79874dd5431e-serving-cert\") pod \"etcd-operator-b45778765-fwt28\" (UID: \"88ff6e28-5128-42f0-8280-79874dd5431e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.779703 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzf74\" (UniqueName: \"kubernetes.io/projected/a3b03ed7-b33f-411b-aa66-d06360af63d1-kube-api-access-pzf74\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.779836 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/47082a42-2a2d-4d25-bbbe-ee688cd9599a-trusted-ca\") pod \"console-operator-58897d9998-vnsx2\" (UID: \"47082a42-2a2d-4d25-bbbe-ee688cd9599a\") " pod="openshift-console-operator/console-operator-58897d9998-vnsx2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.780331 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/77e4f557-fa21-4034-9a3f-f347f3c219e9-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-gqc2d\" (UID: \"77e4f557-fa21-4034-9a3f-f347f3c219e9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.780480 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/65999965-0f9c-435c-8687-a50033c9d661-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-tqwkj\" (UID: \"65999965-0f9c-435c-8687-a50033c9d661\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tqwkj"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.780552 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bfd1a7c2-cdbc-4c02-97b4-6a09476397b8-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-6h722\" (UID: \"bfd1a7c2-cdbc-4c02-97b4-6a09476397b8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6h722"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.780572 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfd1a7c2-cdbc-4c02-97b4-6a09476397b8-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-6h722\" (UID: \"bfd1a7c2-cdbc-4c02-97b4-6a09476397b8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6h722"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.780590 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-audit\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.780865 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57bqd\" (UniqueName: \"kubernetes.io/projected/1a76da48-177b-429e-a136-d78afeae02aa-kube-api-access-57bqd\") pod \"machine-api-operator-5694c8668f-jm24g\" (UID: \"1a76da48-177b-429e-a136-d78afeae02aa\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.780925 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/88ff6e28-5128-42f0-8280-79874dd5431e-etcd-client\") pod \"etcd-operator-b45778765-fwt28\" (UID: \"88ff6e28-5128-42f0-8280-79874dd5431e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.780965 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c-config\") pod \"authentication-operator-69f744f599-rpgsr\" (UID: \"622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.780994 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-config\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.781147 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/80b4c3da-59cb-4154-ade0-92772eb0fe8a-service-ca-bundle\") pod \"router-default-5444994796-zmgk2\" (UID: \"80b4c3da-59cb-4154-ade0-92772eb0fe8a\") " pod="openshift-ingress/router-default-5444994796-zmgk2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.781220 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fcms\" (UniqueName: \"kubernetes.io/projected/3f045d2d-b976-40bb-a002-6dcdfeef75e4-kube-api-access-5fcms\") pod \"olm-operator-6b444d44fb-s9vvb\" (UID: \"3f045d2d-b976-40bb-a002-6dcdfeef75e4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.781356 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a8dcf163-8c98-4db0-95e5-1d96e45a619c-secret-volume\") pod \"collect-profiles-29484705-kmcsc\" (UID: \"a8dcf163-8c98-4db0-95e5-1d96e45a619c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.781391 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8hhq\" (UniqueName: \"kubernetes.io/projected/ba3fd722-161e-4047-bd23-6b6d5306a6ee-kube-api-access-t8hhq\") pod \"ingress-operator-5b745b69d9-ls4mr\" (UID: \"ba3fd722-161e-4047-bd23-6b6d5306a6ee\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.781426 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-etcd-serving-ca\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.781454 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcb4689a-1339-43ea-b525-787e4a35e05c-config\") pod \"openshift-apiserver-operator-796bbdcf4f-xnb62\" (UID: \"dcb4689a-1339-43ea-b525-787e4a35e05c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.781482 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24nzk\" (UniqueName: \"kubernetes.io/projected/65999965-0f9c-435c-8687-a50033c9d661-kube-api-access-24nzk\") pod \"package-server-manager-789f6589d5-tqwkj\" (UID: \"65999965-0f9c-435c-8687-a50033c9d661\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tqwkj"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.781567 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-56h2q"]
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.781626 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4jbm\" (UniqueName: \"kubernetes.io/projected/57270c31-9344-4b7a-9636-119edbf10dbc-kube-api-access-d4jbm\") pod \"openshift-config-operator-7777fb866f-8fll2\" (UID: \"57270c31-9344-4b7a-9636-119edbf10dbc\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8fll2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.782014 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c-config\") pod \"authentication-operator-69f744f599-rpgsr\" (UID: \"622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.782408 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-56h2q"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.782544 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/866b7e00-9f0a-4b4d-9b5b-91457bcd2409-config-volume\") pod \"dns-default-5ktrv\" (UID: \"866b7e00-9f0a-4b4d-9b5b-91457bcd2409\") " pod="openshift-dns/dns-default-5ktrv"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.782554 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/5e69f07b-6904-4637-9b6a-95d9714f036a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-29862\" (UID: \"5e69f07b-6904-4637-9b6a-95d9714f036a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-29862"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.782787 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4b093dc7-d5ee-409a-9c1c-e003686d44e4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tnzhm\" (UID: \"4b093dc7-d5ee-409a-9c1c-e003686d44e4\") " pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.782865 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/2e31d232-2138-4dd6-9b7d-71f87e414a01-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-2hwmg\" (UID: \"2e31d232-2138-4dd6-9b7d-71f87e414a01\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2hwmg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.782927 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lg8q5\" (UniqueName: \"kubernetes.io/projected/77e4f557-fa21-4034-9a3f-f347f3c219e9-kube-api-access-lg8q5\") pod \"cluster-image-registry-operator-dc59b4c8b-gqc2d\" (UID: \"77e4f557-fa21-4034-9a3f-f347f3c219e9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.783040 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea377cf5-fbd8-462d-bfea-dd3aca3da018-config\") pod \"route-controller-manager-6576b87f9c-5jllv\" (UID: \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.783141 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qxzg\" (UniqueName: \"kubernetes.io/projected/f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd-kube-api-access-4qxzg\") pod \"service-ca-9c57cc56f-mttmk\" (UID: \"f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd\") " pod="openshift-service-ca/service-ca-9c57cc56f-mttmk"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.783563 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d12eac55-c003-4ed2-9b5f-aec3270b23ed-serving-cert\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.784479 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/9ddb15eb-ebaf-4cb3-90b6-6156ffd665af-machine-approver-tls\") pod \"machine-approver-56656f9798-pd2kc\" (UID: \"9ddb15eb-ebaf-4cb3-90b6-6156ffd665af\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pd2kc"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.784695 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea377cf5-fbd8-462d-bfea-dd3aca3da018-config\") pod \"route-controller-manager-6576b87f9c-5jllv\" (UID: \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.784972 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.785224 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d12eac55-c003-4ed2-9b5f-aec3270b23ed-etcd-client\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.785841 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/80b4c3da-59cb-4154-ade0-92772eb0fe8a-stats-auth\") pod \"router-default-5444994796-zmgk2\" (UID: \"80b4c3da-59cb-4154-ade0-92772eb0fe8a\") " pod="openshift-ingress/router-default-5444994796-zmgk2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.786592 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/47082a42-2a2d-4d25-bbbe-ee688cd9599a-serving-cert\") pod \"console-operator-58897d9998-vnsx2\" (UID: \"47082a42-2a2d-4d25-bbbe-ee688cd9599a\") " pod="openshift-console-operator/console-operator-58897d9998-vnsx2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.788544 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-6s7m8"]
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.789339 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/57270c31-9344-4b7a-9636-119edbf10dbc-serving-cert\") pod \"openshift-config-operator-7777fb866f-8fll2\" (UID: \"57270c31-9344-4b7a-9636-119edbf10dbc\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8fll2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.789948 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6s7m8"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.790008 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-console-oauth-config\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.791521 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc"]
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.792781 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/80b4c3da-59cb-4154-ade0-92772eb0fe8a-default-certificate\") pod \"router-default-5444994796-zmgk2\" (UID: \"80b4c3da-59cb-4154-ade0-92772eb0fe8a\") " pod="openshift-ingress/router-default-5444994796-zmgk2"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.792902 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-console-serving-cert\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.793748 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-fwt28"]
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.793779 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d12eac55-c003-4ed2-9b5f-aec3270b23ed-serving-cert\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.795884 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-mttmk"]
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.797432 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2hwmg"]
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.809028 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.813741 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr"]
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.813800 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-vnsx2"]
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.815437 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6h722"]
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.818593 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-jc2cw"]
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.819802 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api"
pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmqk7"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.820845 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-56h2q"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.822124 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-r5t5b"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.823110 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cqcxg"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.824522 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-5ktrv"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.824854 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.825677 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-h7fz7"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.826802 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-r7gll"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.827784 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-64x99"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.828975 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.828979 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-64x99"] Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.830788 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6940aa57-4e2c-41c6-a4e0-64081d1208c9-metrics-tls\") pod \"dns-operator-744455d44c-r844w\" (UID: \"6940aa57-4e2c-41c6-a4e0-64081d1208c9\") " pod="openshift-dns-operator/dns-operator-744455d44c-r844w" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.845382 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.864775 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.884689 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qxzg\" (UniqueName: \"kubernetes.io/projected/f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd-kube-api-access-4qxzg\") pod \"service-ca-9c57cc56f-mttmk\" (UID: \"f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd\") " pod="openshift-service-ca/service-ca-9c57cc56f-mttmk" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.884732 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a8dcf163-8c98-4db0-95e5-1d96e45a619c-config-volume\") pod \"collect-profiles-29484705-kmcsc\" (UID: \"a8dcf163-8c98-4db0-95e5-1d96e45a619c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.884817 4773 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4b2948a0-e876-4813-8f06-b5a82d35b47a-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2fmtb\" (UID: \"4b2948a0-e876-4813-8f06-b5a82d35b47a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.884844 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c65efd55-e31e-4f78-9c88-89e9c5d35df8-srv-cert\") pod \"catalog-operator-68c6474976-2rj56\" (UID: \"c65efd55-e31e-4f78-9c88-89e9c5d35df8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.884867 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5mtk\" (UniqueName: \"kubernetes.io/projected/e0dc1dc8-96fa-4338-b5c9-a6a85fe40ba1-kube-api-access-x5mtk\") pod \"migrator-59844c95c7-zrkcm\" (UID: \"e0dc1dc8-96fa-4338-b5c9-a6a85fe40ba1\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zrkcm" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.884889 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kf26\" (UniqueName: \"kubernetes.io/projected/866b7e00-9f0a-4b4d-9b5b-91457bcd2409-kube-api-access-8kf26\") pod \"dns-default-5ktrv\" (UID: \"866b7e00-9f0a-4b4d-9b5b-91457bcd2409\") " pod="openshift-dns/dns-default-5ktrv" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.884931 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ba3fd722-161e-4047-bd23-6b6d5306a6ee-metrics-tls\") pod \"ingress-operator-5b745b69d9-ls4mr\" (UID: \"ba3fd722-161e-4047-bd23-6b6d5306a6ee\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.884959 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd-signing-key\") pod \"service-ca-9c57cc56f-mttmk\" (UID: \"f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd\") " pod="openshift-service-ca/service-ca-9c57cc56f-mttmk" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.884980 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-np5qp\" (UniqueName: \"kubernetes.io/projected/6f6256b7-548b-46ff-9c2a-d95240136df2-kube-api-access-np5qp\") pod \"machine-config-controller-84d6567774-jc2cw\" (UID: \"6f6256b7-548b-46ff-9c2a-d95240136df2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jc2cw" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885004 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4b2948a0-e876-4813-8f06-b5a82d35b47a-images\") pod \"machine-config-operator-74547568cd-2fmtb\" (UID: \"4b2948a0-e876-4813-8f06-b5a82d35b47a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885070 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a36fde19-2db8-4b4d-9702-a8a3a20a77e1-config\") pod \"service-ca-operator-777779d784-h7fz7\" 
(UID: \"a36fde19-2db8-4b4d-9702-a8a3a20a77e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-h7fz7" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885097 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6f6256b7-548b-46ff-9c2a-d95240136df2-proxy-tls\") pod \"machine-config-controller-84d6567774-jc2cw\" (UID: \"6f6256b7-548b-46ff-9c2a-d95240136df2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jc2cw" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885117 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ba3fd722-161e-4047-bd23-6b6d5306a6ee-trusted-ca\") pod \"ingress-operator-5b745b69d9-ls4mr\" (UID: \"ba3fd722-161e-4047-bd23-6b6d5306a6ee\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885136 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c65efd55-e31e-4f78-9c88-89e9c5d35df8-profile-collector-cert\") pod \"catalog-operator-68c6474976-2rj56\" (UID: \"c65efd55-e31e-4f78-9c88-89e9c5d35df8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885170 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkth4\" (UID: \"6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkth4" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885191 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4b093dc7-d5ee-409a-9c1c-e003686d44e4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tnzhm\" (UID: \"4b093dc7-d5ee-409a-9c1c-e003686d44e4\") " pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885212 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a36fde19-2db8-4b4d-9702-a8a3a20a77e1-serving-cert\") pod \"service-ca-operator-777779d784-h7fz7\" (UID: \"a36fde19-2db8-4b4d-9702-a8a3a20a77e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-h7fz7" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885235 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t24gj\" (UniqueName: \"kubernetes.io/projected/a8dcf163-8c98-4db0-95e5-1d96e45a619c-kube-api-access-t24gj\") pod \"collect-profiles-29484705-kmcsc\" (UID: \"a8dcf163-8c98-4db0-95e5-1d96e45a619c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885266 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88ff6e28-5128-42f0-8280-79874dd5431e-config\") pod \"etcd-operator-b45778765-fwt28\" (UID: \"88ff6e28-5128-42f0-8280-79874dd5431e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28" Jan 22 
11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885321 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwgwc\" (UniqueName: \"kubernetes.io/projected/a36fde19-2db8-4b4d-9702-a8a3a20a77e1-kube-api-access-zwgwc\") pod \"service-ca-operator-777779d784-h7fz7\" (UID: \"a36fde19-2db8-4b4d-9702-a8a3a20a77e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-h7fz7" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885415 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/88ff6e28-5128-42f0-8280-79874dd5431e-etcd-ca\") pod \"etcd-operator-b45778765-fwt28\" (UID: \"88ff6e28-5128-42f0-8280-79874dd5431e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885438 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9txxr\" (UniqueName: \"kubernetes.io/projected/88ff6e28-5128-42f0-8280-79874dd5431e-kube-api-access-9txxr\") pod \"etcd-operator-b45778765-fwt28\" (UID: \"88ff6e28-5128-42f0-8280-79874dd5431e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885458 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x48g2\" (UniqueName: \"kubernetes.io/projected/c65efd55-e31e-4f78-9c88-89e9c5d35df8-kube-api-access-x48g2\") pod \"catalog-operator-68c6474976-2rj56\" (UID: \"c65efd55-e31e-4f78-9c88-89e9c5d35df8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885480 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/244704c5-6799-48ab-9c90-8c38ae7f3c5e-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-r7gll\" (UID: \"244704c5-6799-48ab-9c90-8c38ae7f3c5e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-r7gll" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885501 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ba3fd722-161e-4047-bd23-6b6d5306a6ee-bound-sa-token\") pod \"ingress-operator-5b745b69d9-ls4mr\" (UID: \"ba3fd722-161e-4047-bd23-6b6d5306a6ee\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885525 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75f7x\" (UniqueName: \"kubernetes.io/projected/2e31d232-2138-4dd6-9b7d-71f87e414a01-kube-api-access-75f7x\") pod \"control-plane-machine-set-operator-78cbb6b69f-2hwmg\" (UID: \"2e31d232-2138-4dd6-9b7d-71f87e414a01\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2hwmg" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885548 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6f6256b7-548b-46ff-9c2a-d95240136df2-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-jc2cw\" (UID: \"6f6256b7-548b-46ff-9c2a-d95240136df2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jc2cw" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885586 4773 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-pzvsl\" (UniqueName: \"kubernetes.io/projected/4b093dc7-d5ee-409a-9c1c-e003686d44e4-kube-api-access-pzvsl\") pod \"marketplace-operator-79b997595-tnzhm\" (UID: \"4b093dc7-d5ee-409a-9c1c-e003686d44e4\") " pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885608 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/866b7e00-9f0a-4b4d-9b5b-91457bcd2409-metrics-tls\") pod \"dns-default-5ktrv\" (UID: \"866b7e00-9f0a-4b4d-9b5b-91457bcd2409\") " pod="openshift-dns/dns-default-5ktrv" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885645 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkth4\" (UID: \"6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkth4" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885667 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkth4\" (UID: \"6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkth4" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885712 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpkw4\" (UniqueName: \"kubernetes.io/projected/244704c5-6799-48ab-9c90-8c38ae7f3c5e-kube-api-access-dpkw4\") pod \"multus-admission-controller-857f4d67dd-r7gll\" (UID: \"244704c5-6799-48ab-9c90-8c38ae7f3c5e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-r7gll" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885756 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zs827\" (UniqueName: \"kubernetes.io/projected/4b2948a0-e876-4813-8f06-b5a82d35b47a-kube-api-access-zs827\") pod \"machine-config-operator-74547568cd-2fmtb\" (UID: \"4b2948a0-e876-4813-8f06-b5a82d35b47a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885791 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4b2948a0-e876-4813-8f06-b5a82d35b47a-proxy-tls\") pod \"machine-config-operator-74547568cd-2fmtb\" (UID: \"4b2948a0-e876-4813-8f06-b5a82d35b47a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885827 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd-signing-cabundle\") pod \"service-ca-9c57cc56f-mttmk\" (UID: \"f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd\") " pod="openshift-service-ca/service-ca-9c57cc56f-mttmk" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885846 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3f045d2d-b976-40bb-a002-6dcdfeef75e4-srv-cert\") pod 
\"olm-operator-6b444d44fb-s9vvb\" (UID: \"3f045d2d-b976-40bb-a002-6dcdfeef75e4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885865 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3f045d2d-b976-40bb-a002-6dcdfeef75e4-profile-collector-cert\") pod \"olm-operator-6b444d44fb-s9vvb\" (UID: \"3f045d2d-b976-40bb-a002-6dcdfeef75e4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885885 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/88ff6e28-5128-42f0-8280-79874dd5431e-etcd-service-ca\") pod \"etcd-operator-b45778765-fwt28\" (UID: \"88ff6e28-5128-42f0-8280-79874dd5431e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885932 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/65999965-0f9c-435c-8687-a50033c9d661-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-tqwkj\" (UID: \"65999965-0f9c-435c-8687-a50033c9d661\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tqwkj" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.885954 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88ff6e28-5128-42f0-8280-79874dd5431e-serving-cert\") pod \"etcd-operator-b45778765-fwt28\" (UID: \"88ff6e28-5128-42f0-8280-79874dd5431e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.886034 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/88ff6e28-5128-42f0-8280-79874dd5431e-etcd-client\") pod \"etcd-operator-b45778765-fwt28\" (UID: \"88ff6e28-5128-42f0-8280-79874dd5431e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.886065 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8hhq\" (UniqueName: \"kubernetes.io/projected/ba3fd722-161e-4047-bd23-6b6d5306a6ee-kube-api-access-t8hhq\") pod \"ingress-operator-5b745b69d9-ls4mr\" (UID: \"ba3fd722-161e-4047-bd23-6b6d5306a6ee\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.886087 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fcms\" (UniqueName: \"kubernetes.io/projected/3f045d2d-b976-40bb-a002-6dcdfeef75e4-kube-api-access-5fcms\") pod \"olm-operator-6b444d44fb-s9vvb\" (UID: \"3f045d2d-b976-40bb-a002-6dcdfeef75e4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb" Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.886107 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a8dcf163-8c98-4db0-95e5-1d96e45a619c-secret-volume\") pod \"collect-profiles-29484705-kmcsc\" (UID: \"a8dcf163-8c98-4db0-95e5-1d96e45a619c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc" Jan 22 
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.886140 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24nzk\" (UniqueName: \"kubernetes.io/projected/65999965-0f9c-435c-8687-a50033c9d661-kube-api-access-24nzk\") pod \"package-server-manager-789f6589d5-tqwkj\" (UID: \"65999965-0f9c-435c-8687-a50033c9d661\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tqwkj"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.886168 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/866b7e00-9f0a-4b4d-9b5b-91457bcd2409-config-volume\") pod \"dns-default-5ktrv\" (UID: \"866b7e00-9f0a-4b4d-9b5b-91457bcd2409\") " pod="openshift-dns/dns-default-5ktrv"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.886188 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4b093dc7-d5ee-409a-9c1c-e003686d44e4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tnzhm\" (UID: \"4b093dc7-d5ee-409a-9c1c-e003686d44e4\") " pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.886212 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/2e31d232-2138-4dd6-9b7d-71f87e414a01-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-2hwmg\" (UID: \"2e31d232-2138-4dd6-9b7d-71f87e414a01\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2hwmg"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.888891 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4b2948a0-e876-4813-8f06-b5a82d35b47a-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2fmtb\" (UID: \"4b2948a0-e876-4813-8f06-b5a82d35b47a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.891832 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6f6256b7-548b-46ff-9c2a-d95240136df2-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-jc2cw\" (UID: \"6f6256b7-548b-46ff-9c2a-d95240136df2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jc2cw"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.892772 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.911756 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bfd1a7c2-cdbc-4c02-97b4-6a09476397b8-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-6h722\" (UID: \"bfd1a7c2-cdbc-4c02-97b4-6a09476397b8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6h722"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.912555 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.928502 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.946331 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.952501 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfd1a7c2-cdbc-4c02-97b4-6a09476397b8-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-6h722\" (UID: \"bfd1a7c2-cdbc-4c02-97b4-6a09476397b8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6h722"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.965457 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Jan 22 11:57:06 crc kubenswrapper[4773]: I0122 11:57:06.985856 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.005365 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.025397 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.045054 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.053840 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/88ff6e28-5128-42f0-8280-79874dd5431e-serving-cert\") pod \"etcd-operator-b45778765-fwt28\" (UID: \"88ff6e28-5128-42f0-8280-79874dd5431e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.065360 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.074210 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/88ff6e28-5128-42f0-8280-79874dd5431e-etcd-client\") pod \"etcd-operator-b45778765-fwt28\" (UID: \"88ff6e28-5128-42f0-8280-79874dd5431e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.085729 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.106623 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.125382 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.145397 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.151069 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88ff6e28-5128-42f0-8280-79874dd5431e-config\") pod \"etcd-operator-b45778765-fwt28\" (UID: \"88ff6e28-5128-42f0-8280-79874dd5431e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.165060 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.172332 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ba3fd722-161e-4047-bd23-6b6d5306a6ee-metrics-tls\") pod \"ingress-operator-5b745b69d9-ls4mr\" (UID: \"ba3fd722-161e-4047-bd23-6b6d5306a6ee\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.184835 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.190969 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/88ff6e28-5128-42f0-8280-79874dd5431e-etcd-ca\") pod \"etcd-operator-b45778765-fwt28\" (UID: \"88ff6e28-5128-42f0-8280-79874dd5431e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.210315 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.221456 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ba3fd722-161e-4047-bd23-6b6d5306a6ee-trusted-ca\") pod \"ingress-operator-5b745b69d9-ls4mr\" (UID: \"ba3fd722-161e-4047-bd23-6b6d5306a6ee\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.226234 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.231749 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/88ff6e28-5128-42f0-8280-79874dd5431e-etcd-service-ca\") pod \"etcd-operator-b45778765-fwt28\" (UID: \"88ff6e28-5128-42f0-8280-79874dd5431e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.245905 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.265022 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.306218 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.324752 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.345543 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.366425 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.385567 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.405842 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.424988 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.445804 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.466122 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.485921 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.494394 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3f045d2d-b976-40bb-a002-6dcdfeef75e4-srv-cert\") pod \"olm-operator-6b444d44fb-s9vvb\" (UID: \"3f045d2d-b976-40bb-a002-6dcdfeef75e4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.505732 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.514119 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c65efd55-e31e-4f78-9c88-89e9c5d35df8-profile-collector-cert\") pod \"catalog-operator-68c6474976-2rj56\" (UID: \"c65efd55-e31e-4f78-9c88-89e9c5d35df8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.514836 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a8dcf163-8c98-4db0-95e5-1d96e45a619c-secret-volume\") pod \"collect-profiles-29484705-kmcsc\" (UID: \"a8dcf163-8c98-4db0-95e5-1d96e45a619c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.515495 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3f045d2d-b976-40bb-a002-6dcdfeef75e4-profile-collector-cert\") pod \"olm-operator-6b444d44fb-s9vvb\" (UID: \"3f045d2d-b976-40bb-a002-6dcdfeef75e4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.525652 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.545419 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.565943 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.585399 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.606008 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.625616 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.645806 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.665205 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.684513 4773 request.go:700] Waited for 1.005555388s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver-operator/secrets?fieldSelector=metadata.name%3Dkube-apiserver-operator-serving-cert&limit=500&resourceVersion=0
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.686253 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.704744 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.725900 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.732820 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c65efd55-e31e-4f78-9c88-89e9c5d35df8-srv-cert\") pod \"catalog-operator-68c6474976-2rj56\" (UID: \"c65efd55-e31e-4f78-9c88-89e9c5d35df8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.745245 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.765507 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.769993 4773 configmap.go:193] Couldn't get configMap openshift-apiserver/trusted-ca-bundle: failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.770039 4773 secret.go:188] Couldn't get secret openshift-apiserver/serving-cert: failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.770047 4773 configmap.go:193] Couldn't get configMap openshift-authentication/v4-0-config-system-service-ca: failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.770079 4773 secret.go:188] Couldn't get secret openshift-controller-manager/serving-cert: failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.770114 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-trusted-ca-bundle podName:a3b03ed7-b33f-411b-aa66-d06360af63d1 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.270093274 +0000 UTC m=+135.848209099 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "trusted-ca-bundle" (UniqueName: "kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-trusted-ca-bundle") pod "apiserver-76f77b778f-xqd5z" (UID: "a3b03ed7-b33f-411b-aa66-d06360af63d1") : failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.770136 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/143cf6af-d9df-44fc-8281-17ee0a86807c-serving-cert podName:143cf6af-d9df-44fc-8281-17ee0a86807c nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.270122895 +0000 UTC m=+135.848238740 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/143cf6af-d9df-44fc-8281-17ee0a86807c-serving-cert") pod "controller-manager-879f6c89f-r5t5b" (UID: "143cf6af-d9df-44fc-8281-17ee0a86807c") : failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.770156 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-service-ca podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.270146226 +0000 UTC m=+135.848262061 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-system-service-ca" (UniqueName: "kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-service-ca") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.770181 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-serving-cert podName:a3b03ed7-b33f-411b-aa66-d06360af63d1 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.270173167 +0000 UTC m=+135.848289002 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-serving-cert") pod "apiserver-76f77b778f-xqd5z" (UID: "a3b03ed7-b33f-411b-aa66-d06360af63d1") : failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.772758 4773 configmap.go:193] Couldn't get configMap openshift-authentication/audit: failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.772779 4773 configmap.go:193] Couldn't get configMap openshift-controller-manager/client-ca: failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.772798 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-audit-policies podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.272787131 +0000 UTC m=+135.850902956 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "audit-policies" (UniqueName: "kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-audit-policies") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.772822 4773 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-user-template-login: failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.772850 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-login podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.272842093 +0000 UTC m=+135.850957918 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-user-template-login" (UniqueName: "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-login") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.772867 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-client-ca podName:143cf6af-d9df-44fc-8281-17ee0a86807c nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.272857754 +0000 UTC m=+135.850973579 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "client-ca" (UniqueName: "kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-client-ca") pod "controller-manager-879f6c89f-r5t5b" (UID: "143cf6af-d9df-44fc-8281-17ee0a86807c") : failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.775795 4773 configmap.go:193] Couldn't get configMap openshift-machine-api/machine-api-operator-images: failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.775880 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1a76da48-177b-429e-a136-d78afeae02aa-images podName:1a76da48-177b-429e-a136-d78afeae02aa nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.275854594 +0000 UTC m=+135.853970479 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "images" (UniqueName: "kubernetes.io/configmap/1a76da48-177b-429e-a136-d78afeae02aa-images") pod "machine-api-operator-5694c8668f-jm24g" (UID: "1a76da48-177b-429e-a136-d78afeae02aa") : failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.777523 4773 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-ocp-branding-template: failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.777655 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-ocp-branding-template podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.277629091 +0000 UTC m=+135.855744976 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-system-ocp-branding-template" (UniqueName: "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-ocp-branding-template") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.777701 4773 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-user-template-error: failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.777767 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-error podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.277746526 +0000 UTC m=+135.855862391 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-user-template-error" (UniqueName: "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-error") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.777870 4773 configmap.go:193] Couldn't get configMap openshift-controller-manager/openshift-global-ca: failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.777947 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-proxy-ca-bundles podName:143cf6af-d9df-44fc-8281-17ee0a86807c nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.277923183 +0000 UTC m=+135.856039048 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "proxy-ca-bundles" (UniqueName: "kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-proxy-ca-bundles") pod "controller-manager-879f6c89f-r5t5b" (UID: "143cf6af-d9df-44fc-8281-17ee0a86807c") : failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.777987 4773 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-user-template-provider-selection: failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.778049 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-provider-selection podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.278029578 +0000 UTC m=+135.856145443 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-user-template-provider-selection" (UniqueName: "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-provider-selection") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.778090 4773 secret.go:188] Couldn't get secret openshift-apiserver/encryption-config-1: failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.778141 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-encryption-config podName:a3b03ed7-b33f-411b-aa66-d06360af63d1 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.278124202 +0000 UTC m=+135.856240067 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "encryption-config" (UniqueName: "kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-encryption-config") pod "apiserver-76f77b778f-xqd5z" (UID: "a3b03ed7-b33f-411b-aa66-d06360af63d1") : failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.778224 4773 configmap.go:193] Couldn't get configMap openshift-authentication/v4-0-config-system-trusted-ca-bundle: failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.778327 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-trusted-ca-bundle podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.278263688 +0000 UTC m=+135.856379563 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-system-trusted-ca-bundle" (UniqueName: "kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-trusted-ca-bundle") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.778380 4773 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-user-idp-0-file-data: failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.778437 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-idp-0-file-data podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.278418745 +0000 UTC m=+135.856534610 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-user-idp-0-file-data" (UniqueName: "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-idp-0-file-data") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.778476 4773 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-serving-cert: failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.778531 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-serving-cert podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.278510069 +0000 UTC m=+135.856625934 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-system-serving-cert" (UniqueName: "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-serving-cert") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.778593 4773 configmap.go:193] Couldn't get configMap openshift-authentication/v4-0-config-system-cliconfig: failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.778651 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-cliconfig podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.278631244 +0000 UTC m=+135.856747109 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-system-cliconfig" (UniqueName: "kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-cliconfig") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.778724 4773 configmap.go:193] Couldn't get configMap openshift-controller-manager/config: failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.778784 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-config podName:143cf6af-d9df-44fc-8281-17ee0a86807c nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.27876668 +0000 UTC m=+135.856882545 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-config") pod "controller-manager-879f6c89f-r5t5b" (UID: "143cf6af-d9df-44fc-8281-17ee0a86807c") : failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.778818 4773 secret.go:188] Couldn't get secret openshift-apiserver/etcd-client: failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.778869 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-etcd-client podName:a3b03ed7-b33f-411b-aa66-d06360af63d1 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.278852654 +0000 UTC m=+135.856968519 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etcd-client" (UniqueName: "kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-etcd-client") pod "apiserver-76f77b778f-xqd5z" (UID: "a3b03ed7-b33f-411b-aa66-d06360af63d1") : failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.778901 4773 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-session: failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.778954 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-session podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.278938307 +0000 UTC m=+135.857054162 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-system-session" (UniqueName: "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-session") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.778988 4773 secret.go:188] Couldn't get secret openshift-machine-api/machine-api-operator-tls: failed to sync secret cache: timed out waiting for the condition
Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.779045 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1a76da48-177b-429e-a136-d78afeae02aa-machine-api-operator-tls podName:1a76da48-177b-429e-a136-d78afeae02aa nodeName:}" failed.
No retries permitted until 2026-01-22 11:57:08.279027861 +0000 UTC m=+135.857143726 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "machine-api-operator-tls" (UniqueName: "kubernetes.io/secret/1a76da48-177b-429e-a136-d78afeae02aa-machine-api-operator-tls") pod "machine-api-operator-5694c8668f-jm24g" (UID: "1a76da48-177b-429e-a136-d78afeae02aa") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.780317 4773 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-router-certs: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.780396 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-router-certs podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.28037338 +0000 UTC m=+135.858489245 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-system-router-certs" (UniqueName: "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-router-certs") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.781164 4773 configmap.go:193] Couldn't get configMap openshift-machine-api/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.781241 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1a76da48-177b-429e-a136-d78afeae02aa-config podName:1a76da48-177b-429e-a136-d78afeae02aa nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.281225037 +0000 UTC m=+135.859340872 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/1a76da48-177b-429e-a136-d78afeae02aa-config") pod "machine-api-operator-5694c8668f-jm24g" (UID: "1a76da48-177b-429e-a136-d78afeae02aa") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.785729 4773 configmap.go:193] Couldn't get configMap openshift-apiserver-operator/openshift-apiserver-operator-config: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.785798 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/dcb4689a-1339-43ea-b525-787e4a35e05c-config podName:dcb4689a-1339-43ea-b525-787e4a35e05c nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.285780634 +0000 UTC m=+135.863896459 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/dcb4689a-1339-43ea-b525-787e4a35e05c-config") pod "openshift-apiserver-operator-796bbdcf4f-xnb62" (UID: "dcb4689a-1339-43ea-b525-787e4a35e05c") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.785824 4773 configmap.go:193] Couldn't get configMap openshift-apiserver/config: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.785852 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-config podName:a3b03ed7-b33f-411b-aa66-d06360af63d1 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.285844337 +0000 UTC m=+135.863960162 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-config") pod "apiserver-76f77b778f-xqd5z" (UID: "a3b03ed7-b33f-411b-aa66-d06360af63d1") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.785883 4773 configmap.go:193] Couldn't get configMap openshift-apiserver/etcd-serving-ca: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.785910 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-etcd-serving-ca podName:a3b03ed7-b33f-411b-aa66-d06360af63d1 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.28590245 +0000 UTC m=+135.864018275 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etcd-serving-ca" (UniqueName: "kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-etcd-serving-ca") pod "apiserver-76f77b778f-xqd5z" (UID: "a3b03ed7-b33f-411b-aa66-d06360af63d1") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.785937 4773 configmap.go:193] Couldn't get configMap openshift-apiserver/audit-1: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.785960 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-audit podName:a3b03ed7-b33f-411b-aa66-d06360af63d1 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.285953432 +0000 UTC m=+135.864069257 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "audit" (UniqueName: "kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-audit") pod "apiserver-76f77b778f-xqd5z" (UID: "a3b03ed7-b33f-411b-aa66-d06360af63d1") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.785992 4773 secret.go:188] Couldn't get secret openshift-apiserver-operator/openshift-apiserver-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.786017 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dcb4689a-1339-43ea-b525-787e4a35e05c-serving-cert podName:dcb4689a-1339-43ea-b525-787e4a35e05c nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.286011524 +0000 UTC m=+135.864127349 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/dcb4689a-1339-43ea-b525-787e4a35e05c-serving-cert") pod "openshift-apiserver-operator-796bbdcf4f-xnb62" (UID: "dcb4689a-1339-43ea-b525-787e4a35e05c") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.786106 4773 configmap.go:193] Couldn't get configMap openshift-apiserver/image-import-ca: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.786238 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-image-import-ca podName:a3b03ed7-b33f-411b-aa66-d06360af63d1 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.286201823 +0000 UTC m=+135.864317718 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "image-import-ca" (UniqueName: "kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-image-import-ca") pod "apiserver-76f77b778f-xqd5z" (UID: "a3b03ed7-b33f-411b-aa66-d06360af63d1") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.786610 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.794731 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkth4\" (UID: \"6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkth4" Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.805334 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.811922 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkth4\" (UID: \"6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkth4" Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.825166 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.831652 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/2e31d232-2138-4dd6-9b7d-71f87e414a01-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-2hwmg\" (UID: \"2e31d232-2138-4dd6-9b7d-71f87e414a01\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2hwmg" Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.844526 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.864671 4773 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.871344 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4b2948a0-e876-4813-8f06-b5a82d35b47a-images\") pod \"machine-config-operator-74547568cd-2fmtb\" (UID: \"4b2948a0-e876-4813-8f06-b5a82d35b47a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb" Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.885276 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.888602 4773 configmap.go:193] Couldn't get configMap openshift-operator-lifecycle-manager/collect-profiles-config: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.888687 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a8dcf163-8c98-4db0-95e5-1d96e45a619c-config-volume podName:a8dcf163-8c98-4db0-95e5-1d96e45a619c nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.388665408 +0000 UTC m=+135.966781233 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/a8dcf163-8c98-4db0-95e5-1d96e45a619c-config-volume") pod "collect-profiles-29484705-kmcsc" (UID: "a8dcf163-8c98-4db0-95e5-1d96e45a619c") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.889640 4773 secret.go:188] Couldn't get secret openshift-machine-config-operator/mcc-proxy-tls: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.889680 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f6256b7-548b-46ff-9c2a-d95240136df2-proxy-tls podName:6f6256b7-548b-46ff-9c2a-d95240136df2 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.389670972 +0000 UTC m=+135.967786797 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "proxy-tls" (UniqueName: "kubernetes.io/secret/6f6256b7-548b-46ff-9c2a-d95240136df2-proxy-tls") pod "machine-config-controller-84d6567774-jc2cw" (UID: "6f6256b7-548b-46ff-9c2a-d95240136df2") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.889733 4773 secret.go:188] Couldn't get secret openshift-service-ca/signing-key: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.889763 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd-signing-key podName:f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.389755586 +0000 UTC m=+135.967871401 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "signing-key" (UniqueName: "kubernetes.io/secret/f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd-signing-key") pod "service-ca-9c57cc56f-mttmk" (UID: "f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.889786 4773 configmap.go:193] Couldn't get configMap openshift-service-ca-operator/service-ca-operator-config: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.889806 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a36fde19-2db8-4b4d-9702-a8a3a20a77e1-config podName:a36fde19-2db8-4b4d-9702-a8a3a20a77e1 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.389800728 +0000 UTC m=+135.967916553 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/a36fde19-2db8-4b4d-9702-a8a3a20a77e1-config") pod "service-ca-operator-777779d784-h7fz7" (UID: "a36fde19-2db8-4b4d-9702-a8a3a20a77e1") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.889841 4773 secret.go:188] Couldn't get secret openshift-service-ca-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.889884 4773 secret.go:188] Couldn't get secret openshift-marketplace/marketplace-operator-metrics: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.889907 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a36fde19-2db8-4b4d-9702-a8a3a20a77e1-serving-cert podName:a36fde19-2db8-4b4d-9702-a8a3a20a77e1 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.38985843 +0000 UTC m=+135.967974255 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/a36fde19-2db8-4b4d-9702-a8a3a20a77e1-serving-cert") pod "service-ca-operator-777779d784-h7fz7" (UID: "a36fde19-2db8-4b4d-9702-a8a3a20a77e1") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.889941 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4b093dc7-d5ee-409a-9c1c-e003686d44e4-marketplace-operator-metrics podName:4b093dc7-d5ee-409a-9c1c-e003686d44e4 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.389934123 +0000 UTC m=+135.968049948 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "marketplace-operator-metrics" (UniqueName: "kubernetes.io/secret/4b093dc7-d5ee-409a-9c1c-e003686d44e4-marketplace-operator-metrics") pod "marketplace-operator-79b997595-tnzhm" (UID: "4b093dc7-d5ee-409a-9c1c-e003686d44e4") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.891060 4773 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/package-server-manager-serving-cert: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.891099 4773 secret.go:188] Couldn't get secret openshift-dns/dns-default-metrics-tls: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.891141 4773 secret.go:188] Couldn't get secret openshift-multus/multus-admission-controller-secret: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.891194 4773 configmap.go:193] Couldn't get configMap openshift-dns/dns-default: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.891219 4773 configmap.go:193] Couldn't get configMap openshift-marketplace/marketplace-trusted-ca: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.891225 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/65999965-0f9c-435c-8687-a50033c9d661-package-server-manager-serving-cert podName:65999965-0f9c-435c-8687-a50033c9d661 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.391134935 +0000 UTC m=+135.969250750 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "package-server-manager-serving-cert" (UniqueName: "kubernetes.io/secret/65999965-0f9c-435c-8687-a50033c9d661-package-server-manager-serving-cert") pod "package-server-manager-789f6589d5-tqwkj" (UID: "65999965-0f9c-435c-8687-a50033c9d661") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.891255 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4b093dc7-d5ee-409a-9c1c-e003686d44e4-marketplace-trusted-ca podName:4b093dc7-d5ee-409a-9c1c-e003686d44e4 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.39123792 +0000 UTC m=+135.969353815 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-trusted-ca" (UniqueName: "kubernetes.io/configmap/4b093dc7-d5ee-409a-9c1c-e003686d44e4-marketplace-trusted-ca") pod "marketplace-operator-79b997595-tnzhm" (UID: "4b093dc7-d5ee-409a-9c1c-e003686d44e4") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.891274 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/244704c5-6799-48ab-9c90-8c38ae7f3c5e-webhook-certs podName:244704c5-6799-48ab-9c90-8c38ae7f3c5e nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.391266271 +0000 UTC m=+135.969382206 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/244704c5-6799-48ab-9c90-8c38ae7f3c5e-webhook-certs") pod "multus-admission-controller-857f4d67dd-r7gll" (UID: "244704c5-6799-48ab-9c90-8c38ae7f3c5e") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.891314 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/866b7e00-9f0a-4b4d-9b5b-91457bcd2409-config-volume podName:866b7e00-9f0a-4b4d-9b5b-91457bcd2409 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.391303753 +0000 UTC m=+135.969419688 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/866b7e00-9f0a-4b4d-9b5b-91457bcd2409-config-volume") pod "dns-default-5ktrv" (UID: "866b7e00-9f0a-4b4d-9b5b-91457bcd2409") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.891329 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/866b7e00-9f0a-4b4d-9b5b-91457bcd2409-metrics-tls podName:866b7e00-9f0a-4b4d-9b5b-91457bcd2409 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.391322884 +0000 UTC m=+135.969438799 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-tls" (UniqueName: "kubernetes.io/secret/866b7e00-9f0a-4b4d-9b5b-91457bcd2409-metrics-tls") pod "dns-default-5ktrv" (UID: "866b7e00-9f0a-4b4d-9b5b-91457bcd2409") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.891279 4773 configmap.go:193] Couldn't get configMap openshift-service-ca/signing-cabundle: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: E0122 11:57:07.891400 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd-signing-cabundle podName:f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd nodeName:}" failed. No retries permitted until 2026-01-22 11:57:08.391389387 +0000 UTC m=+135.969505212 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "signing-cabundle" (UniqueName: "kubernetes.io/configmap/f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd-signing-cabundle") pod "service-ca-9c57cc56f-mttmk" (UID: "f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.899746 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4b2948a0-e876-4813-8f06-b5a82d35b47a-proxy-tls\") pod \"machine-config-operator-74547568cd-2fmtb\" (UID: \"4b2948a0-e876-4813-8f06-b5a82d35b47a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb" Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.905405 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.925994 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.945220 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.965058 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 22 11:57:07 crc kubenswrapper[4773]: I0122 11:57:07.985297 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.011863 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.026077 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.045386 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.066575 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.085388 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.105958 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.125528 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.145955 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.165658 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.186096 4773 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.205631 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.225454 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.245828 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.264790 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.285698 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.305251 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.311037 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-encryption-config\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.311101 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.311129 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.311171 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.311198 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.311224 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1a76da48-177b-429e-a136-d78afeae02aa-images\") pod 
\"machine-api-operator-5694c8668f-jm24g\" (UID: \"1a76da48-177b-429e-a136-d78afeae02aa\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.311254 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.311346 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a76da48-177b-429e-a136-d78afeae02aa-config\") pod \"machine-api-operator-5694c8668f-jm24g\" (UID: \"1a76da48-177b-429e-a136-d78afeae02aa\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.311501 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.311537 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-etcd-client\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.311616 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.311672 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.311706 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/1a76da48-177b-429e-a136-d78afeae02aa-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-jm24g\" (UID: \"1a76da48-177b-429e-a136-d78afeae02aa\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.311756 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-config\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.311954 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.312019 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-image-import-ca\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.312088 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcb4689a-1339-43ea-b525-787e4a35e05c-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-xnb62\" (UID: \"dcb4689a-1339-43ea-b525-787e4a35e05c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.312140 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-audit\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.312209 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-config\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.312247 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-etcd-serving-ca\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.312295 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcb4689a-1339-43ea-b525-787e4a35e05c-config\") pod \"openshift-apiserver-operator-796bbdcf4f-xnb62\" (UID: \"dcb4689a-1339-43ea-b525-787e4a35e05c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.312399 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/143cf6af-d9df-44fc-8281-17ee0a86807c-serving-cert\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.312419 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-audit-policies\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.312462 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.312524 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-serving-cert\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.312544 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.312572 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.312823 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-client-ca\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.312900 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.326325 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.345417 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.365022 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.385043 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.414738 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/866b7e00-9f0a-4b4d-9b5b-91457bcd2409-config-volume\") pod \"dns-default-5ktrv\" (UID: \"866b7e00-9f0a-4b4d-9b5b-91457bcd2409\") " pod="openshift-dns/dns-default-5ktrv"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.414781 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4b093dc7-d5ee-409a-9c1c-e003686d44e4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tnzhm\" (UID: \"4b093dc7-d5ee-409a-9c1c-e003686d44e4\") " pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.415469 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/866b7e00-9f0a-4b4d-9b5b-91457bcd2409-config-volume\") pod \"dns-default-5ktrv\" (UID: \"866b7e00-9f0a-4b4d-9b5b-91457bcd2409\") " pod="openshift-dns/dns-default-5ktrv"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.415501 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a8dcf163-8c98-4db0-95e5-1d96e45a619c-config-volume\") pod \"collect-profiles-29484705-kmcsc\" (UID: \"a8dcf163-8c98-4db0-95e5-1d96e45a619c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.415740 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd-signing-key\") pod \"service-ca-9c57cc56f-mttmk\" (UID: \"f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd\") " pod="openshift-service-ca/service-ca-9c57cc56f-mttmk"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.415805 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a36fde19-2db8-4b4d-9702-a8a3a20a77e1-config\") pod \"service-ca-operator-777779d784-h7fz7\" (UID: \"a36fde19-2db8-4b4d-9702-a8a3a20a77e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-h7fz7"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.415825 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4b093dc7-d5ee-409a-9c1c-e003686d44e4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tnzhm\" (UID: \"4b093dc7-d5ee-409a-9c1c-e003686d44e4\") " pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.415870 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6f6256b7-548b-46ff-9c2a-d95240136df2-proxy-tls\") pod \"machine-config-controller-84d6567774-jc2cw\" (UID: \"6f6256b7-548b-46ff-9c2a-d95240136df2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jc2cw"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.415946 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4b093dc7-d5ee-409a-9c1c-e003686d44e4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tnzhm\" (UID: \"4b093dc7-d5ee-409a-9c1c-e003686d44e4\") " pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.415990 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a36fde19-2db8-4b4d-9702-a8a3a20a77e1-serving-cert\") pod \"service-ca-operator-777779d784-h7fz7\" (UID: \"a36fde19-2db8-4b4d-9702-a8a3a20a77e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-h7fz7"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.416133 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a8dcf163-8c98-4db0-95e5-1d96e45a619c-config-volume\") pod \"collect-profiles-29484705-kmcsc\" (UID: \"a8dcf163-8c98-4db0-95e5-1d96e45a619c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.416159 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/244704c5-6799-48ab-9c90-8c38ae7f3c5e-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-r7gll\" (UID: \"244704c5-6799-48ab-9c90-8c38ae7f3c5e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-r7gll"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.416251 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/866b7e00-9f0a-4b4d-9b5b-91457bcd2409-metrics-tls\") pod \"dns-default-5ktrv\" (UID: \"866b7e00-9f0a-4b4d-9b5b-91457bcd2409\") " pod="openshift-dns/dns-default-5ktrv"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.416448 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd-signing-cabundle\") pod \"service-ca-9c57cc56f-mttmk\" (UID: \"f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd\") " pod="openshift-service-ca/service-ca-9c57cc56f-mttmk"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.416521 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/65999965-0f9c-435c-8687-a50033c9d661-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-tqwkj\" (UID: \"65999965-0f9c-435c-8687-a50033c9d661\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tqwkj"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.417083 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a36fde19-2db8-4b4d-9702-a8a3a20a77e1-config\") pod \"service-ca-operator-777779d784-h7fz7\" (UID: \"a36fde19-2db8-4b4d-9702-a8a3a20a77e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-h7fz7"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.418141 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd-signing-cabundle\") pod \"service-ca-9c57cc56f-mttmk\" (UID: \"f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd\") " pod="openshift-service-ca/service-ca-9c57cc56f-mttmk"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.419123 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/244704c5-6799-48ab-9c90-8c38ae7f3c5e-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-r7gll\" (UID: \"244704c5-6799-48ab-9c90-8c38ae7f3c5e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-r7gll"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.419861 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4b093dc7-d5ee-409a-9c1c-e003686d44e4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tnzhm\" (UID: \"4b093dc7-d5ee-409a-9c1c-e003686d44e4\") " pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.420127 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd-signing-key\") pod \"service-ca-9c57cc56f-mttmk\" (UID: \"f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd\") " pod="openshift-service-ca/service-ca-9c57cc56f-mttmk"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.420136 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6f6256b7-548b-46ff-9c2a-d95240136df2-proxy-tls\") pod \"machine-config-controller-84d6567774-jc2cw\" (UID: \"6f6256b7-548b-46ff-9c2a-d95240136df2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jc2cw"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.420221 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a36fde19-2db8-4b4d-9702-a8a3a20a77e1-serving-cert\") pod \"service-ca-operator-777779d784-h7fz7\" (UID: \"a36fde19-2db8-4b4d-9702-a8a3a20a77e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-h7fz7"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.420597 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/65999965-0f9c-435c-8687-a50033c9d661-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-tqwkj\" (UID: \"65999965-0f9c-435c-8687-a50033c9d661\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tqwkj"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.426184 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.429799 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/866b7e00-9f0a-4b4d-9b5b-91457bcd2409-metrics-tls\") pod \"dns-default-5ktrv\" (UID: \"866b7e00-9f0a-4b4d-9b5b-91457bcd2409\") " pod="openshift-dns/dns-default-5ktrv"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.484576 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6z9vs\" (UniqueName: \"kubernetes.io/projected/5e69f07b-6904-4637-9b6a-95d9714f036a-kube-api-access-6z9vs\") pod \"cluster-samples-operator-665b6dd947-29862\" (UID: \"5e69f07b-6904-4637-9b6a-95d9714f036a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-29862"
Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.501178 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7t98h\" (UniqueName: \"kubernetes.io/projected/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-kube-api-access-7t98h\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6"
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7t98h\" (UniqueName: \"kubernetes.io/projected/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-kube-api-access-7t98h\") pod \"console-f9d7485db-lrxk6\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " pod="openshift-console/console-f9d7485db-lrxk6" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.519002 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-lrxk6" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.521087 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gn2jj\" (UniqueName: \"kubernetes.io/projected/ea377cf5-fbd8-462d-bfea-dd3aca3da018-kube-api-access-gn2jj\") pod \"route-controller-manager-6576b87f9c-5jllv\" (UID: \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.542804 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-29862" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.548046 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9d85\" (UniqueName: \"kubernetes.io/projected/622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c-kube-api-access-s9d85\") pod \"authentication-operator-69f744f599-rpgsr\" (UID: \"622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.564009 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.579423 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwhsf\" (UniqueName: \"kubernetes.io/projected/d12eac55-c003-4ed2-9b5f-aec3270b23ed-kube-api-access-wwhsf\") pod \"apiserver-7bbb656c7d-xsqsg\" (UID: \"d12eac55-c003-4ed2-9b5f-aec3270b23ed\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.610808 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/77e4f557-fa21-4034-9a3f-f347f3c219e9-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-gqc2d\" (UID: \"77e4f557-fa21-4034-9a3f-f347f3c219e9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.627799 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4sw9\" (UniqueName: \"kubernetes.io/projected/47082a42-2a2d-4d25-bbbe-ee688cd9599a-kube-api-access-s4sw9\") pod \"console-operator-58897d9998-vnsx2\" (UID: \"47082a42-2a2d-4d25-bbbe-ee688cd9599a\") " pod="openshift-console-operator/console-operator-58897d9998-vnsx2" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.644244 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6cg7\" (UniqueName: \"kubernetes.io/projected/80b4c3da-59cb-4154-ade0-92772eb0fe8a-kube-api-access-b6cg7\") pod \"router-default-5444994796-zmgk2\" (UID: \"80b4c3da-59cb-4154-ade0-92772eb0fe8a\") " pod="openshift-ingress/router-default-5444994796-zmgk2" Jan 22 11:57:08 crc 
kubenswrapper[4773]: I0122 11:57:08.652262 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-zmgk2" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.660261 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvrh4\" (UniqueName: \"kubernetes.io/projected/9ddb15eb-ebaf-4cb3-90b6-6156ffd665af-kube-api-access-vvrh4\") pod \"machine-approver-56656f9798-pd2kc\" (UID: \"9ddb15eb-ebaf-4cb3-90b6-6156ffd665af\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pd2kc" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.698781 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-829c9\" (UniqueName: \"kubernetes.io/projected/bfd1a7c2-cdbc-4c02-97b4-6a09476397b8-kube-api-access-829c9\") pod \"openshift-controller-manager-operator-756b6f6bc6-6h722\" (UID: \"bfd1a7c2-cdbc-4c02-97b4-6a09476397b8\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6h722" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.707556 4773 request.go:700] Waited for 1.928772985s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/serviceaccounts/default/token Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.764241 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwmg2\" (UniqueName: \"kubernetes.io/projected/943bb747-f86c-4422-b99f-97a7fdb07b87-kube-api-access-gwmg2\") pod \"downloads-7954f5f757-trjxm\" (UID: \"943bb747-f86c-4422-b99f-97a7fdb07b87\") " pod="openshift-console/downloads-7954f5f757-trjxm" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.764424 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vscf\" (UniqueName: \"kubernetes.io/projected/6940aa57-4e2c-41c6-a4e0-64081d1208c9-kube-api-access-5vscf\") pod \"dns-operator-744455d44c-r844w\" (UID: \"6940aa57-4e2c-41c6-a4e0-64081d1208c9\") " pod="openshift-dns-operator/dns-operator-744455d44c-r844w" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.781157 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-lrxk6"] Jan 22 11:57:08 crc kubenswrapper[4773]: W0122 11:57:08.788003 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod139ce7fa_7d44_4233_b4e9_b7827ce4c68d.slice/crio-03d13dfb3744909c7dd0be8312429d158b675a67abf385b8404311d32139a8ca WatchSource:0}: Error finding container 03d13dfb3744909c7dd0be8312429d158b675a67abf385b8404311d32139a8ca: Status 404 returned error can't find the container with id 03d13dfb3744909c7dd0be8312429d158b675a67abf385b8404311d32139a8ca Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.791444 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-29862"] Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.806106 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.806186 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.812911 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv"] Jan 22 11:57:08 crc kubenswrapper[4773]: W0122 11:57:08.825768 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podea377cf5_fbd8_462d_bfea_dd3aca3da018.slice/crio-93fc36f8e0a73f53db3a16b3a3eb6c6486f5fa7d5aaf2145151ca528cc9569f7 WatchSource:0}: Error finding container 93fc36f8e0a73f53db3a16b3a3eb6c6486f5fa7d5aaf2145151ca528cc9569f7: Status 404 returned error can't find the container with id 93fc36f8e0a73f53db3a16b3a3eb6c6486f5fa7d5aaf2145151ca528cc9569f7 Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.826836 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.836023 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.846160 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.852337 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-vnsx2" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.881264 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4jbm\" (UniqueName: \"kubernetes.io/projected/57270c31-9344-4b7a-9636-119edbf10dbc-kube-api-access-d4jbm\") pod \"openshift-config-operator-7777fb866f-8fll2\" (UID: \"57270c31-9344-4b7a-9636-119edbf10dbc\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8fll2" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.885923 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.919482 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-trjxm" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.921639 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lg8q5\" (UniqueName: \"kubernetes.io/projected/77e4f557-fa21-4034-9a3f-f347f3c219e9-kube-api-access-lg8q5\") pod \"cluster-image-registry-operator-dc59b4c8b-gqc2d\" (UID: \"77e4f557-fa21-4034-9a3f-f347f3c219e9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.930758 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pd2kc" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.930903 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.943589 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.946170 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.959702 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-r844w" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.965456 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.984800 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6h722" Jan 22 11:57:08 crc kubenswrapper[4773]: I0122 11:57:08.987136 4773 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.005060 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.011560 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg"] Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.025275 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.047623 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rpgsr"] Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.065217 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-vnsx2"] Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.066565 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qxzg\" (UniqueName: \"kubernetes.io/projected/f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd-kube-api-access-4qxzg\") pod \"service-ca-9c57cc56f-mttmk\" (UID: \"f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd\") " pod="openshift-service-ca/service-ca-9c57cc56f-mttmk" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.079623 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5mtk\" (UniqueName: \"kubernetes.io/projected/e0dc1dc8-96fa-4338-b5c9-a6a85fe40ba1-kube-api-access-x5mtk\") pod \"migrator-59844c95c7-zrkcm\" (UID: \"e0dc1dc8-96fa-4338-b5c9-a6a85fe40ba1\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zrkcm" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.107893 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kf26\" (UniqueName: \"kubernetes.io/projected/866b7e00-9f0a-4b4d-9b5b-91457bcd2409-kube-api-access-8kf26\") pod 
\"dns-default-5ktrv\" (UID: \"866b7e00-9f0a-4b4d-9b5b-91457bcd2409\") " pod="openshift-dns/dns-default-5ktrv" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.119256 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-trjxm"] Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.122822 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-np5qp\" (UniqueName: \"kubernetes.io/projected/6f6256b7-548b-46ff-9c2a-d95240136df2-kube-api-access-np5qp\") pod \"machine-config-controller-84d6567774-jc2cw\" (UID: \"6f6256b7-548b-46ff-9c2a-d95240136df2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jc2cw" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.140924 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mkth4\" (UID: \"6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkth4" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.159849 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t24gj\" (UniqueName: \"kubernetes.io/projected/a8dcf163-8c98-4db0-95e5-1d96e45a619c-kube-api-access-t24gj\") pod \"collect-profiles-29484705-kmcsc\" (UID: \"a8dcf163-8c98-4db0-95e5-1d96e45a619c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.166181 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-mttmk" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.172974 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8fll2" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.175821 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jc2cw" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.179132 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwgwc\" (UniqueName: \"kubernetes.io/projected/a36fde19-2db8-4b4d-9702-a8a3a20a77e1-kube-api-access-zwgwc\") pod \"service-ca-operator-777779d784-h7fz7\" (UID: \"a36fde19-2db8-4b4d-9702-a8a3a20a77e1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-h7fz7" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.192739 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.199602 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9txxr\" (UniqueName: \"kubernetes.io/projected/88ff6e28-5128-42f0-8280-79874dd5431e-kube-api-access-9txxr\") pod \"etcd-operator-b45778765-fwt28\" (UID: \"88ff6e28-5128-42f0-8280-79874dd5431e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.201605 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-5ktrv" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.223028 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x48g2\" (UniqueName: \"kubernetes.io/projected/c65efd55-e31e-4f78-9c88-89e9c5d35df8-kube-api-access-x48g2\") pod \"catalog-operator-68c6474976-2rj56\" (UID: \"c65efd55-e31e-4f78-9c88-89e9c5d35df8\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.239547 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ba3fd722-161e-4047-bd23-6b6d5306a6ee-bound-sa-token\") pod \"ingress-operator-5b745b69d9-ls4mr\" (UID: \"ba3fd722-161e-4047-bd23-6b6d5306a6ee\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.280378 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24nzk\" (UniqueName: \"kubernetes.io/projected/65999965-0f9c-435c-8687-a50033c9d661-kube-api-access-24nzk\") pod \"package-server-manager-789f6589d5-tqwkj\" (UID: \"65999965-0f9c-435c-8687-a50033c9d661\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tqwkj" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.299551 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fcms\" (UniqueName: \"kubernetes.io/projected/3f045d2d-b976-40bb-a002-6dcdfeef75e4-kube-api-access-5fcms\") pod \"olm-operator-6b444d44fb-s9vvb\" (UID: \"3f045d2d-b976-40bb-a002-6dcdfeef75e4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.307985 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28" Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.311971 4773 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-ocp-branding-template: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312050 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-ocp-branding-template podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.312029856 +0000 UTC m=+137.890145681 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "v4-0-config-system-ocp-branding-template" (UniqueName: "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-ocp-branding-template") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312279 4773 secret.go:188] Couldn't get secret openshift-apiserver-operator/openshift-apiserver-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312367 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dcb4689a-1339-43ea-b525-787e4a35e05c-serving-cert podName:dcb4689a-1339-43ea-b525-787e4a35e05c nodeName:}" failed. 
No retries permitted until 2026-01-22 11:57:10.31235591 +0000 UTC m=+137.890471735 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/dcb4689a-1339-43ea-b525-787e4a35e05c-serving-cert") pod "openshift-apiserver-operator-796bbdcf4f-xnb62" (UID: "dcb4689a-1339-43ea-b525-787e4a35e05c") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312387 4773 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-user-template-error: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312415 4773 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-user-idp-0-file-data: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312432 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-error podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.312424443 +0000 UTC m=+137.890540268 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "v4-0-config-user-template-error" (UniqueName: "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-error") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312447 4773 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-session: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312450 4773 configmap.go:193] Couldn't get configMap openshift-controller-manager/openshift-global-ca: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312478 4773 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-user-template-provider-selection: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312485 4773 secret.go:188] Couldn't get secret openshift-apiserver/encryption-config-1: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312506 4773 configmap.go:193] Couldn't get configMap openshift-authentication/v4-0-config-system-cliconfig: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312504 4773 configmap.go:193] Couldn't get configMap openshift-apiserver/image-import-ca: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312535 4773 configmap.go:193] Couldn't get configMap openshift-apiserver/audit-1: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312543 4773 configmap.go:193] Couldn't get configMap openshift-machine-api/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312546 4773 configmap.go:193] Couldn't get configMap openshift-controller-manager/config: failed to sync configmap cache: timed out 
waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312575 4773 secret.go:188] Couldn't get secret openshift-machine-api/machine-api-operator-tls: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312582 4773 configmap.go:193] Couldn't get configMap openshift-apiserver/config: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312583 4773 configmap.go:193] Couldn't get configMap openshift-machine-api/machine-api-operator-images: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312556 4773 configmap.go:193] Couldn't get configMap openshift-authentication/v4-0-config-system-trusted-ca-bundle: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312626 4773 configmap.go:193] Couldn't get configMap openshift-apiserver-operator/openshift-apiserver-operator-config: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312628 4773 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-router-certs: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312565 4773 secret.go:188] Couldn't get secret openshift-apiserver/etcd-client: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312653 4773 secret.go:188] Couldn't get secret openshift-controller-manager/serving-cert: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312638 4773 configmap.go:193] Couldn't get configMap openshift-apiserver/etcd-serving-ca: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312677 4773 configmap.go:193] Couldn't get configMap openshift-apiserver/trusted-ca-bundle: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312460 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-idp-0-file-data podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.312448154 +0000 UTC m=+137.890563979 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "v4-0-config-user-idp-0-file-data" (UniqueName: "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-idp-0-file-data") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312561 4773 configmap.go:193] Couldn't get configMap openshift-authentication/audit: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312735 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-session podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.312710355 +0000 UTC m=+137.890826240 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "v4-0-config-system-session" (UniqueName: "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-session") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312755 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-provider-selection podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.312744147 +0000 UTC m=+137.890860082 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "v4-0-config-user-template-provider-selection" (UniqueName: "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-provider-selection") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312775 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-encryption-config podName:a3b03ed7-b33f-411b-aa66-d06360af63d1 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.312766488 +0000 UTC m=+137.890882403 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "encryption-config" (UniqueName: "kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-encryption-config") pod "apiserver-76f77b778f-xqd5z" (UID: "a3b03ed7-b33f-411b-aa66-d06360af63d1") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312793 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-cliconfig podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.312785378 +0000 UTC m=+137.890901303 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "v4-0-config-system-cliconfig" (UniqueName: "kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-cliconfig") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312807 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-proxy-ca-bundles podName:143cf6af-d9df-44fc-8281-17ee0a86807c nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.312801119 +0000 UTC m=+137.890917044 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "proxy-ca-bundles" (UniqueName: "kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-proxy-ca-bundles") pod "controller-manager-879f6c89f-r5t5b" (UID: "143cf6af-d9df-44fc-8281-17ee0a86807c") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312833 4773 secret.go:188] Couldn't get secret openshift-apiserver/serving-cert: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312869 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-serving-cert podName:a3b03ed7-b33f-411b-aa66-d06360af63d1 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.312854701 +0000 UTC m=+137.890970626 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-serving-cert") pod "apiserver-76f77b778f-xqd5z" (UID: "a3b03ed7-b33f-411b-aa66-d06360af63d1") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312909 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-image-import-ca podName:a3b03ed7-b33f-411b-aa66-d06360af63d1 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.312898653 +0000 UTC m=+137.891014578 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "image-import-ca" (UniqueName: "kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-image-import-ca") pod "apiserver-76f77b778f-xqd5z" (UID: "a3b03ed7-b33f-411b-aa66-d06360af63d1") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312936 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-audit podName:a3b03ed7-b33f-411b-aa66-d06360af63d1 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.312928645 +0000 UTC m=+137.891044570 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "audit" (UniqueName: "kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-audit") pod "apiserver-76f77b778f-xqd5z" (UID: "a3b03ed7-b33f-411b-aa66-d06360af63d1") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312953 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1a76da48-177b-429e-a136-d78afeae02aa-config podName:1a76da48-177b-429e-a136-d78afeae02aa nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.312945395 +0000 UTC m=+137.891061330 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/1a76da48-177b-429e-a136-d78afeae02aa-config") pod "machine-api-operator-5694c8668f-jm24g" (UID: "1a76da48-177b-429e-a136-d78afeae02aa") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312969 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1a76da48-177b-429e-a136-d78afeae02aa-machine-api-operator-tls podName:1a76da48-177b-429e-a136-d78afeae02aa nodeName:}" failed. 
No retries permitted until 2026-01-22 11:57:10.312961416 +0000 UTC m=+137.891077351 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "machine-api-operator-tls" (UniqueName: "kubernetes.io/secret/1a76da48-177b-429e-a136-d78afeae02aa-machine-api-operator-tls") pod "machine-api-operator-5694c8668f-jm24g" (UID: "1a76da48-177b-429e-a136-d78afeae02aa") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312970 4773 configmap.go:193] Couldn't get configMap openshift-controller-manager/client-ca: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312983 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-config podName:a3b03ed7-b33f-411b-aa66-d06360af63d1 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.312977567 +0000 UTC m=+137.891093502 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-config") pod "apiserver-76f77b778f-xqd5z" (UID: "a3b03ed7-b33f-411b-aa66-d06360af63d1") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.312984 4773 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-user-template-login: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.313003 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-config podName:143cf6af-d9df-44fc-8281-17ee0a86807c nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.312995657 +0000 UTC m=+137.891111582 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-config") pod "controller-manager-879f6c89f-r5t5b" (UID: "143cf6af-d9df-44fc-8281-17ee0a86807c") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.313004 4773 configmap.go:193] Couldn't get configMap openshift-authentication/v4-0-config-system-service-ca: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.313022 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-trusted-ca-bundle podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.313012128 +0000 UTC m=+137.891128043 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "v4-0-config-system-trusted-ca-bundle" (UniqueName: "kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-trusted-ca-bundle") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.313055 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1a76da48-177b-429e-a136-d78afeae02aa-images podName:1a76da48-177b-429e-a136-d78afeae02aa nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.3130482 +0000 UTC m=+137.891164115 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "images" (UniqueName: "kubernetes.io/configmap/1a76da48-177b-429e-a136-d78afeae02aa-images") pod "machine-api-operator-5694c8668f-jm24g" (UID: "1a76da48-177b-429e-a136-d78afeae02aa") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.313068 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/dcb4689a-1339-43ea-b525-787e4a35e05c-config podName:dcb4689a-1339-43ea-b525-787e4a35e05c nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.31306285 +0000 UTC m=+137.891178785 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/dcb4689a-1339-43ea-b525-787e4a35e05c-config") pod "openshift-apiserver-operator-796bbdcf4f-xnb62" (UID: "dcb4689a-1339-43ea-b525-787e4a35e05c") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.313078 4773 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-serving-cert: failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.313082 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-router-certs podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.313074901 +0000 UTC m=+137.891190816 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "v4-0-config-system-router-certs" (UniqueName: "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-router-certs") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.313099 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-etcd-client podName:a3b03ed7-b33f-411b-aa66-d06360af63d1 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.313092282 +0000 UTC m=+137.891208227 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etcd-client" (UniqueName: "kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-etcd-client") pod "apiserver-76f77b778f-xqd5z" (UID: "a3b03ed7-b33f-411b-aa66-d06360af63d1") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.313114 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/143cf6af-d9df-44fc-8281-17ee0a86807c-serving-cert podName:143cf6af-d9df-44fc-8281-17ee0a86807c nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.313107242 +0000 UTC m=+137.891223177 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/143cf6af-d9df-44fc-8281-17ee0a86807c-serving-cert") pod "controller-manager-879f6c89f-r5t5b" (UID: "143cf6af-d9df-44fc-8281-17ee0a86807c") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.313126 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-etcd-serving-ca podName:a3b03ed7-b33f-411b-aa66-d06360af63d1 nodeName:}" failed. 
No retries permitted until 2026-01-22 11:57:10.313120633 +0000 UTC m=+137.891236558 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etcd-serving-ca" (UniqueName: "kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-etcd-serving-ca") pod "apiserver-76f77b778f-xqd5z" (UID: "a3b03ed7-b33f-411b-aa66-d06360af63d1") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.313141 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-trusted-ca-bundle podName:a3b03ed7-b33f-411b-aa66-d06360af63d1 nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.313132603 +0000 UTC m=+137.891248538 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "trusted-ca-bundle" (UniqueName: "kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-trusted-ca-bundle") pod "apiserver-76f77b778f-xqd5z" (UID: "a3b03ed7-b33f-411b-aa66-d06360af63d1") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.313157 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-audit-policies podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.313147354 +0000 UTC m=+137.891263289 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "audit-policies" (UniqueName: "kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-audit-policies") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.313171 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-client-ca podName:143cf6af-d9df-44fc-8281-17ee0a86807c nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.313164635 +0000 UTC m=+137.891280560 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "client-ca" (UniqueName: "kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-client-ca") pod "controller-manager-879f6c89f-r5t5b" (UID: "143cf6af-d9df-44fc-8281-17ee0a86807c") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.313186 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-login podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.313179585 +0000 UTC m=+137.891295520 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "v4-0-config-user-template-login" (UniqueName: "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-login") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.313199 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-service-ca podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. 
No retries permitted until 2026-01-22 11:57:10.313192736 +0000 UTC m=+137.891308671 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "v4-0-config-system-service-ca" (UniqueName: "kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-service-ca") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.313213 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-serving-cert podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.313206377 +0000 UTC m=+137.891322312 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "v4-0-config-system-serving-cert" (UniqueName: "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-serving-cert") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync secret cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.318942 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpkw4\" (UniqueName: \"kubernetes.io/projected/244704c5-6799-48ab-9c90-8c38ae7f3c5e-kube-api-access-dpkw4\") pod \"multus-admission-controller-857f4d67dd-r7gll\" (UID: \"244704c5-6799-48ab-9c90-8c38ae7f3c5e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-r7gll" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.340336 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zs827\" (UniqueName: \"kubernetes.io/projected/4b2948a0-e876-4813-8f06-b5a82d35b47a-kube-api-access-zs827\") pod \"machine-config-operator-74547568cd-2fmtb\" (UID: \"4b2948a0-e876-4813-8f06-b5a82d35b47a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.350009 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zrkcm" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.361199 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8hhq\" (UniqueName: \"kubernetes.io/projected/ba3fd722-161e-4047-bd23-6b6d5306a6ee-kube-api-access-t8hhq\") pod \"ingress-operator-5b745b69d9-ls4mr\" (UID: \"ba3fd722-161e-4047-bd23-6b6d5306a6ee\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.369047 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.378850 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzvsl\" (UniqueName: \"kubernetes.io/projected/4b093dc7-d5ee-409a-9c1c-e003686d44e4-kube-api-access-pzvsl\") pod \"marketplace-operator-79b997595-tnzhm\" (UID: \"4b093dc7-d5ee-409a-9c1c-e003686d44e4\") " pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.382081 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-zmgk2" event={"ID":"80b4c3da-59cb-4154-ade0-92772eb0fe8a","Type":"ContainerStarted","Data":"c9324e17654b6e00c49742f085f1ee23970613bc42f939ff101a9cf66f45a616"} Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.383061 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" event={"ID":"ea377cf5-fbd8-462d-bfea-dd3aca3da018","Type":"ContainerStarted","Data":"93fc36f8e0a73f53db3a16b3a3eb6c6486f5fa7d5aaf2145151ca528cc9569f7"} Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.384320 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-lrxk6" event={"ID":"139ce7fa-7d44-4233-b4e9-b7827ce4c68d","Type":"ContainerStarted","Data":"03d13dfb3744909c7dd0be8312429d158b675a67abf385b8404311d32139a8ca"} Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.395338 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.404302 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkth4" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.405041 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.421409 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.425491 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.434115 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.443672 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-h7fz7" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.445714 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.449502 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tqwkj" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.465851 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.484721 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-r7gll" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.486383 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.507621 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.526177 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.548932 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.559126 4773 projected.go:288] Couldn't get configMap openshift-apiserver-operator/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.559177 4773 projected.go:194] Error preparing data for projected volume kube-api-access-dxh2b for pod openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.559242 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/dcb4689a-1339-43ea-b525-787e4a35e05c-kube-api-access-dxh2b podName:dcb4689a-1339-43ea-b525-787e4a35e05c nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.059220971 +0000 UTC m=+137.637336796 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-dxh2b" (UniqueName: "kubernetes.io/projected/dcb4689a-1339-43ea-b525-787e4a35e05c-kube-api-access-dxh2b") pod "openshift-apiserver-operator-796bbdcf4f-xnb62" (UID: "dcb4689a-1339-43ea-b525-787e4a35e05c") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.566141 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.591450 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.605637 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.607423 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d"] Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.625572 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.640187 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.647568 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 22 11:57:09 crc kubenswrapper[4773]: W0122 11:57:09.661539 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77e4f557_fa21_4034_9a3f_f347f3c219e9.slice/crio-b42c41ee1bdbd116dcfefa20f8106ce4cda5150b2eee90e4f67c3cc61e5b619c WatchSource:0}: Error finding container b42c41ee1bdbd116dcfefa20f8106ce4cda5150b2eee90e4f67c3cc61e5b619c: Status 404 returned error can't find the container with id b42c41ee1bdbd116dcfefa20f8106ce4cda5150b2eee90e4f67c3cc61e5b619c Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.664767 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.679468 4773 projected.go:288] Couldn't get configMap openshift-controller-manager/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.679554 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-mttmk"] Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.685338 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.708939 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkth4"] Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.709184 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.726033 4773 request.go:700] Waited for 2.06138345s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/secrets?fieldSelector=metadata.name%3Dv4-0-config-user-template-login&limit=500&resourceVersion=0 Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.732123 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.748907 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 22 11:57:09 crc kubenswrapper[4773]: W0122 11:57:09.749213 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf8c75f51_a4f1_4ce3_9e40_f90eaf21cebd.slice/crio-8b44ced21c937ed40a4c566ff090408441b54afcaac9f72c4904c3d54f13d103 WatchSource:0}: Error finding container 8b44ced21c937ed40a4c566ff090408441b54afcaac9f72c4904c3d54f13d103: Status 404 returned error can't find the container with id 8b44ced21c937ed40a4c566ff090408441b54afcaac9f72c4904c3d54f13d103 Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.751691 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-fwt28"] Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.755951 4773 projected.go:288] Couldn't get configMap 
openshift-authentication/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.766837 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzf74\" (UniqueName: \"kubernetes.io/projected/a3b03ed7-b33f-411b-aa66-d06360af63d1-kube-api-access-pzf74\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z"
Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.772691 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.778277 4773 projected.go:288] Couldn't get configMap openshift-machine-api/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.794512 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.805803 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.826329 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.848265 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.872718 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.880493 4773 projected.go:194] Error preparing data for projected volume kube-api-access-sg6pk for pod openshift-controller-manager/controller-manager-879f6c89f-r5t5b: failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.880606 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/143cf6af-d9df-44fc-8281-17ee0a86807c-kube-api-access-sg6pk podName:143cf6af-d9df-44fc-8281-17ee0a86807c nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.380574894 +0000 UTC m=+137.958690719 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-sg6pk" (UniqueName: "kubernetes.io/projected/143cf6af-d9df-44fc-8281-17ee0a86807c-kube-api-access-sg6pk") pod "controller-manager-879f6c89f-r5t5b" (UID: "143cf6af-d9df-44fc-8281-17ee0a86807c") : failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.890209 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.920710 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.941340 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.945552 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.946219 4773 projected.go:194] Error preparing data for projected volume kube-api-access-5xv2v for pod openshift-authentication/oauth-openshift-558db77b4-nwwct: failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:09 crc kubenswrapper[4773]: E0122 11:57:09.946361 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4db3dc80-10df-4d72-925f-ab5c927bc6ef-kube-api-access-5xv2v podName:4db3dc80-10df-4d72-925f-ab5c927bc6ef nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.446333747 +0000 UTC m=+138.024449572 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-5xv2v" (UniqueName: "kubernetes.io/projected/4db3dc80-10df-4d72-925f-ab5c927bc6ef-kube-api-access-5xv2v") pod "oauth-openshift-558db77b4-nwwct" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef") : failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.965340 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Jan 22 11:57:09 crc kubenswrapper[4773]: I0122 11:57:09.993302 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.006204 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.026821 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.046346 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.066352 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.070425 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxh2b\" (UniqueName: \"kubernetes.io/projected/dcb4689a-1339-43ea-b525-787e4a35e05c-kube-api-access-dxh2b\") pod \"openshift-apiserver-operator-796bbdcf4f-xnb62\" (UID: \"dcb4689a-1339-43ea-b525-787e4a35e05c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62"
Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.078692 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxh2b\" (UniqueName: \"kubernetes.io/projected/dcb4689a-1339-43ea-b525-787e4a35e05c-kube-api-access-dxh2b\") pod \"openshift-apiserver-operator-796bbdcf4f-xnb62\" (UID: \"dcb4689a-1339-43ea-b525-787e4a35e05c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62"
Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.085804 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.105928 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.127300 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.145265 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.174834 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.188332 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.212698 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.231526 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.245224 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Jan 22 11:57:10 crc kubenswrapper[4773]: E0122 11:57:10.249348 4773 projected.go:194] Error preparing data for projected volume kube-api-access-57bqd for pod openshift-machine-api/machine-api-operator-5694c8668f-jm24g: failed to sync configmap cache: timed out waiting for the condition
Jan 22 11:57:10 crc kubenswrapper[4773]: E0122 11:57:10.249437 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/1a76da48-177b-429e-a136-d78afeae02aa-kube-api-access-57bqd podName:1a76da48-177b-429e-a136-d78afeae02aa nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.749415997 +0000 UTC m=+138.327531822 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-57bqd" (UniqueName: "kubernetes.io/projected/1a76da48-177b-429e-a136-d78afeae02aa-kube-api-access-57bqd") pod "machine-api-operator-5694c8668f-jm24g" (UID: "1a76da48-177b-429e-a136-d78afeae02aa") : failed to sync configmap cache: timed out waiting for the condition
Error: MountVolume.SetUp failed for volume "kube-api-access-57bqd" (UniqueName: "kubernetes.io/projected/1a76da48-177b-429e-a136-d78afeae02aa-kube-api-access-57bqd") pod "machine-api-operator-5694c8668f-jm24g" (UID: "1a76da48-177b-429e-a136-d78afeae02aa") : failed to sync configmap cache: timed out waiting for the condition Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.259445 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75f7x\" (UniqueName: \"kubernetes.io/projected/2e31d232-2138-4dd6-9b7d-71f87e414a01-kube-api-access-75f7x\") pod \"control-plane-machine-set-operator-78cbb6b69f-2hwmg\" (UID: \"2e31d232-2138-4dd6-9b7d-71f87e414a01\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2hwmg" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.290703 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.312502 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2hwmg" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.381394 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-etcd-client\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.381467 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.381499 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5368f6e7-edfa-4489-851a-96495696b28f-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-fmqk7\" (UID: \"5368f6e7-edfa-4489-851a-96495696b28f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmqk7" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.381547 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.381573 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/74d68a97-0d9e-4eb0-886f-a61ae22935ab-trusted-ca\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.381624 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.381702 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/1a76da48-177b-429e-a136-d78afeae02aa-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-jm24g\" (UID: \"1a76da48-177b-429e-a136-d78afeae02aa\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.381753 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-config\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.381778 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sg6pk\" (UniqueName: \"kubernetes.io/projected/143cf6af-d9df-44fc-8281-17ee0a86807c-kube-api-access-sg6pk\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.381809 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfwl9\" (UniqueName: \"kubernetes.io/projected/74d68a97-0d9e-4eb0-886f-a61ae22935ab-kube-api-access-nfwl9\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.381846 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/74d68a97-0d9e-4eb0-886f-a61ae22935ab-registry-certificates\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.381877 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: E0122 11:57:10.381901 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.881885495 +0000 UTC m=+138.460001320 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382268 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-image-import-ca\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382362 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcb4689a-1339-43ea-b525-787e4a35e05c-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-xnb62\" (UID: \"dcb4689a-1339-43ea-b525-787e4a35e05c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382391 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/115c791a-d5a3-4b90-b234-a5305f383cd9-tmpfs\") pod \"packageserver-d55dfcdfc-jg6k4\" (UID: \"115c791a-d5a3-4b90-b234-a5305f383cd9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382414 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b87d42a-ede1-4dbc-8d74-a01d06df4d19-config\") pod \"kube-controller-manager-operator-78b949d7b-cqcxg\" (UID: \"9b87d42a-ede1-4dbc-8d74-a01d06df4d19\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cqcxg" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382437 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1171149-01a6-41c8-9f28-a6428ccc5127-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-bw97w\" (UID: \"d1171149-01a6-41c8-9f28-a6428ccc5127\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bw97w" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382474 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-audit\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382505 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-config\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382522 4773 reconciler_common.go:245] 
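The MountDevice failure above is different in kind from the cache-sync timeouts: the PVC's CSI driver (kubevirt.io.hostpath-provisioner) has not registered with the kubelet yet, and the csi-hostpathplugin pod that provides it is itself still having its volumes attached further down in this log. A small Go sketch for triaging this class of error, reading a kubelet log such as this one on stdin; the match string is taken from the entries shown here, not from any kubelet API:

// csigrep.go: print kubelet log entries that failed because a CSI driver
// was not yet registered. Usage (hypothetical): csigrep < kubelet.log
package main

import (
	"bufio"
	"fmt"
	"os"
	"strings"
)

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // klog entries can be long
	for sc.Scan() {
		if strings.Contains(sc.Text(), "not found in the list of registered CSI drivers") {
			fmt.Println(sc.Text())
		}
	}
	if err := sc.Err(); err != nil {
		fmt.Fprintln(os.Stderr, "scan:", err)
	}
}

If the matches stop once the plugin pod is running, the failures were a startup ordering race rather than a persistent driver problem.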
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/74d68a97-0d9e-4eb0-886f-a61ae22935ab-ca-trust-extracted\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382557 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-etcd-serving-ca\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382575 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcb4689a-1339-43ea-b525-787e4a35e05c-config\") pod \"openshift-apiserver-operator-796bbdcf4f-xnb62\" (UID: \"dcb4689a-1339-43ea-b525-787e4a35e05c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382600 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74d68a97-0d9e-4eb0-886f-a61ae22935ab-bound-sa-token\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382629 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/115c791a-d5a3-4b90-b234-a5305f383cd9-apiservice-cert\") pod \"packageserver-d55dfcdfc-jg6k4\" (UID: \"115c791a-d5a3-4b90-b234-a5305f383cd9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382666 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/143cf6af-d9df-44fc-8281-17ee0a86807c-serving-cert\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382685 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-audit-policies\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382703 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382749 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-service-ca\") pod 
\"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382767 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-serving-cert\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382785 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9b87d42a-ede1-4dbc-8d74-a01d06df4d19-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-cqcxg\" (UID: \"9b87d42a-ede1-4dbc-8d74-a01d06df4d19\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cqcxg" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382813 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/74d68a97-0d9e-4eb0-886f-a61ae22935ab-installation-pull-secrets\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382846 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382882 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-client-ca\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382914 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/74d68a97-0d9e-4eb0-886f-a61ae22935ab-registry-tls\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382929 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1171149-01a6-41c8-9f28-a6428ccc5127-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-bw97w\" (UID: \"d1171149-01a6-41c8-9f28-a6428ccc5127\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bw97w" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382951 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/115c791a-d5a3-4b90-b234-a5305f383cd9-webhook-cert\") pod \"packageserver-d55dfcdfc-jg6k4\" (UID: 
\"115c791a-d5a3-4b90-b234-a5305f383cd9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382966 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9b87d42a-ede1-4dbc-8d74-a01d06df4d19-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-cqcxg\" (UID: \"9b87d42a-ede1-4dbc-8d74-a01d06df4d19\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cqcxg" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.382985 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8q5tt\" (UniqueName: \"kubernetes.io/projected/d1171149-01a6-41c8-9f28-a6428ccc5127-kube-api-access-8q5tt\") pod \"kube-storage-version-migrator-operator-b67b599dd-bw97w\" (UID: \"d1171149-01a6-41c8-9f28-a6428ccc5127\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bw97w" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.383004 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.383045 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5368f6e7-edfa-4489-851a-96495696b28f-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-fmqk7\" (UID: \"5368f6e7-edfa-4489-851a-96495696b28f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmqk7" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.383070 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5368f6e7-edfa-4489-851a-96495696b28f-config\") pod \"kube-apiserver-operator-766d6c64bb-fmqk7\" (UID: \"5368f6e7-edfa-4489-851a-96495696b28f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmqk7" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.383087 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-encryption-config\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.383116 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7k9q\" (UniqueName: \"kubernetes.io/projected/115c791a-d5a3-4b90-b234-a5305f383cd9-kube-api-access-s7k9q\") pod \"packageserver-d55dfcdfc-jg6k4\" (UID: \"115c791a-d5a3-4b90-b234-a5305f383cd9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.383134 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-proxy-ca-bundles\") pod 
\"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.383151 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.383180 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.383198 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1a76da48-177b-429e-a136-d78afeae02aa-images\") pod \"machine-api-operator-5694c8668f-jm24g\" (UID: \"1a76da48-177b-429e-a136-d78afeae02aa\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.383214 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.383231 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a76da48-177b-429e-a136-d78afeae02aa-config\") pod \"machine-api-operator-5694c8668f-jm24g\" (UID: \"1a76da48-177b-429e-a136-d78afeae02aa\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.383248 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.383305 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.383635 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-config\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.383820 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.384295 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-image-import-ca\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.384576 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-config\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.384668 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-etcd-serving-ca\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.385411 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcb4689a-1339-43ea-b525-787e4a35e05c-config\") pod \"openshift-apiserver-operator-796bbdcf4f-xnb62\" (UID: \"dcb4689a-1339-43ea-b525-787e4a35e05c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.385960 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-audit-policies\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.385973 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.386644 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.386835 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-client-ca\") pod 
\"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.387659 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a3b03ed7-b33f-411b-aa66-d06360af63d1-audit\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.389681 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.390056 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.390321 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a76da48-177b-429e-a136-d78afeae02aa-config\") pod \"machine-api-operator-5694c8668f-jm24g\" (UID: \"1a76da48-177b-429e-a136-d78afeae02aa\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.391110 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.392107 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/1a76da48-177b-429e-a136-d78afeae02aa-images\") pod \"machine-api-operator-5694c8668f-jm24g\" (UID: \"1a76da48-177b-429e-a136-d78afeae02aa\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.394633 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dcb4689a-1339-43ea-b525-787e4a35e05c-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-xnb62\" (UID: \"dcb4689a-1339-43ea-b525-787e4a35e05c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.399710 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.399850 4773 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/143cf6af-d9df-44fc-8281-17ee0a86807c-serving-cert\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.402239 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-29862" event={"ID":"5e69f07b-6904-4637-9b6a-95d9714f036a","Type":"ContainerStarted","Data":"b0fa458d29e91dbb14127513dedae1ab44a07b1821afcfcbd2bc18e01aa9a5e1"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.402809 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sg6pk\" (UniqueName: \"kubernetes.io/projected/143cf6af-d9df-44fc-8281-17ee0a86807c-kube-api-access-sg6pk\") pod \"controller-manager-879f6c89f-r5t5b\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.399832 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.403097 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-etcd-client\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.402747 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-encryption-config\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.405763 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a3b03ed7-b33f-411b-aa66-d06360af63d1-serving-cert\") pod \"apiserver-76f77b778f-xqd5z\" (UID: \"a3b03ed7-b33f-411b-aa66-d06360af63d1\") " pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.406462 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/1a76da48-177b-429e-a136-d78afeae02aa-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-jm24g\" (UID: \"1a76da48-177b-429e-a136-d78afeae02aa\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.407006 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkth4" event={"ID":"6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e","Type":"ContainerStarted","Data":"c3f100440793939da7c9bd0aea27a23c54891932aab96cce78e6faaa7721a7af"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.407247 4773 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.407335 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.407546 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.408362 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.409882 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.411253 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d" event={"ID":"77e4f557-fa21-4034-9a3f-f347f3c219e9","Type":"ContainerStarted","Data":"cc315e4c16f41c8b2c44497c4ae4eb758632754b2e3dffb01e210971cbf577d7"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.411310 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d" event={"ID":"77e4f557-fa21-4034-9a3f-f347f3c219e9","Type":"ContainerStarted","Data":"b42c41ee1bdbd116dcfefa20f8106ce4cda5150b2eee90e4f67c3cc61e5b619c"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.416237 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28" event={"ID":"88ff6e28-5128-42f0-8280-79874dd5431e","Type":"ContainerStarted","Data":"8a36b1c034be877c0da6dfa3b64df1e0325c15bab2426e3a696f7083c9ac0327"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.419106 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" event={"ID":"ea377cf5-fbd8-462d-bfea-dd3aca3da018","Type":"ContainerStarted","Data":"108029edb25e91ff04a4445e50163c7808179e8f6451ae20a9a52d248631a120"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 
11:57:10.419213 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.422107 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-vnsx2" event={"ID":"47082a42-2a2d-4d25-bbbe-ee688cd9599a","Type":"ContainerStarted","Data":"f77a726c5d907e3496a4af76ea763469ba1169f7efa5db924aa34128ca588538"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.422163 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-vnsx2" event={"ID":"47082a42-2a2d-4d25-bbbe-ee688cd9599a","Type":"ContainerStarted","Data":"db4e151e670409dfe32f46a0be93f8e6e5eff0172cabfee980979367ec62d714"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.422737 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-vnsx2" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.423712 4773 patch_prober.go:28] interesting pod/console-operator-58897d9998-vnsx2 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.30:8443/readyz\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body= Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.423773 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-vnsx2" podUID="47082a42-2a2d-4d25-bbbe-ee688cd9599a" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.30:8443/readyz\": dial tcp 10.217.0.30:8443: connect: connection refused" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.430778 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr" event={"ID":"622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c","Type":"ContainerStarted","Data":"e684c006462a217738add27ebca6e5806b26b69a02464a44b87364590fa1eb42"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.430857 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr" event={"ID":"622237ca-9ca0-40b2-a9ca-7ecb7cf7c90c","Type":"ContainerStarted","Data":"7e869046cdfa4575b291ca4e6012b155f71840b024ec5469e6d54929d3671b6b"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.436733 4773 generic.go:334] "Generic (PLEG): container finished" podID="d12eac55-c003-4ed2-9b5f-aec3270b23ed" containerID="ad585f4b9f33a7d2ab74d1cc6b3b7797ecfdf84a953a9412fd692ecddbdc0e6f" exitCode=0 Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.436826 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg" event={"ID":"d12eac55-c003-4ed2-9b5f-aec3270b23ed","Type":"ContainerDied","Data":"ad585f4b9f33a7d2ab74d1cc6b3b7797ecfdf84a953a9412fd692ecddbdc0e6f"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.436851 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg" event={"ID":"d12eac55-c003-4ed2-9b5f-aec3270b23ed","Type":"ContainerStarted","Data":"9d995a251b1a0524d125274f67affb1399561af61f7919ebdcce9ec10f6bea2e"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.439751 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ingress/router-default-5444994796-zmgk2" event={"ID":"80b4c3da-59cb-4154-ade0-92772eb0fe8a","Type":"ContainerStarted","Data":"d8f6603e81a42e309513af62ab131ff9feecb9020c5a5c792bb050b093859dff"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.454009 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-trjxm" event={"ID":"943bb747-f86c-4422-b99f-97a7fdb07b87","Type":"ContainerStarted","Data":"ecd85e337b8497ddb0229d4485e07f4152251c850b927225666c47f981cc0b58"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.454063 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-trjxm" event={"ID":"943bb747-f86c-4422-b99f-97a7fdb07b87","Type":"ContainerStarted","Data":"8d51191fb14570263d9beeb2eafa95469a0d7f56030ec88f9e85486527d74866"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.456882 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-trjxm" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.458526 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pd2kc" event={"ID":"9ddb15eb-ebaf-4cb3-90b6-6156ffd665af","Type":"ContainerStarted","Data":"a83b2520ead8e67a07be01ec0fd44cee5320430114113cf5d153132086ce1233"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.458577 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pd2kc" event={"ID":"9ddb15eb-ebaf-4cb3-90b6-6156ffd665af","Type":"ContainerStarted","Data":"09018fd0c7b5e78e9da06dd6c57aa6551c8bf1d47a863f3a1dc26e622c5610d0"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.463485 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-lrxk6" event={"ID":"139ce7fa-7d44-4233-b4e9-b7827ce4c68d","Type":"ContainerStarted","Data":"25bcff196e82020861ec33ee875e8d788e1b8f446955453f4246695e6d8da475"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.467104 4773 patch_prober.go:28] interesting pod/downloads-7954f5f757-trjxm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" start-of-body= Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.467145 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-trjxm" podUID="943bb747-f86c-4422-b99f-97a7fdb07b87" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.475736 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-mttmk" event={"ID":"f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd","Type":"ContainerStarted","Data":"88220d6b49bd146c3c3dd06b6d049391c5b7c44893923777cae62a69aeee7d12"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.475790 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-mttmk" event={"ID":"f8c75f51-a4f1-4ce3-9e40-f90eaf21cebd","Type":"ContainerStarted","Data":"8b44ced21c937ed40a4c566ff090408441b54afcaac9f72c4904c3d54f13d103"} Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.486822 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.487066 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1171149-01a6-41c8-9f28-a6428ccc5127-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-bw97w\" (UID: \"d1171149-01a6-41c8-9f28-a6428ccc5127\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bw97w" Jan 22 11:57:10 crc kubenswrapper[4773]: E0122 11:57:10.487116 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.987091009 +0000 UTC m=+138.565206894 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.487270 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/74d68a97-0d9e-4eb0-886f-a61ae22935ab-ca-trust-extracted\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.487335 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhqth\" (UniqueName: \"kubernetes.io/projected/e6833a75-2530-4fb4-9740-af41ec49d5e5-kube-api-access-jhqth\") pod \"csi-hostpathplugin-64x99\" (UID: \"e6833a75-2530-4fb4-9740-af41ec49d5e5\") " pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.487522 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74d68a97-0d9e-4eb0-886f-a61ae22935ab-bound-sa-token\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.487696 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/115c791a-d5a3-4b90-b234-a5305f383cd9-apiservice-cert\") pod \"packageserver-d55dfcdfc-jg6k4\" (UID: \"115c791a-d5a3-4b90-b234-a5305f383cd9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.487771 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5ggv\" (UniqueName: \"kubernetes.io/projected/44297f57-b260-417f-b6eb-a255f41cd584-kube-api-access-n5ggv\") pod 
\"ingress-canary-56h2q\" (UID: \"44297f57-b260-417f-b6eb-a255f41cd584\") " pod="openshift-ingress-canary/ingress-canary-56h2q" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.487844 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9b87d42a-ede1-4dbc-8d74-a01d06df4d19-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-cqcxg\" (UID: \"9b87d42a-ede1-4dbc-8d74-a01d06df4d19\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cqcxg" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.487871 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/74d68a97-0d9e-4eb0-886f-a61ae22935ab-installation-pull-secrets\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.487916 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e6833a75-2530-4fb4-9740-af41ec49d5e5-csi-data-dir\") pod \"csi-hostpathplugin-64x99\" (UID: \"e6833a75-2530-4fb4-9740-af41ec49d5e5\") " pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.487934 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e6833a75-2530-4fb4-9740-af41ec49d5e5-socket-dir\") pod \"csi-hostpathplugin-64x99\" (UID: \"e6833a75-2530-4fb4-9740-af41ec49d5e5\") " pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.487951 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/f5a38fb1-4a72-46da-9968-3111ff2ed6b3-certs\") pod \"machine-config-server-6s7m8\" (UID: \"f5a38fb1-4a72-46da-9968-3111ff2ed6b3\") " pod="openshift-machine-config-operator/machine-config-server-6s7m8" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.487986 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/44297f57-b260-417f-b6eb-a255f41cd584-cert\") pod \"ingress-canary-56h2q\" (UID: \"44297f57-b260-417f-b6eb-a255f41cd584\") " pod="openshift-ingress-canary/ingress-canary-56h2q" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.488095 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/74d68a97-0d9e-4eb0-886f-a61ae22935ab-registry-tls\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.488126 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1171149-01a6-41c8-9f28-a6428ccc5127-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-bw97w\" (UID: \"d1171149-01a6-41c8-9f28-a6428ccc5127\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bw97w" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 
11:57:10.488165 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/115c791a-d5a3-4b90-b234-a5305f383cd9-webhook-cert\") pod \"packageserver-d55dfcdfc-jg6k4\" (UID: \"115c791a-d5a3-4b90-b234-a5305f383cd9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.488189 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9b87d42a-ede1-4dbc-8d74-a01d06df4d19-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-cqcxg\" (UID: \"9b87d42a-ede1-4dbc-8d74-a01d06df4d19\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cqcxg" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.488220 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8q5tt\" (UniqueName: \"kubernetes.io/projected/d1171149-01a6-41c8-9f28-a6428ccc5127-kube-api-access-8q5tt\") pod \"kube-storage-version-migrator-operator-b67b599dd-bw97w\" (UID: \"d1171149-01a6-41c8-9f28-a6428ccc5127\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bw97w" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.488398 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4ddb\" (UniqueName: \"kubernetes.io/projected/f5a38fb1-4a72-46da-9968-3111ff2ed6b3-kube-api-access-j4ddb\") pod \"machine-config-server-6s7m8\" (UID: \"f5a38fb1-4a72-46da-9968-3111ff2ed6b3\") " pod="openshift-machine-config-operator/machine-config-server-6s7m8" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.488434 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e6833a75-2530-4fb4-9740-af41ec49d5e5-plugins-dir\") pod \"csi-hostpathplugin-64x99\" (UID: \"e6833a75-2530-4fb4-9740-af41ec49d5e5\") " pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.488545 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5368f6e7-edfa-4489-851a-96495696b28f-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-fmqk7\" (UID: \"5368f6e7-edfa-4489-851a-96495696b28f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmqk7" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.488589 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5368f6e7-edfa-4489-851a-96495696b28f-config\") pod \"kube-apiserver-operator-766d6c64bb-fmqk7\" (UID: \"5368f6e7-edfa-4489-851a-96495696b28f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmqk7" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.488672 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7k9q\" (UniqueName: \"kubernetes.io/projected/115c791a-d5a3-4b90-b234-a5305f383cd9-kube-api-access-s7k9q\") pod \"packageserver-d55dfcdfc-jg6k4\" (UID: \"115c791a-d5a3-4b90-b234-a5305f383cd9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.489041 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/f5a38fb1-4a72-46da-9968-3111ff2ed6b3-node-bootstrap-token\") pod \"machine-config-server-6s7m8\" (UID: \"f5a38fb1-4a72-46da-9968-3111ff2ed6b3\") " pod="openshift-machine-config-operator/machine-config-server-6s7m8" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.489137 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.489159 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5368f6e7-edfa-4489-851a-96495696b28f-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-fmqk7\" (UID: \"5368f6e7-edfa-4489-851a-96495696b28f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmqk7" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.489216 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e6833a75-2530-4fb4-9740-af41ec49d5e5-mountpoint-dir\") pod \"csi-hostpathplugin-64x99\" (UID: \"e6833a75-2530-4fb4-9740-af41ec49d5e5\") " pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.489245 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/74d68a97-0d9e-4eb0-886f-a61ae22935ab-trusted-ca\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.489415 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfwl9\" (UniqueName: \"kubernetes.io/projected/74d68a97-0d9e-4eb0-886f-a61ae22935ab-kube-api-access-nfwl9\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.489477 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xv2v\" (UniqueName: \"kubernetes.io/projected/4db3dc80-10df-4d72-925f-ab5c927bc6ef-kube-api-access-5xv2v\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: E0122 11:57:10.490319 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:10.990304309 +0000 UTC m=+138.568420224 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.490457 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/74d68a97-0d9e-4eb0-886f-a61ae22935ab-registry-certificates\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.490501 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e6833a75-2530-4fb4-9740-af41ec49d5e5-registration-dir\") pod \"csi-hostpathplugin-64x99\" (UID: \"e6833a75-2530-4fb4-9740-af41ec49d5e5\") " pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.490524 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b87d42a-ede1-4dbc-8d74-a01d06df4d19-config\") pod \"kube-controller-manager-operator-78b949d7b-cqcxg\" (UID: \"9b87d42a-ede1-4dbc-8d74-a01d06df4d19\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cqcxg" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.490542 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/115c791a-d5a3-4b90-b234-a5305f383cd9-tmpfs\") pod \"packageserver-d55dfcdfc-jg6k4\" (UID: \"115c791a-d5a3-4b90-b234-a5305f383cd9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.490724 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.490936 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/115c791a-d5a3-4b90-b234-a5305f383cd9-tmpfs\") pod \"packageserver-d55dfcdfc-jg6k4\" (UID: \"115c791a-d5a3-4b90-b234-a5305f383cd9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.491732 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/74d68a97-0d9e-4eb0-886f-a61ae22935ab-ca-trust-extracted\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.493393 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/74d68a97-0d9e-4eb0-886f-a61ae22935ab-registry-certificates\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.493640 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1171149-01a6-41c8-9f28-a6428ccc5127-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-bw97w\" (UID: \"d1171149-01a6-41c8-9f28-a6428ccc5127\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bw97w" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.501304 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5368f6e7-edfa-4489-851a-96495696b28f-config\") pod \"kube-apiserver-operator-766d6c64bb-fmqk7\" (UID: \"5368f6e7-edfa-4489-851a-96495696b28f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmqk7" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.501348 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/74d68a97-0d9e-4eb0-886f-a61ae22935ab-installation-pull-secrets\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.501697 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b87d42a-ede1-4dbc-8d74-a01d06df4d19-config\") pod \"kube-controller-manager-operator-78b949d7b-cqcxg\" (UID: \"9b87d42a-ede1-4dbc-8d74-a01d06df4d19\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cqcxg" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.502570 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/115c791a-d5a3-4b90-b234-a5305f383cd9-apiservice-cert\") pod \"packageserver-d55dfcdfc-jg6k4\" (UID: \"115c791a-d5a3-4b90-b234-a5305f383cd9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.502669 4773 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/74d68a97-0d9e-4eb0-886f-a61ae22935ab-trusted-ca\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.507664 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1171149-01a6-41c8-9f28-a6428ccc5127-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-bw97w\" (UID: \"d1171149-01a6-41c8-9f28-a6428ccc5127\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bw97w" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.514918 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9b87d42a-ede1-4dbc-8d74-a01d06df4d19-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-cqcxg\" (UID: \"9b87d42a-ede1-4dbc-8d74-a01d06df4d19\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cqcxg" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.516582 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5368f6e7-edfa-4489-851a-96495696b28f-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-fmqk7\" (UID: \"5368f6e7-edfa-4489-851a-96495696b28f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmqk7" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.518909 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xv2v\" (UniqueName: \"kubernetes.io/projected/4db3dc80-10df-4d72-925f-ab5c927bc6ef-kube-api-access-5xv2v\") pod \"oauth-openshift-558db77b4-nwwct\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.531841 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/115c791a-d5a3-4b90-b234-a5305f383cd9-webhook-cert\") pod \"packageserver-d55dfcdfc-jg6k4\" (UID: \"115c791a-d5a3-4b90-b234-a5305f383cd9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.531881 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.532158 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.536991 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/74d68a97-0d9e-4eb0-886f-a61ae22935ab-registry-tls\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.548668 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8q5tt\" (UniqueName: \"kubernetes.io/projected/d1171149-01a6-41c8-9f28-a6428ccc5127-kube-api-access-8q5tt\") pod \"kube-storage-version-migrator-operator-b67b599dd-bw97w\" (UID: \"d1171149-01a6-41c8-9f28-a6428ccc5127\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bw97w" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.557252 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.561168 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfwl9\" (UniqueName: \"kubernetes.io/projected/74d68a97-0d9e-4eb0-886f-a61ae22935ab-kube-api-access-nfwl9\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.564094 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.577860 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bw97w" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.579305 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7k9q\" (UniqueName: \"kubernetes.io/projected/115c791a-d5a3-4b90-b234-a5305f383cd9-kube-api-access-s7k9q\") pod \"packageserver-d55dfcdfc-jg6k4\" (UID: \"115c791a-d5a3-4b90-b234-a5305f383cd9\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.586862 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9b87d42a-ede1-4dbc-8d74-a01d06df4d19-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-cqcxg\" (UID: \"9b87d42a-ede1-4dbc-8d74-a01d06df4d19\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cqcxg" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.591032 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.591242 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e6833a75-2530-4fb4-9740-af41ec49d5e5-mountpoint-dir\") pod \"csi-hostpathplugin-64x99\" (UID: \"e6833a75-2530-4fb4-9740-af41ec49d5e5\") " pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.591329 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e6833a75-2530-4fb4-9740-af41ec49d5e5-registration-dir\") pod \"csi-hostpathplugin-64x99\" (UID: \"e6833a75-2530-4fb4-9740-af41ec49d5e5\") " pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.591395 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhqth\" (UniqueName: \"kubernetes.io/projected/e6833a75-2530-4fb4-9740-af41ec49d5e5-kube-api-access-jhqth\") pod \"csi-hostpathplugin-64x99\" (UID: \"e6833a75-2530-4fb4-9740-af41ec49d5e5\") " pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.591481 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5ggv\" (UniqueName: \"kubernetes.io/projected/44297f57-b260-417f-b6eb-a255f41cd584-kube-api-access-n5ggv\") pod \"ingress-canary-56h2q\" (UID: \"44297f57-b260-417f-b6eb-a255f41cd584\") " pod="openshift-ingress-canary/ingress-canary-56h2q" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.591563 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e6833a75-2530-4fb4-9740-af41ec49d5e5-csi-data-dir\") pod \"csi-hostpathplugin-64x99\" (UID: \"e6833a75-2530-4fb4-9740-af41ec49d5e5\") " pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.591589 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" 
(UniqueName: \"kubernetes.io/host-path/e6833a75-2530-4fb4-9740-af41ec49d5e5-socket-dir\") pod \"csi-hostpathplugin-64x99\" (UID: \"e6833a75-2530-4fb4-9740-af41ec49d5e5\") " pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.591612 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/f5a38fb1-4a72-46da-9968-3111ff2ed6b3-certs\") pod \"machine-config-server-6s7m8\" (UID: \"f5a38fb1-4a72-46da-9968-3111ff2ed6b3\") " pod="openshift-machine-config-operator/machine-config-server-6s7m8" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.591662 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/44297f57-b260-417f-b6eb-a255f41cd584-cert\") pod \"ingress-canary-56h2q\" (UID: \"44297f57-b260-417f-b6eb-a255f41cd584\") " pod="openshift-ingress-canary/ingress-canary-56h2q" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.591777 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4ddb\" (UniqueName: \"kubernetes.io/projected/f5a38fb1-4a72-46da-9968-3111ff2ed6b3-kube-api-access-j4ddb\") pod \"machine-config-server-6s7m8\" (UID: \"f5a38fb1-4a72-46da-9968-3111ff2ed6b3\") " pod="openshift-machine-config-operator/machine-config-server-6s7m8" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.591805 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e6833a75-2530-4fb4-9740-af41ec49d5e5-plugins-dir\") pod \"csi-hostpathplugin-64x99\" (UID: \"e6833a75-2530-4fb4-9740-af41ec49d5e5\") " pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.591965 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/f5a38fb1-4a72-46da-9968-3111ff2ed6b3-node-bootstrap-token\") pod \"machine-config-server-6s7m8\" (UID: \"f5a38fb1-4a72-46da-9968-3111ff2ed6b3\") " pod="openshift-machine-config-operator/machine-config-server-6s7m8" Jan 22 11:57:10 crc kubenswrapper[4773]: E0122 11:57:10.592166 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:11.092135717 +0000 UTC m=+138.670251602 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.592341 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e6833a75-2530-4fb4-9740-af41ec49d5e5-mountpoint-dir\") pod \"csi-hostpathplugin-64x99\" (UID: \"e6833a75-2530-4fb4-9740-af41ec49d5e5\") " pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.593844 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e6833a75-2530-4fb4-9740-af41ec49d5e5-registration-dir\") pod \"csi-hostpathplugin-64x99\" (UID: \"e6833a75-2530-4fb4-9740-af41ec49d5e5\") " pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.594756 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e6833a75-2530-4fb4-9740-af41ec49d5e5-csi-data-dir\") pod \"csi-hostpathplugin-64x99\" (UID: \"e6833a75-2530-4fb4-9740-af41ec49d5e5\") " pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.595640 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e6833a75-2530-4fb4-9740-af41ec49d5e5-socket-dir\") pod \"csi-hostpathplugin-64x99\" (UID: \"e6833a75-2530-4fb4-9740-af41ec49d5e5\") " pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.596028 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e6833a75-2530-4fb4-9740-af41ec49d5e5-plugins-dir\") pod \"csi-hostpathplugin-64x99\" (UID: \"e6833a75-2530-4fb4-9740-af41ec49d5e5\") " pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.596413 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/f5a38fb1-4a72-46da-9968-3111ff2ed6b3-node-bootstrap-token\") pod \"machine-config-server-6s7m8\" (UID: \"f5a38fb1-4a72-46da-9968-3111ff2ed6b3\") " pod="openshift-machine-config-operator/machine-config-server-6s7m8" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.605223 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/f5a38fb1-4a72-46da-9968-3111ff2ed6b3-certs\") pod \"machine-config-server-6s7m8\" (UID: \"f5a38fb1-4a72-46da-9968-3111ff2ed6b3\") " pod="openshift-machine-config-operator/machine-config-server-6s7m8" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.616181 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/44297f57-b260-417f-b6eb-a255f41cd584-cert\") pod \"ingress-canary-56h2q\" (UID: \"44297f57-b260-417f-b6eb-a255f41cd584\") " pod="openshift-ingress-canary/ingress-canary-56h2q" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 
11:57:10.627735 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.631994 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5368f6e7-edfa-4489-851a-96495696b28f-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-fmqk7\" (UID: \"5368f6e7-edfa-4489-851a-96495696b28f\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmqk7" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.632539 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74d68a97-0d9e-4eb0-886f-a61ae22935ab-bound-sa-token\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.652595 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-zmgk2" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.665872 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5ggv\" (UniqueName: \"kubernetes.io/projected/44297f57-b260-417f-b6eb-a255f41cd584-kube-api-access-n5ggv\") pod \"ingress-canary-56h2q\" (UID: \"44297f57-b260-417f-b6eb-a255f41cd584\") " pod="openshift-ingress-canary/ingress-canary-56h2q" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.681012 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhqth\" (UniqueName: \"kubernetes.io/projected/e6833a75-2530-4fb4-9740-af41ec49d5e5-kube-api-access-jhqth\") pod \"csi-hostpathplugin-64x99\" (UID: \"e6833a75-2530-4fb4-9740-af41ec49d5e5\") " pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.693168 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.693616 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 11:57:10 crc kubenswrapper[4773]: [-]has-synced failed: reason withheld Jan 22 11:57:10 crc kubenswrapper[4773]: [+]process-running ok Jan 22 11:57:10 crc kubenswrapper[4773]: healthz check failed Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.693696 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 11:57:10 crc kubenswrapper[4773]: E0122 11:57:10.693945 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-01-22 11:57:11.193929434 +0000 UTC m=+138.772045259 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.709496 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-56h2q" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.717060 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4ddb\" (UniqueName: \"kubernetes.io/projected/f5a38fb1-4a72-46da-9968-3111ff2ed6b3-kube-api-access-j4ddb\") pod \"machine-config-server-6s7m8\" (UID: \"f5a38fb1-4a72-46da-9968-3111ff2ed6b3\") " pod="openshift-machine-config-operator/machine-config-server-6s7m8" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.718156 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6s7m8" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.744438 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-64x99" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.797008 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.797638 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57bqd\" (UniqueName: \"kubernetes.io/projected/1a76da48-177b-429e-a136-d78afeae02aa-kube-api-access-57bqd\") pod \"machine-api-operator-5694c8668f-jm24g\" (UID: \"1a76da48-177b-429e-a136-d78afeae02aa\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" Jan 22 11:57:10 crc kubenswrapper[4773]: E0122 11:57:10.798294 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:11.29825791 +0000 UTC m=+138.876373745 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.810907 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57bqd\" (UniqueName: \"kubernetes.io/projected/1a76da48-177b-429e-a136-d78afeae02aa-kube-api-access-57bqd\") pod \"machine-api-operator-5694c8668f-jm24g\" (UID: \"1a76da48-177b-429e-a136-d78afeae02aa\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.859871 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cqcxg" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.889274 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmqk7" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.889791 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.898806 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:10 crc kubenswrapper[4773]: E0122 11:57:10.899081 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:11.399069904 +0000 UTC m=+138.977185729 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.918691 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6h722"] Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.926786 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-r844w"] Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.931642 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56"] Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.954706 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-jc2cw"] Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.955122 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc"] Jan 22 11:57:10 crc kubenswrapper[4773]: I0122 11:57:10.990802 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-5ktrv"] Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.000098 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:11 crc kubenswrapper[4773]: E0122 11:57:11.000592 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:11.500565788 +0000 UTC m=+139.078681623 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.013585 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-zmgk2" podStartSLOduration=119.013567132 podStartE2EDuration="1m59.013567132s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:11.012789108 +0000 UTC m=+138.590904943" watchObservedRunningTime="2026-01-22 11:57:11.013567132 +0000 UTC m=+138.591682957" Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.103318 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:11 crc kubenswrapper[4773]: E0122 11:57:11.105040 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:11.60501575 +0000 UTC m=+139.183131575 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.194209 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-mttmk" podStartSLOduration=119.194190469 podStartE2EDuration="1m59.194190469s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:11.167904949 +0000 UTC m=+138.746020774" watchObservedRunningTime="2026-01-22 11:57:11.194190469 +0000 UTC m=+138.772306304" Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.211668 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:11 crc kubenswrapper[4773]: E0122 11:57:11.211931 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:11.711917868 +0000 UTC m=+139.290033693 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.211955 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-zrkcm"] Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.229812 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb"] Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.233471 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8fll2"] Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.259252 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" podStartSLOduration=119.2592314 podStartE2EDuration="1m59.2592314s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:11.239774156 +0000 UTC m=+138.817889991" watchObservedRunningTime="2026-01-22 11:57:11.2592314 +0000 UTC m=+138.837347225" Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.277860 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb"] Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.313046 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:11 crc kubenswrapper[4773]: E0122 11:57:11.313574 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:11.813557687 +0000 UTC m=+139.391673512 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.316601 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-vnsx2" podStartSLOduration=120.316578618 podStartE2EDuration="2m0.316578618s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:11.315075623 +0000 UTC m=+138.893191458" watchObservedRunningTime="2026-01-22 11:57:11.316578618 +0000 UTC m=+138.894694443" Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.415485 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:11 crc kubenswrapper[4773]: E0122 11:57:11.415695 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:11.915669858 +0000 UTC m=+139.493785673 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.415929 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:11 crc kubenswrapper[4773]: E0122 11:57:11.416341 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:11.916331576 +0000 UTC m=+139.494447411 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.419456 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tqwkj"] Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.458889 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tnzhm"] Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.475063 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr"] Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.503976 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-h7fz7"] Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.505538 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6s7m8" event={"ID":"f5a38fb1-4a72-46da-9968-3111ff2ed6b3","Type":"ContainerStarted","Data":"faac147a71da5cc11d9af67d4de28fd62d42c0df74d435efa20e8cd6a0e0a3d5"} Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.517725 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.519053 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8fll2" event={"ID":"57270c31-9344-4b7a-9636-119edbf10dbc","Type":"ContainerStarted","Data":"5a6f2b742073ee79e6b01f3a1d7c522e8899533bc0c430ecc061150ae6357a65"} Jan 22 11:57:11 crc kubenswrapper[4773]: E0122 11:57:11.520001 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:12.019965963 +0000 UTC m=+139.598081788 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.522395 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb" event={"ID":"4b2948a0-e876-4813-8f06-b5a82d35b47a","Type":"ContainerStarted","Data":"c4a9dc1ab407d43f8b4c3ee78aa2bc9782ce377d08d5247adc8b68ec368133ef"} Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.531202 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56" event={"ID":"c65efd55-e31e-4f78-9c88-89e9c5d35df8","Type":"ContainerStarted","Data":"44d497235360508070daf50ed9a9d7e03c44136c8fac31dd36ca10d48bd5a94d"} Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.533511 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-r7gll"] Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.537853 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6h722" event={"ID":"bfd1a7c2-cdbc-4c02-97b4-6a09476397b8","Type":"ContainerStarted","Data":"795484d77a508ee7df6e98717edab670e85acdb63cdc15d8c457b06391a19ad8"} Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.538896 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-r844w" event={"ID":"6940aa57-4e2c-41c6-a4e0-64081d1208c9","Type":"ContainerStarted","Data":"f72c4e30f4237d65af413a7527bc1c8164925f55d50a3ec2f60304b20169f467"} Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.540507 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28" event={"ID":"88ff6e28-5128-42f0-8280-79874dd5431e","Type":"ContainerStarted","Data":"ed54b4eccb7d6622efcd2ff3d2dde432132ee436236d74ad4162e42851769bd3"} Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.542985 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pd2kc" event={"ID":"9ddb15eb-ebaf-4cb3-90b6-6156ffd665af","Type":"ContainerStarted","Data":"9ea1d3862b4ba159548c85107e10aafd867a2b8213dc3a52c77ef98f82e3a833"} Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.544791 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jc2cw" event={"ID":"6f6256b7-548b-46ff-9c2a-d95240136df2","Type":"ContainerStarted","Data":"3c1a8b9389f08b3bb5b7d19e133511b01ccd33d3400cad7a82261b4c6bad87ac"} Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.560842 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-29862" event={"ID":"5e69f07b-6904-4637-9b6a-95d9714f036a","Type":"ContainerStarted","Data":"da3aae4347e02e85e9a2b5d7198be1ca0df40583dc3fbd876884b778f550553d"} Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.560909 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-29862" event={"ID":"5e69f07b-6904-4637-9b6a-95d9714f036a","Type":"ContainerStarted","Data":"001ff964c8f17ca8680f17d9a7fd9fb2df6703a0baaa5becd68cc2a4896af263"} Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.567823 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb" event={"ID":"3f045d2d-b976-40bb-a002-6dcdfeef75e4","Type":"ContainerStarted","Data":"bb34816bf65fd564cb646d1a9b7700bbe7f1cc623fa925406877c757e1253bb3"} Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.570957 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc" event={"ID":"a8dcf163-8c98-4db0-95e5-1d96e45a619c","Type":"ContainerStarted","Data":"77c0fef9997351ad7e34f3a743454b492b1fbb26f6621a431ca0911c05c64dc4"} Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.573520 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkth4" event={"ID":"6b6f34a9-cd4e-45a5-b092-d2b0a59b0d9e","Type":"ContainerStarted","Data":"3d2301145495e815feabcaf72dfb4fe816cc7414913194601240bf3fdb1188f8"} Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.574448 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-5ktrv" event={"ID":"866b7e00-9f0a-4b4d-9b5b-91457bcd2409","Type":"ContainerStarted","Data":"43e1ad968ce9a09bfe563f66472e37d51af82c960f505a392a6dc78132e0a2e7"} Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.575652 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zrkcm" event={"ID":"e0dc1dc8-96fa-4338-b5c9-a6a85fe40ba1","Type":"ContainerStarted","Data":"525ea3283988b04331613994129d79a346e307c4de47b90fb68e19ecda408da4"} Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.577190 4773 patch_prober.go:28] interesting pod/downloads-7954f5f757-trjxm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" start-of-body= Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.577229 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-trjxm" podUID="943bb747-f86c-4422-b99f-97a7fdb07b87" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.607522 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-lrxk6" podStartSLOduration=120.607498761 podStartE2EDuration="2m0.607498761s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:11.598850025 +0000 UTC m=+139.176965870" watchObservedRunningTime="2026-01-22 11:57:11.607498761 +0000 UTC m=+139.185614586" Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.619680 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: 
\"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:11 crc kubenswrapper[4773]: E0122 11:57:11.620045 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:12.120032445 +0000 UTC m=+139.698148270 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.657834 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 11:57:11 crc kubenswrapper[4773]: [-]has-synced failed: reason withheld Jan 22 11:57:11 crc kubenswrapper[4773]: [+]process-running ok Jan 22 11:57:11 crc kubenswrapper[4773]: healthz check failed Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.658215 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.720732 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gqc2d" podStartSLOduration=120.720714523 podStartE2EDuration="2m0.720714523s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:11.720038204 +0000 UTC m=+139.298154049" watchObservedRunningTime="2026-01-22 11:57:11.720714523 +0000 UTC m=+139.298830348" Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.723965 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:11 crc kubenswrapper[4773]: E0122 11:57:11.727165 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:12.227142962 +0000 UTC m=+139.805258857 (durationBeforeRetry 500ms). 
Jan 22 11:57:11 crc kubenswrapper[4773]: E0122 11:57:11.727165 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:12.227142962 +0000 UTC m=+139.805258857 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.750068 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2hwmg"]
Jan 22 11:57:11 crc kubenswrapper[4773]: W0122 11:57:11.760499 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4b093dc7_d5ee_409a_9c1c_e003686d44e4.slice/crio-c0c80447e29c19ebbec6f07be0636efba9a47132450d26ca0426501175cea1d9 WatchSource:0}: Error finding container c0c80447e29c19ebbec6f07be0636efba9a47132450d26ca0426501175cea1d9: Status 404 returned error can't find the container with id c0c80447e29c19ebbec6f07be0636efba9a47132450d26ca0426501175cea1d9
Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.764986 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-xqd5z"]
Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.768318 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-r5t5b"]
Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.811726 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-56h2q"]
Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.818057 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bw97w"]
Jan 22 11:57:11 crc kubenswrapper[4773]: W0122 11:57:11.818847 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda3b03ed7_b33f_411b_aa66_d06360af63d1.slice/crio-9284a7150d55f60b5d08f87c7c6342d312e33277097ef9bbcac5406a1eea58ea WatchSource:0}: Error finding container 9284a7150d55f60b5d08f87c7c6342d312e33277097ef9bbcac5406a1eea58ea: Status 404 returned error can't find the container with id 9284a7150d55f60b5d08f87c7c6342d312e33277097ef9bbcac5406a1eea58ea
Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.823992 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62"]
Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.825068 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-vnsx2"
Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.825722 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-64x99"]
Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.825758 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p"
Jan 22 11:57:11 crc kubenswrapper[4773]: E0122 11:57:11.826120 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:12.326106956 +0000 UTC m=+139.904222781 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.827915 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-nwwct"]
Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.830434 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4"]
Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.832611 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-jm24g"]
Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.834923 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cqcxg"]
Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.843797 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmqk7"]
Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.880798 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-trjxm" podStartSLOduration=120.880779798 podStartE2EDuration="2m0.880779798s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:11.880313698 +0000 UTC m=+139.458429523" watchObservedRunningTime="2026-01-22 11:57:11.880779798 +0000 UTC m=+139.458895633"
Jan 22 11:57:11 crc kubenswrapper[4773]: W0122 11:57:11.889636 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod143cf6af_d9df_44fc_8281_17ee0a86807c.slice/crio-ba989a4aff6faef36ca241ddcea8cce4d90c678b1a2bc3d0216a7246f1ba4e0a WatchSource:0}: Error finding container ba989a4aff6faef36ca241ddcea8cce4d90c678b1a2bc3d0216a7246f1ba4e0a: Status 404 returned error can't find the container with id ba989a4aff6faef36ca241ddcea8cce4d90c678b1a2bc3d0216a7246f1ba4e0a
Jan 22 11:57:11 crc kubenswrapper[4773]: W0122 11:57:11.912891 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1171149_01a6_41c8_9f28_a6428ccc5127.slice/crio-e191fe8b26c3c1b54f11eb628410cd113891b9a2c9004c51969fb8837c99d22f WatchSource:0}: Error finding container e191fe8b26c3c1b54f11eb628410cd113891b9a2c9004c51969fb8837c99d22f: Status 404 returned error can't find the container with id e191fe8b26c3c1b54f11eb628410cd113891b9a2c9004c51969fb8837c99d22f
Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.919736 4773 csr.go:261] certificate signing request csr-ppj56 is approved, waiting to be issued
Jan 22 11:57:11 crc kubenswrapper[4773]: W0122 11:57:11.925665 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod115c791a_d5a3_4b90_b234_a5305f383cd9.slice/crio-fa1af10bfbebfb2fcf93bbbad19e464b1d7331a52da941af35724d6cb79cdc14 WatchSource:0}: Error finding container fa1af10bfbebfb2fcf93bbbad19e464b1d7331a52da941af35724d6cb79cdc14: Status 404 returned error can't find the container with id fa1af10bfbebfb2fcf93bbbad19e464b1d7331a52da941af35724d6cb79cdc14
Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.926268 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 22 11:57:11 crc kubenswrapper[4773]: E0122 11:57:11.926664 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:12.426651108 +0000 UTC m=+140.004766933 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 11:57:11 crc kubenswrapper[4773]: I0122 11:57:11.933387 4773 csr.go:257] certificate signing request csr-ppj56 is issued
Jan 22 11:57:11 crc kubenswrapper[4773]: W0122 11:57:11.941552 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode6833a75_2530_4fb4_9740_af41ec49d5e5.slice/crio-f8c5c36fb638ceac4be3c2b1b3d400d69e08f75d5a778364e86f30a9546a09f2 WatchSource:0}: Error finding container f8c5c36fb638ceac4be3c2b1b3d400d69e08f75d5a778364e86f30a9546a09f2: Status 404 returned error can't find the container with id f8c5c36fb638ceac4be3c2b1b3d400d69e08f75d5a778364e86f30a9546a09f2
Jan 22 11:57:11 crc kubenswrapper[4773]: W0122 11:57:11.955513 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b87d42a_ede1_4dbc_8d74_a01d06df4d19.slice/crio-8ff4cd2a3bdac54900b7374a853ed6e8ccd766b0565cde7ebab17da99bc5a23a WatchSource:0}: Error finding container 8ff4cd2a3bdac54900b7374a853ed6e8ccd766b0565cde7ebab17da99bc5a23a: Status 404 returned error can't find the container with id 8ff4cd2a3bdac54900b7374a853ed6e8ccd766b0565cde7ebab17da99bc5a23a
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.027470 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p"
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:12.527929183 +0000 UTC m=+140.106045008 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.129195 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:12 crc kubenswrapper[4773]: E0122 11:57:12.129471 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:12.629446897 +0000 UTC m=+140.207562722 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.129580 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:12 crc kubenswrapper[4773]: E0122 11:57:12.130030 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:12.630022802 +0000 UTC m=+140.208138627 (durationBeforeRetry 500ms). 
Jan 22 11:57:12 crc kubenswrapper[4773]: E0122 11:57:12.130030 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:12.630022802 +0000 UTC m=+140.208138627 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.166758 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-rpgsr" podStartSLOduration=121.166734305 podStartE2EDuration="2m1.166734305s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:12.158650134 +0000 UTC m=+139.736765969" watchObservedRunningTime="2026-01-22 11:57:12.166734305 +0000 UTC m=+139.744850130"
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.231209 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 22 11:57:12 crc kubenswrapper[4773]: E0122 11:57:12.231741 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:12.731723115 +0000 UTC m=+140.309838940 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.247354 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-29862" podStartSLOduration=121.247336372 podStartE2EDuration="2m1.247336372s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:12.246660423 +0000 UTC m=+139.824776248" watchObservedRunningTime="2026-01-22 11:57:12.247336372 +0000 UTC m=+139.825452187"
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.280500 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-fwt28" podStartSLOduration=121.28048222 podStartE2EDuration="2m1.28048222s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:12.278934883 +0000 UTC m=+139.857050708" watchObservedRunningTime="2026-01-22 11:57:12.28048222 +0000 UTC m=+139.858598045"
Jan 22 11:57:12 crc kubenswrapper[4773]: E0122 11:57:12.333733 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:12.83371532 +0000 UTC m=+140.411831145 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.334743 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p"
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.335750 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pd2kc" podStartSLOduration=122.335736078 podStartE2EDuration="2m2.335736078s" podCreationTimestamp="2026-01-22 11:55:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:12.332631263 +0000 UTC m=+139.910747098" watchObservedRunningTime="2026-01-22 11:57:12.335736078 +0000 UTC m=+139.913851903"
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.435512 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 22 11:57:12 crc kubenswrapper[4773]: E0122 11:57:12.435725 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:12.935689044 +0000 UTC m=+140.513804869 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.435927 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p"
Jan 22 11:57:12 crc kubenswrapper[4773]: E0122 11:57:12.436185 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:12.936178646 +0000 UTC m=+140.514294471 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.540062 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 22 11:57:12 crc kubenswrapper[4773]: E0122 11:57:12.540461 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:13.04044622 +0000 UTC m=+140.618562045 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.582459 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bw97w" event={"ID":"d1171149-01a6-41c8-9f28-a6428ccc5127","Type":"ContainerStarted","Data":"e191fe8b26c3c1b54f11eb628410cd113891b9a2c9004c51969fb8837c99d22f"}
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.590177 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6h722" event={"ID":"bfd1a7c2-cdbc-4c02-97b4-6a09476397b8","Type":"ContainerStarted","Data":"67e41661a0ce6bd7e69cecdf913f8f5c3606eec6aec0c2c32f968bfd95383c88"}
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.597595 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" event={"ID":"1a76da48-177b-429e-a136-d78afeae02aa","Type":"ContainerStarted","Data":"c4dd9860084496e68c23955644ac79c8d817374c92a02acb0c5b4d38c7e3dd91"}
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.604705 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56" event={"ID":"c65efd55-e31e-4f78-9c88-89e9c5d35df8","Type":"ContainerStarted","Data":"ada9375547d1125c0a31c453835e61ef66a670b8cbb5143cff9b907f15397cd3"}
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.605500 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56"
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.618691 4773 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-2rj56 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.17:8443/healthz\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body=
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.618748 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56" podUID="c65efd55-e31e-4f78-9c88-89e9c5d35df8" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.17:8443/healthz\": dial tcp 10.217.0.17:8443: connect: connection refused"
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.632588 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-6h722" podStartSLOduration=121.632576917 podStartE2EDuration="2m1.632576917s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:12.632141918 +0000 UTC m=+140.210257733" watchObservedRunningTime="2026-01-22 11:57:12.632576917 +0000 UTC m=+140.210692742"
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.633547 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr" event={"ID":"ba3fd722-161e-4047-bd23-6b6d5306a6ee","Type":"ContainerStarted","Data":"6233e2b7aff2bf2d404e1a04193cbafe3975ccb6d26d3ab45275858c598448fd"}
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.633593 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr" event={"ID":"ba3fd722-161e-4047-bd23-6b6d5306a6ee","Type":"ContainerStarted","Data":"aeb10d6c27bff110c31a7e70eca3a1186d64714e79cf3fe95278ec0860884bfb"}
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.634092 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mkth4" podStartSLOduration=120.634087193 podStartE2EDuration="2m0.634087193s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:12.358421712 +0000 UTC m=+139.936537557" watchObservedRunningTime="2026-01-22 11:57:12.634087193 +0000 UTC m=+140.212203018"
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.637830 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm" event={"ID":"4b093dc7-d5ee-409a-9c1c-e003686d44e4","Type":"ContainerStarted","Data":"edf531b173a947c692a696caa8882bda7f42a3f57ac24f563d1348713214b2f9"}
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.637876 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm" event={"ID":"4b093dc7-d5ee-409a-9c1c-e003686d44e4","Type":"ContainerStarted","Data":"c0c80447e29c19ebbec6f07be0636efba9a47132450d26ca0426501175cea1d9"}
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.638058 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm"
(PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cqcxg" event={"ID":"9b87d42a-ede1-4dbc-8d74-a01d06df4d19","Type":"ContainerStarted","Data":"8ff4cd2a3bdac54900b7374a853ed6e8ccd766b0565cde7ebab17da99bc5a23a"} Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.646914 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:12 crc kubenswrapper[4773]: E0122 11:57:12.647416 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:13.14740368 +0000 UTC m=+140.725519505 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.650116 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-64x99" event={"ID":"e6833a75-2530-4fb4-9740-af41ec49d5e5","Type":"ContainerStarted","Data":"f8c5c36fb638ceac4be3c2b1b3d400d69e08f75d5a778364e86f30a9546a09f2"} Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.650208 4773 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-tnzhm container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body= Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.650228 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm" podUID="4b093dc7-d5ee-409a-9c1c-e003686d44e4" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.657081 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 11:57:12 crc kubenswrapper[4773]: [-]has-synced failed: reason withheld Jan 22 11:57:12 crc kubenswrapper[4773]: [+]process-running ok Jan 22 11:57:12 crc kubenswrapper[4773]: healthz check failed Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.657129 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.698726 4773 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-machine-config-operator/machine-config-server-6s7m8" event={"ID":"f5a38fb1-4a72-46da-9968-3111ff2ed6b3","Type":"ContainerStarted","Data":"6bad44e87da3bac47195d31f8e36d586497e7f57cd66d4e5dfa7928c02fab98b"} Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.698767 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" event={"ID":"115c791a-d5a3-4b90-b234-a5305f383cd9","Type":"ContainerStarted","Data":"fa1af10bfbebfb2fcf93bbbad19e464b1d7331a52da941af35724d6cb79cdc14"} Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.715149 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb" event={"ID":"4b2948a0-e876-4813-8f06-b5a82d35b47a","Type":"ContainerStarted","Data":"1ec747250bbf8db02d7bf33a363279dd89d91c189c5e26c36e3d6a5ce26d95df"} Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.754835 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-r7gll" event={"ID":"244704c5-6799-48ab-9c90-8c38ae7f3c5e","Type":"ContainerStarted","Data":"36c001eb260d289d3912862df096885341c57c320d0d8b3abfcabd4928300dbc"} Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.754956 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:12 crc kubenswrapper[4773]: E0122 11:57:12.756431 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:13.25641297 +0000 UTC m=+140.834528795 (durationBeforeRetry 500ms). 
Jan 22 11:57:12 crc kubenswrapper[4773]: E0122 11:57:12.756431 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:13.25641297 +0000 UTC m=+140.834528795 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.757412 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56" podStartSLOduration=120.757394933 podStartE2EDuration="2m0.757394933s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:12.754988488 +0000 UTC m=+140.333104333" watchObservedRunningTime="2026-01-22 11:57:12.757394933 +0000 UTC m=+140.335510768"
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.795500 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg" event={"ID":"d12eac55-c003-4ed2-9b5f-aec3270b23ed","Type":"ContainerStarted","Data":"eb1d7ce0fa4cd0ba22b75b34ee7eb44456f559e22f1a4ca4e90431c0a91f23b3"}
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.798383 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" event={"ID":"4db3dc80-10df-4d72-925f-ab5c927bc6ef","Type":"ContainerStarted","Data":"a565c7c274ccc65d6ae3457d80e53b527ab861b93d1caa877bcb03c851b1cf84"}
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.833963 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jc2cw" event={"ID":"6f6256b7-548b-46ff-9c2a-d95240136df2","Type":"ContainerStarted","Data":"84c9b988877fd9c608cd173e05b64d9ba1c73cfd6a530b6a3b0773085d653690"}
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.846133 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" event={"ID":"a3b03ed7-b33f-411b-aa66-d06360af63d1","Type":"ContainerStarted","Data":"9284a7150d55f60b5d08f87c7c6342d312e33277097ef9bbcac5406a1eea58ea"}
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.849731 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tqwkj" event={"ID":"65999965-0f9c-435c-8687-a50033c9d661","Type":"ContainerStarted","Data":"4f8fe3093cc1778d867714dc3ae1488d00e9b1f723f1b4bf7209bdb2e53dea69"}
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.849771 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tqwkj" event={"ID":"65999965-0f9c-435c-8687-a50033c9d661","Type":"ContainerStarted","Data":"a2119ccfc45b0d36a8cd26e4cb68cbf08081954c61af145d88d33d66a0e3fb72"}
Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.853600 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-r844w" event={"ID":"6940aa57-4e2c-41c6-a4e0-64081d1208c9","Type":"ContainerStarted","Data":"592cd78d0141fe79bfd6b50da84cfbde3676feb3ceb47167b55f20127ad59024"}
started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:12 crc kubenswrapper[4773]: E0122 11:57:12.857098 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:13.357084708 +0000 UTC m=+140.935200533 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.858689 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2hwmg" event={"ID":"2e31d232-2138-4dd6-9b7d-71f87e414a01","Type":"ContainerStarted","Data":"2fc741098c6c1e58258e3a98a997dfe261be3132f30685104a1a73fc92b93cc4"} Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.877986 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm" podStartSLOduration=120.877968664 podStartE2EDuration="2m0.877968664s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:12.877009983 +0000 UTC m=+140.455125808" watchObservedRunningTime="2026-01-22 11:57:12.877968664 +0000 UTC m=+140.456084489" Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.902553 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" event={"ID":"143cf6af-d9df-44fc-8281-17ee0a86807c","Type":"ContainerStarted","Data":"ba989a4aff6faef36ca241ddcea8cce4d90c678b1a2bc3d0216a7246f1ba4e0a"} Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.909553 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb" event={"ID":"3f045d2d-b976-40bb-a002-6dcdfeef75e4","Type":"ContainerStarted","Data":"24cc6628c9ff95edc7f4d89d3e7c9ed95d4df0391a383392f6ffc0991c9d058d"} Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.909960 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb" Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.917013 4773 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-s9vvb container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.917076 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb" 
podUID="3f045d2d-b976-40bb-a002-6dcdfeef75e4" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/healthz\": dial tcp 10.217.0.23:8443: connect: connection refused" Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.923478 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmqk7" event={"ID":"5368f6e7-edfa-4489-851a-96495696b28f","Type":"ContainerStarted","Data":"1a5da5fec1b5b00a9e568f32e7300966882e91a41e60ff2df4d7af4d63625a16"} Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.927197 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62" event={"ID":"dcb4689a-1339-43ea-b525-787e4a35e05c","Type":"ContainerStarted","Data":"8fe1f58962040b745222f5b5e2539b01cbad054a11138c882f6a423f8d8006c7"} Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.943372 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-22 11:52:11 +0000 UTC, rotation deadline is 2026-11-21 02:50:00.692271271 +0000 UTC Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.943446 4773 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 7262h52m47.748830136s for next certificate rotation Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.949118 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-5ktrv" event={"ID":"866b7e00-9f0a-4b4d-9b5b-91457bcd2409","Type":"ContainerStarted","Data":"39081c6fb746ac629c8eb187a88d9d7e32015e61b42ce747d05bef5646869c22"} Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.954799 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-56h2q" event={"ID":"44297f57-b260-417f-b6eb-a255f41cd584","Type":"ContainerStarted","Data":"1783f62a89bed0a32e700a53bc2f92f54e98febb9f59c7971988f064706b3999"} Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.954868 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-56h2q" event={"ID":"44297f57-b260-417f-b6eb-a255f41cd584","Type":"ContainerStarted","Data":"7efb382c9eea061c3f562baf36570403505ddc43ef8e277c33c4aef5d421e99e"} Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.957710 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.958738 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-h7fz7" event={"ID":"a36fde19-2db8-4b4d-9702-a8a3a20a77e1","Type":"ContainerStarted","Data":"c37500e3eb707236cf8cad1a7afa5f242e2b8c0b7c0f58cf7e8a6240242d647c"} Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.958772 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-h7fz7" event={"ID":"a36fde19-2db8-4b4d-9702-a8a3a20a77e1","Type":"ContainerStarted","Data":"ad1fde9008674da9e2f7e5921b25927426fe05ecc3f59930dea17612f9a7ed50"} Jan 22 11:57:12 crc kubenswrapper[4773]: E0122 11:57:12.968669 4773 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:13.468649109 +0000 UTC m=+141.046764934 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:12 crc kubenswrapper[4773]: I0122 11:57:12.969392 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-6s7m8" podStartSLOduration=6.96937039 podStartE2EDuration="6.96937039s" podCreationTimestamp="2026-01-22 11:57:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:12.968820066 +0000 UTC m=+140.546935881" watchObservedRunningTime="2026-01-22 11:57:12.96937039 +0000 UTC m=+140.547486215" Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.004961 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8fll2" event={"ID":"57270c31-9344-4b7a-9636-119edbf10dbc","Type":"ContainerStarted","Data":"d444d9744e3f13064ef2d4ca8ba690243f3b2f7022342cca3b5dd1df4291029a"} Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.020089 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zrkcm" event={"ID":"e0dc1dc8-96fa-4338-b5c9-a6a85fe40ba1","Type":"ContainerStarted","Data":"d4c32aa453f83379c16b30d8aaa43ed333bf20eb329c53c48ad2dc165b41ce57"} Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.059678 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:13 crc kubenswrapper[4773]: E0122 11:57:13.061412 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:13.561400463 +0000 UTC m=+141.139516288 (durationBeforeRetry 500ms). 
Jan 22 11:57:13 crc kubenswrapper[4773]: E0122 11:57:13.061412 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:13.561400463 +0000 UTC m=+141.139516288 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.163185 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 22 11:57:13 crc kubenswrapper[4773]: E0122 11:57:13.164399 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:13.664384761 +0000 UTC m=+141.242500586 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.271252 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p"
Jan 22 11:57:13 crc kubenswrapper[4773]: E0122 11:57:13.271608 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:13.771593223 +0000 UTC m=+141.349709048 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.271756 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-h7fz7" podStartSLOduration=121.271740939 podStartE2EDuration="2m1.271740939s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:13.269655319 +0000 UTC m=+140.847771144" watchObservedRunningTime="2026-01-22 11:57:13.271740939 +0000 UTC m=+140.849856764"
Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.337986 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-56h2q" podStartSLOduration=7.3379610920000005 podStartE2EDuration="7.337961092s" podCreationTimestamp="2026-01-22 11:57:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:13.330629124 +0000 UTC m=+140.908744959" watchObservedRunningTime="2026-01-22 11:57:13.337961092 +0000 UTC m=+140.916076917"
Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.402987 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62" podStartSLOduration=122.402969953 podStartE2EDuration="2m2.402969953s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:13.366590795 +0000 UTC m=+140.944706620" watchObservedRunningTime="2026-01-22 11:57:13.402969953 +0000 UTC m=+140.981085778"
Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.404212 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 22 11:57:13 crc kubenswrapper[4773]: E0122 11:57:13.405496 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:13.905481422 +0000 UTC m=+141.483597247 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.476906 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg" podStartSLOduration=121.47688789 podStartE2EDuration="2m1.47688789s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:13.436585412 +0000 UTC m=+141.014701247" watchObservedRunningTime="2026-01-22 11:57:13.47688789 +0000 UTC m=+141.055003715"
Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.491055 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb" podStartSLOduration=121.491040434 podStartE2EDuration="2m1.491040434s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:13.488944443 +0000 UTC m=+141.067060278" watchObservedRunningTime="2026-01-22 11:57:13.491040434 +0000 UTC m=+141.069156259"
Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.513124 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p"
Jan 22 11:57:13 crc kubenswrapper[4773]: E0122 11:57:13.513609 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:14.013593093 +0000 UTC m=+141.591708918 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.615508 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 22 11:57:13 crc kubenswrapper[4773]: E0122 11:57:13.616149 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:14.116129692 +0000 UTC m=+141.694245517 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.660516 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 22 11:57:13 crc kubenswrapper[4773]: [-]has-synced failed: reason withheld
Jan 22 11:57:13 crc kubenswrapper[4773]: [+]process-running ok
Jan 22 11:57:13 crc kubenswrapper[4773]: healthz check failed
Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.660563 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.717490 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p"
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.807023 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg" Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.807370 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg" Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.818255 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:13 crc kubenswrapper[4773]: E0122 11:57:13.818788 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:14.318774014 +0000 UTC m=+141.896889839 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.919709 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:13 crc kubenswrapper[4773]: E0122 11:57:13.920002 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:14.419989455 +0000 UTC m=+141.998105280 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:13 crc kubenswrapper[4773]: I0122 11:57:13.926961 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.020428 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:14 crc kubenswrapper[4773]: E0122 11:57:14.020575 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:14.520554859 +0000 UTC m=+142.098670694 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.020657 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:14 crc kubenswrapper[4773]: E0122 11:57:14.020962 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:14.520952286 +0000 UTC m=+142.099068121 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.025299 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb" event={"ID":"4b2948a0-e876-4813-8f06-b5a82d35b47a","Type":"ContainerStarted","Data":"ab94a96681b2f47c7d8e07151207df5c4503e89a3d139d93e8808d2896efc47a"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.026314 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" event={"ID":"4db3dc80-10df-4d72-925f-ab5c927bc6ef","Type":"ContainerStarted","Data":"eac1dcfef43371100e9304497bfebdb81e98c2e4d7d99489061b48eddcb8f0ae"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.026506 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.027712 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-64x99" event={"ID":"e6833a75-2530-4fb4-9740-af41ec49d5e5","Type":"ContainerStarted","Data":"259cc3629a3cc4288387629bfe9074129ee8e0d70d5ea7cf58fd9fe8dac2adee"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.028133 4773 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-nwwct container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.6:6443/healthz\": dial tcp 10.217.0.6:6443: connect: connection refused" start-of-body= Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.028179 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" podUID="4db3dc80-10df-4d72-925f-ab5c927bc6ef" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.6:6443/healthz\": dial tcp 10.217.0.6:6443: connect: connection refused" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.029571 4773 generic.go:334] "Generic (PLEG): container finished" podID="a3b03ed7-b33f-411b-aa66-d06360af63d1" containerID="89d3d81edde462128321e7aefbbeff3f02433dad63ea14a707336c0a85b077d8" exitCode=0 Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.029721 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" event={"ID":"a3b03ed7-b33f-411b-aa66-d06360af63d1","Type":"ContainerDied","Data":"89d3d81edde462128321e7aefbbeff3f02433dad63ea14a707336c0a85b077d8"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.031478 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc" event={"ID":"a8dcf163-8c98-4db0-95e5-1d96e45a619c","Type":"ContainerStarted","Data":"ce0f364ec23d46638c2b8943d476778894a8a9b7faa8359e7f4764cc1c6c3d67"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.043272 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-5ktrv" 
event={"ID":"866b7e00-9f0a-4b4d-9b5b-91457bcd2409","Type":"ContainerStarted","Data":"396cc62721294029a6dfb0f94551fac4ad8b37d20d70422105e8cc91b39ffc2d"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.043374 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-5ktrv" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.045064 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-r844w" event={"ID":"6940aa57-4e2c-41c6-a4e0-64081d1208c9","Type":"ContainerStarted","Data":"ab69a125efd158923ee7074c2f2e6937744c0b94497ef80f906f73fc95b68f8b"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.046460 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-xnb62" event={"ID":"dcb4689a-1339-43ea-b525-787e4a35e05c","Type":"ContainerStarted","Data":"4dbd68bc12ae66d2fd69541facf8760e74f402ae6733b1afed5660c9c5a81ca3"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.048141 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr" event={"ID":"ba3fd722-161e-4047-bd23-6b6d5306a6ee","Type":"ContainerStarted","Data":"c30c8ff17620adf48e771785961a6b9a91132737ca739321ab34a07c3dcdb1ea"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.049816 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cqcxg" event={"ID":"9b87d42a-ede1-4dbc-8d74-a01d06df4d19","Type":"ContainerStarted","Data":"1b161dadff109255702799e9727209a32f8b820a766fac40f0a7e782a3fd424c"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.051262 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2hwmg" event={"ID":"2e31d232-2138-4dd6-9b7d-71f87e414a01","Type":"ContainerStarted","Data":"1174027a4898b5918b3d39a369b56aa7144fedd8affbcca4a46d5bd864466ce4"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.052799 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmqk7" event={"ID":"5368f6e7-edfa-4489-851a-96495696b28f","Type":"ContainerStarted","Data":"f58597ec853e22445e000da3c115bfab50975115f86ee92bc9de5a026eaec060"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.054953 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tqwkj" event={"ID":"65999965-0f9c-435c-8687-a50033c9d661","Type":"ContainerStarted","Data":"eec05a3ce3ed772df9dbeb3cd68b3290b6aa4ec23aed608b4e581053e21ff57a"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.055432 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tqwkj" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.067525 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zrkcm" event={"ID":"e0dc1dc8-96fa-4338-b5c9-a6a85fe40ba1","Type":"ContainerStarted","Data":"f2dac1deb84f10af986f6f8a51a7003787902e84e20cb2e166875487ed02e553"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.071023 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" 
event={"ID":"1a76da48-177b-429e-a136-d78afeae02aa","Type":"ContainerStarted","Data":"311420d5cafd8f87e9c860602449cd77a4ebbe42f4de01a6b0bfbcb01b70bfc0"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.071061 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" event={"ID":"1a76da48-177b-429e-a136-d78afeae02aa","Type":"ContainerStarted","Data":"e8059cd041343efb3089d88bbac28479e7398c103d8060b65ec263795c0c1517"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.073343 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2fmtb" podStartSLOduration=122.073330069 podStartE2EDuration="2m2.073330069s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:14.071874896 +0000 UTC m=+141.649990721" watchObservedRunningTime="2026-01-22 11:57:14.073330069 +0000 UTC m=+141.651445894" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.081529 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" event={"ID":"143cf6af-d9df-44fc-8281-17ee0a86807c","Type":"ContainerStarted","Data":"3a9609fafdbec2f25ef045edc84b114e7a16cfe30f69618ecb2e2c38a522e1ce"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.082480 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.093296 4773 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-r5t5b container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body= Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.093359 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" podUID="143cf6af-d9df-44fc-8281-17ee0a86807c" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.102586 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cqcxg" podStartSLOduration=122.102567457 podStartE2EDuration="2m2.102567457s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:14.095382245 +0000 UTC m=+141.673498080" watchObservedRunningTime="2026-01-22 11:57:14.102567457 +0000 UTC m=+141.680683282" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.104378 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bw97w" event={"ID":"d1171149-01a6-41c8-9f28-a6428ccc5127","Type":"ContainerStarted","Data":"9e76237505a90ed33835a9978c157f3404c909e41aeafee53fe93ae58f55998c"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.122925 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:14 crc kubenswrapper[4773]: E0122 11:57:14.123020 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:14.623002264 +0000 UTC m=+142.201118099 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.123228 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.125032 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jc2cw" event={"ID":"6f6256b7-548b-46ff-9c2a-d95240136df2","Type":"ContainerStarted","Data":"3cab743f997dfc1c175102611c4251c37296b9935e362f5079fc3506edb4ecf8"} Jan 22 11:57:14 crc kubenswrapper[4773]: E0122 11:57:14.125124 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:14.625111165 +0000 UTC m=+142.203227060 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.138353 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zrkcm" podStartSLOduration=122.138333039 podStartE2EDuration="2m2.138333039s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:14.13790281 +0000 UTC m=+141.716018645" watchObservedRunningTime="2026-01-22 11:57:14.138333039 +0000 UTC m=+141.716448864" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.175701 4773 generic.go:334] "Generic (PLEG): container finished" podID="57270c31-9344-4b7a-9636-119edbf10dbc" containerID="d444d9744e3f13064ef2d4ca8ba690243f3b2f7022342cca3b5dd1df4291029a" exitCode=0 Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.175815 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8fll2" event={"ID":"57270c31-9344-4b7a-9636-119edbf10dbc","Type":"ContainerDied","Data":"d444d9744e3f13064ef2d4ca8ba690243f3b2f7022342cca3b5dd1df4291029a"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.175856 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8fll2" event={"ID":"57270c31-9344-4b7a-9636-119edbf10dbc","Type":"ContainerStarted","Data":"e1f9ae87f3be784eb3873123b38dcf049fffc0a3633ea6c6be5abfabc4a704aa"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.176645 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8fll2" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.193094 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" event={"ID":"115c791a-d5a3-4b90-b234-a5305f383cd9","Type":"ContainerStarted","Data":"32d538ef71030880e887ea457cb89387fb81edc773d98255eea115cbe45c8cf2"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.194307 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.196231 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" podStartSLOduration=123.19619718 podStartE2EDuration="2m3.19619718s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:14.192715288 +0000 UTC m=+141.770831113" watchObservedRunningTime="2026-01-22 11:57:14.19619718 +0000 UTC m=+141.774313015" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.206683 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-r7gll" 
event={"ID":"244704c5-6799-48ab-9c90-8c38ae7f3c5e","Type":"ContainerStarted","Data":"3621b89698de3de71ce8ae389904379d1019e4f6d99b1a6526fb600f56b6a73a"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.206726 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-r7gll" event={"ID":"244704c5-6799-48ab-9c90-8c38ae7f3c5e","Type":"ContainerStarted","Data":"f5f8527ba7d5d6827a18f5cf1f01f72aad41db0d82b8cd16bc72cd76808752c3"} Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.213390 4773 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-jg6k4 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.25:5443/healthz\": dial tcp 10.217.0.25:5443: connect: connection refused" start-of-body= Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.213458 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" podUID="115c791a-d5a3-4b90-b234-a5305f383cd9" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.25:5443/healthz\": dial tcp 10.217.0.25:5443: connect: connection refused" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.213681 4773 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-tnzhm container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body= Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.213749 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm" podUID="4b093dc7-d5ee-409a-9c1c-e003686d44e4" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.26:8080/healthz\": dial tcp 10.217.0.26:8080: connect: connection refused" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.229316 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:14 crc kubenswrapper[4773]: E0122 11:57:14.231470 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:14.731436809 +0000 UTC m=+142.309552644 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.231598 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-xsqsg" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.255838 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s9vvb" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.263800 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2rj56" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.331999 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:14 crc kubenswrapper[4773]: E0122 11:57:14.332334 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:14.832321426 +0000 UTC m=+142.410437251 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.338358 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-jm24g" podStartSLOduration=122.338332107 podStartE2EDuration="2m2.338332107s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:14.299673129 +0000 UTC m=+141.877788964" watchObservedRunningTime="2026-01-22 11:57:14.338332107 +0000 UTC m=+141.916447942" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.381083 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-ls4mr" podStartSLOduration=123.381065261 podStartE2EDuration="2m3.381065261s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:14.381014718 +0000 UTC m=+141.959130543" watchObservedRunningTime="2026-01-22 11:57:14.381065261 +0000 UTC m=+141.959181086" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.382628 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-r844w" podStartSLOduration=123.382623098 podStartE2EDuration="2m3.382623098s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:14.337752921 +0000 UTC m=+141.915868746" watchObservedRunningTime="2026-01-22 11:57:14.382623098 +0000 UTC m=+141.960738923" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.406106 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-fmqk7" podStartSLOduration=122.406084536 podStartE2EDuration="2m2.406084536s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:14.401618132 +0000 UTC m=+141.979733967" watchObservedRunningTime="2026-01-22 11:57:14.406084536 +0000 UTC m=+141.984200361" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.438781 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:14 crc kubenswrapper[4773]: E0122 11:57:14.439587 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-22 11:57:14.939565159 +0000 UTC m=+142.517680984 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.495117 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2hwmg" podStartSLOduration=122.495093238 podStartE2EDuration="2m2.495093238s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:14.428103632 +0000 UTC m=+142.006219457" watchObservedRunningTime="2026-01-22 11:57:14.495093238 +0000 UTC m=+142.073209063" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.541328 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:14 crc kubenswrapper[4773]: E0122 11:57:14.541736 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:15.041722131 +0000 UTC m=+142.619837956 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.571635 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-5ktrv" podStartSLOduration=8.571611918 podStartE2EDuration="8.571611918s" podCreationTimestamp="2026-01-22 11:57:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:14.498623201 +0000 UTC m=+142.076739046" watchObservedRunningTime="2026-01-22 11:57:14.571611918 +0000 UTC m=+142.149727743" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.577562 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc" podStartSLOduration=123.577539135 podStartE2EDuration="2m3.577539135s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:14.57234035 +0000 UTC m=+142.150456205" watchObservedRunningTime="2026-01-22 11:57:14.577539135 +0000 UTC m=+142.155654960" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.648648 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:14 crc kubenswrapper[4773]: E0122 11:57:14.648987 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:15.148967844 +0000 UTC m=+142.727083669 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.672271 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 11:57:14 crc kubenswrapper[4773]: [-]has-synced failed: reason withheld Jan 22 11:57:14 crc kubenswrapper[4773]: [+]process-running ok Jan 22 11:57:14 crc kubenswrapper[4773]: healthz check failed Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.672370 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.732707 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" podStartSLOduration=123.732689577 podStartE2EDuration="2m3.732689577s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:14.731715805 +0000 UTC m=+142.309831630" watchObservedRunningTime="2026-01-22 11:57:14.732689577 +0000 UTC m=+142.310805402" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.734086 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tqwkj" podStartSLOduration=122.734076197 podStartE2EDuration="2m2.734076197s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:14.646539729 +0000 UTC m=+142.224655554" watchObservedRunningTime="2026-01-22 11:57:14.734076197 +0000 UTC m=+142.312192022" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.749756 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:14 crc kubenswrapper[4773]: E0122 11:57:14.750195 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:15.250180986 +0000 UTC m=+142.828296811 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.753096 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-bw97w" podStartSLOduration=122.753057941 podStartE2EDuration="2m2.753057941s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:14.752260066 +0000 UTC m=+142.330375911" watchObservedRunningTime="2026-01-22 11:57:14.753057941 +0000 UTC m=+142.331173776" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.788571 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-r7gll" podStartSLOduration=122.788548191 podStartE2EDuration="2m2.788548191s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:14.782627954 +0000 UTC m=+142.360743789" watchObservedRunningTime="2026-01-22 11:57:14.788548191 +0000 UTC m=+142.366664016" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.810981 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" podStartSLOduration=122.810961563 podStartE2EDuration="2m2.810961563s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:14.806754001 +0000 UTC m=+142.384869836" watchObservedRunningTime="2026-01-22 11:57:14.810961563 +0000 UTC m=+142.389077398" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.837831 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8fll2" podStartSLOduration=123.837815527 podStartE2EDuration="2m3.837815527s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:14.832566399 +0000 UTC m=+142.410682234" watchObservedRunningTime="2026-01-22 11:57:14.837815527 +0000 UTC m=+142.415931352" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.852831 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:14 crc kubenswrapper[4773]: E0122 11:57:14.853253 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b 
nodeName:}" failed. No retries permitted until 2026-01-22 11:57:15.353232966 +0000 UTC m=+142.931348791 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.885849 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jc2cw" podStartSLOduration=122.885830461 podStartE2EDuration="2m2.885830461s" podCreationTimestamp="2026-01-22 11:55:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:14.862888545 +0000 UTC m=+142.441004370" watchObservedRunningTime="2026-01-22 11:57:14.885830461 +0000 UTC m=+142.463946286" Jan 22 11:57:14 crc kubenswrapper[4773]: I0122 11:57:14.954089 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:14 crc kubenswrapper[4773]: E0122 11:57:14.954561 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:15.454545302 +0000 UTC m=+143.032661127 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.061913 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:15 crc kubenswrapper[4773]: E0122 11:57:15.062954 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:15.562935205 +0000 UTC m=+143.141051040 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.164071 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:15 crc kubenswrapper[4773]: E0122 11:57:15.164646 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:15.664624507 +0000 UTC m=+143.242740422 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.188885 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-s9kn2"] Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.190024 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s9kn2" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.196808 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.211003 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s9kn2"] Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.223593 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" event={"ID":"a3b03ed7-b33f-411b-aa66-d06360af63d1","Type":"ContainerStarted","Data":"26bdbdef0f9c1ffe84103d7b4e5133702995aa83a2587ca11e734160ad2f989b"} Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.255787 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.268877 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.269543 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8p99s\" (UniqueName: \"kubernetes.io/projected/c4d3a6db-50cb-4809-ac76-edb0a9d949a1-kube-api-access-8p99s\") pod \"certified-operators-s9kn2\" (UID: \"c4d3a6db-50cb-4809-ac76-edb0a9d949a1\") " pod="openshift-marketplace/certified-operators-s9kn2" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.269682 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4d3a6db-50cb-4809-ac76-edb0a9d949a1-catalog-content\") pod \"certified-operators-s9kn2\" (UID: \"c4d3a6db-50cb-4809-ac76-edb0a9d949a1\") " pod="openshift-marketplace/certified-operators-s9kn2" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.269891 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4d3a6db-50cb-4809-ac76-edb0a9d949a1-utilities\") pod \"certified-operators-s9kn2\" (UID: \"c4d3a6db-50cb-4809-ac76-edb0a9d949a1\") " pod="openshift-marketplace/certified-operators-s9kn2" Jan 22 11:57:15 crc kubenswrapper[4773]: E0122 11:57:15.292901 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:15.792874541 +0000 UTC m=+143.370990366 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.383486 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cf7rt"] Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.385353 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cf7rt" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.389452 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.399048 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cf7rt"] Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.404394 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8p99s\" (UniqueName: \"kubernetes.io/projected/c4d3a6db-50cb-4809-ac76-edb0a9d949a1-kube-api-access-8p99s\") pod \"certified-operators-s9kn2\" (UID: \"c4d3a6db-50cb-4809-ac76-edb0a9d949a1\") " pod="openshift-marketplace/certified-operators-s9kn2" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.404449 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4d3a6db-50cb-4809-ac76-edb0a9d949a1-catalog-content\") pod \"certified-operators-s9kn2\" (UID: \"c4d3a6db-50cb-4809-ac76-edb0a9d949a1\") " pod="openshift-marketplace/certified-operators-s9kn2" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.404498 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4d3a6db-50cb-4809-ac76-edb0a9d949a1-utilities\") pod \"certified-operators-s9kn2\" (UID: \"c4d3a6db-50cb-4809-ac76-edb0a9d949a1\") " pod="openshift-marketplace/certified-operators-s9kn2" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.404543 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:15 crc kubenswrapper[4773]: E0122 11:57:15.404834 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:15.904822219 +0000 UTC m=+143.482938044 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.404989 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4d3a6db-50cb-4809-ac76-edb0a9d949a1-catalog-content\") pod \"certified-operators-s9kn2\" (UID: \"c4d3a6db-50cb-4809-ac76-edb0a9d949a1\") " pod="openshift-marketplace/certified-operators-s9kn2" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.405246 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4d3a6db-50cb-4809-ac76-edb0a9d949a1-utilities\") pod \"certified-operators-s9kn2\" (UID: \"c4d3a6db-50cb-4809-ac76-edb0a9d949a1\") " pod="openshift-marketplace/certified-operators-s9kn2" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.472703 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8p99s\" (UniqueName: \"kubernetes.io/projected/c4d3a6db-50cb-4809-ac76-edb0a9d949a1-kube-api-access-8p99s\") pod \"certified-operators-s9kn2\" (UID: \"c4d3a6db-50cb-4809-ac76-edb0a9d949a1\") " pod="openshift-marketplace/certified-operators-s9kn2" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.508627 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s9kn2" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.509007 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.509314 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce66454f-39e9-4aac-9887-987e15252181-catalog-content\") pod \"community-operators-cf7rt\" (UID: \"ce66454f-39e9-4aac-9887-987e15252181\") " pod="openshift-marketplace/community-operators-cf7rt" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.509400 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce66454f-39e9-4aac-9887-987e15252181-utilities\") pod \"community-operators-cf7rt\" (UID: \"ce66454f-39e9-4aac-9887-987e15252181\") " pod="openshift-marketplace/community-operators-cf7rt" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.509474 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpttn\" (UniqueName: \"kubernetes.io/projected/ce66454f-39e9-4aac-9887-987e15252181-kube-api-access-cpttn\") pod \"community-operators-cf7rt\" (UID: \"ce66454f-39e9-4aac-9887-987e15252181\") " pod="openshift-marketplace/community-operators-cf7rt" Jan 22 11:57:15 crc kubenswrapper[4773]: E0122 11:57:15.509645 4773 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:16.009624866 +0000 UTC m=+143.587740691 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.585151 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qv7sq"] Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.586421 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qv7sq" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.609310 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qv7sq"] Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.610934 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce66454f-39e9-4aac-9887-987e15252181-catalog-content\") pod \"community-operators-cf7rt\" (UID: \"ce66454f-39e9-4aac-9887-987e15252181\") " pod="openshift-marketplace/community-operators-cf7rt" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.610990 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce66454f-39e9-4aac-9887-987e15252181-utilities\") pod \"community-operators-cf7rt\" (UID: \"ce66454f-39e9-4aac-9887-987e15252181\") " pod="openshift-marketplace/community-operators-cf7rt" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.611040 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.611056 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpttn\" (UniqueName: \"kubernetes.io/projected/ce66454f-39e9-4aac-9887-987e15252181-kube-api-access-cpttn\") pod \"community-operators-cf7rt\" (UID: \"ce66454f-39e9-4aac-9887-987e15252181\") " pod="openshift-marketplace/community-operators-cf7rt" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.611670 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce66454f-39e9-4aac-9887-987e15252181-catalog-content\") pod \"community-operators-cf7rt\" (UID: \"ce66454f-39e9-4aac-9887-987e15252181\") " pod="openshift-marketplace/community-operators-cf7rt" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.611879 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/ce66454f-39e9-4aac-9887-987e15252181-utilities\") pod \"community-operators-cf7rt\" (UID: \"ce66454f-39e9-4aac-9887-987e15252181\") " pod="openshift-marketplace/community-operators-cf7rt" Jan 22 11:57:15 crc kubenswrapper[4773]: E0122 11:57:15.612124 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:16.112109732 +0000 UTC m=+143.690225547 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.657370 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpttn\" (UniqueName: \"kubernetes.io/projected/ce66454f-39e9-4aac-9887-987e15252181-kube-api-access-cpttn\") pod \"community-operators-cf7rt\" (UID: \"ce66454f-39e9-4aac-9887-987e15252181\") " pod="openshift-marketplace/community-operators-cf7rt" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.664670 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 11:57:15 crc kubenswrapper[4773]: [-]has-synced failed: reason withheld Jan 22 11:57:15 crc kubenswrapper[4773]: [+]process-running ok Jan 22 11:57:15 crc kubenswrapper[4773]: healthz check failed Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.664734 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.706819 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cf7rt" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.733378 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.733648 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2x9c\" (UniqueName: \"kubernetes.io/projected/11240379-c011-45ef-8626-f59a8772a5ce-kube-api-access-z2x9c\") pod \"certified-operators-qv7sq\" (UID: \"11240379-c011-45ef-8626-f59a8772a5ce\") " pod="openshift-marketplace/certified-operators-qv7sq" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.733733 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11240379-c011-45ef-8626-f59a8772a5ce-utilities\") pod \"certified-operators-qv7sq\" (UID: \"11240379-c011-45ef-8626-f59a8772a5ce\") " pod="openshift-marketplace/certified-operators-qv7sq" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.733795 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11240379-c011-45ef-8626-f59a8772a5ce-catalog-content\") pod \"certified-operators-qv7sq\" (UID: \"11240379-c011-45ef-8626-f59a8772a5ce\") " pod="openshift-marketplace/certified-operators-qv7sq" Jan 22 11:57:15 crc kubenswrapper[4773]: E0122 11:57:15.733893 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:16.233875266 +0000 UTC m=+143.811991091 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.794866 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6j54f"] Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.795983 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6j54f" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.815766 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6j54f"] Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.838091 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11240379-c011-45ef-8626-f59a8772a5ce-catalog-content\") pod \"certified-operators-qv7sq\" (UID: \"11240379-c011-45ef-8626-f59a8772a5ce\") " pod="openshift-marketplace/certified-operators-qv7sq" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.838178 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2x9c\" (UniqueName: \"kubernetes.io/projected/11240379-c011-45ef-8626-f59a8772a5ce-kube-api-access-z2x9c\") pod \"certified-operators-qv7sq\" (UID: \"11240379-c011-45ef-8626-f59a8772a5ce\") " pod="openshift-marketplace/certified-operators-qv7sq" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.839076 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11240379-c011-45ef-8626-f59a8772a5ce-catalog-content\") pod \"certified-operators-qv7sq\" (UID: \"11240379-c011-45ef-8626-f59a8772a5ce\") " pod="openshift-marketplace/certified-operators-qv7sq" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.847392 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:15 crc kubenswrapper[4773]: E0122 11:57:15.847791 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:16.347775088 +0000 UTC m=+143.925890913 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.848002 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11240379-c011-45ef-8626-f59a8772a5ce-utilities\") pod \"certified-operators-qv7sq\" (UID: \"11240379-c011-45ef-8626-f59a8772a5ce\") " pod="openshift-marketplace/certified-operators-qv7sq" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.848467 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11240379-c011-45ef-8626-f59a8772a5ce-utilities\") pod \"certified-operators-qv7sq\" (UID: \"11240379-c011-45ef-8626-f59a8772a5ce\") " pod="openshift-marketplace/certified-operators-qv7sq" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.882192 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2x9c\" (UniqueName: \"kubernetes.io/projected/11240379-c011-45ef-8626-f59a8772a5ce-kube-api-access-z2x9c\") pod \"certified-operators-qv7sq\" (UID: \"11240379-c011-45ef-8626-f59a8772a5ce\") " pod="openshift-marketplace/certified-operators-qv7sq" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.902937 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qv7sq" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.949994 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.950534 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/792b0be2-378b-4296-87ff-ca27a76013bd-utilities\") pod \"community-operators-6j54f\" (UID: \"792b0be2-378b-4296-87ff-ca27a76013bd\") " pod="openshift-marketplace/community-operators-6j54f" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.950590 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgqmm\" (UniqueName: \"kubernetes.io/projected/792b0be2-378b-4296-87ff-ca27a76013bd-kube-api-access-hgqmm\") pod \"community-operators-6j54f\" (UID: \"792b0be2-378b-4296-87ff-ca27a76013bd\") " pod="openshift-marketplace/community-operators-6j54f" Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.950656 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/792b0be2-378b-4296-87ff-ca27a76013bd-catalog-content\") pod \"community-operators-6j54f\" (UID: \"792b0be2-378b-4296-87ff-ca27a76013bd\") " pod="openshift-marketplace/community-operators-6j54f" Jan 22 11:57:15 crc kubenswrapper[4773]: E0122 11:57:15.950827 4773 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:16.450810048 +0000 UTC m=+144.028925883 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:15 crc kubenswrapper[4773]: I0122 11:57:15.974824 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.047633 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jg6k4" Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.054412 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/792b0be2-378b-4296-87ff-ca27a76013bd-catalog-content\") pod \"community-operators-6j54f\" (UID: \"792b0be2-378b-4296-87ff-ca27a76013bd\") " pod="openshift-marketplace/community-operators-6j54f" Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.054482 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/792b0be2-378b-4296-87ff-ca27a76013bd-utilities\") pod \"community-operators-6j54f\" (UID: \"792b0be2-378b-4296-87ff-ca27a76013bd\") " pod="openshift-marketplace/community-operators-6j54f" Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.054529 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.054550 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgqmm\" (UniqueName: \"kubernetes.io/projected/792b0be2-378b-4296-87ff-ca27a76013bd-kube-api-access-hgqmm\") pod \"community-operators-6j54f\" (UID: \"792b0be2-378b-4296-87ff-ca27a76013bd\") " pod="openshift-marketplace/community-operators-6j54f" Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.054827 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/792b0be2-378b-4296-87ff-ca27a76013bd-catalog-content\") pod \"community-operators-6j54f\" (UID: \"792b0be2-378b-4296-87ff-ca27a76013bd\") " pod="openshift-marketplace/community-operators-6j54f" Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.055042 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/792b0be2-378b-4296-87ff-ca27a76013bd-utilities\") pod \"community-operators-6j54f\" (UID: \"792b0be2-378b-4296-87ff-ca27a76013bd\") " 
pod="openshift-marketplace/community-operators-6j54f" Jan 22 11:57:16 crc kubenswrapper[4773]: E0122 11:57:16.055078 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:16.555067392 +0000 UTC m=+144.133183217 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.099023 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgqmm\" (UniqueName: \"kubernetes.io/projected/792b0be2-378b-4296-87ff-ca27a76013bd-kube-api-access-hgqmm\") pod \"community-operators-6j54f\" (UID: \"792b0be2-378b-4296-87ff-ca27a76013bd\") " pod="openshift-marketplace/community-operators-6j54f" Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.152639 4773 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.152931 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6j54f" Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.155724 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:16 crc kubenswrapper[4773]: E0122 11:57:16.156036 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:16.656021542 +0000 UTC m=+144.234137367 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.257295 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:16 crc kubenswrapper[4773]: E0122 11:57:16.257825 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:16.757800528 +0000 UTC m=+144.335916343 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.281759 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" event={"ID":"a3b03ed7-b33f-411b-aa66-d06360af63d1","Type":"ContainerStarted","Data":"d2d6f07d0b78f8f8056677d42dcf2a7ee39df6416be28f874e7994dbf574de35"} Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.299796 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-64x99" event={"ID":"e6833a75-2530-4fb4-9740-af41ec49d5e5","Type":"ContainerStarted","Data":"22b94d254321f3b9d19c5024cfce81696953592977760cdcbfb9dec4a34dfaff"} Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.311508 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cf7rt"] Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.323746 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s9kn2"] Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.358103 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:16 crc kubenswrapper[4773]: E0122 11:57:16.358842 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:16.858819991 +0000 UTC m=+144.436935816 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.373024 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" podStartSLOduration=125.373009637 podStartE2EDuration="2m5.373009637s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:16.364947547 +0000 UTC m=+143.943063372" watchObservedRunningTime="2026-01-22 11:57:16.373009637 +0000 UTC m=+143.951125462" Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.434718 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8fll2" Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.480977 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:16 crc kubenswrapper[4773]: E0122 11:57:16.481416 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:16.981401909 +0000 UTC m=+144.559517734 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.535997 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qv7sq"] Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.589842 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:16 crc kubenswrapper[4773]: E0122 11:57:16.590205 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:17.09019008 +0000 UTC m=+144.668305905 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.668672 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 11:57:16 crc kubenswrapper[4773]: [-]has-synced failed: reason withheld Jan 22 11:57:16 crc kubenswrapper[4773]: [+]process-running ok Jan 22 11:57:16 crc kubenswrapper[4773]: healthz check failed Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.668982 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.693044 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:16 crc kubenswrapper[4773]: E0122 11:57:16.693466 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:17.1934514 +0000 UTC m=+144.771567225 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.799777 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:16 crc kubenswrapper[4773]: E0122 11:57:16.800158 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 11:57:17.300140229 +0000 UTC m=+144.878256054 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.849316 4773 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-22T11:57:16.152673177Z","Handler":null,"Name":""} Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.901908 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:16 crc kubenswrapper[4773]: E0122 11:57:16.902641 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 11:57:17.402626756 +0000 UTC m=+144.980742581 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-dzt2p" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.930370 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6j54f"] Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.930659 4773 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Jan 22 11:57:16 crc kubenswrapper[4773]: I0122 11:57:16.930705 4773 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.003465 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.062992 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.105209 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.122695 4773 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.122937 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.176339 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bwxfc"] Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.177917 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bwxfc" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.180796 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.213119 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bwxfc"] Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.268148 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-dzt2p\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.305359 4773 generic.go:334] "Generic (PLEG): container finished" podID="792b0be2-378b-4296-87ff-ca27a76013bd" containerID="a0a2a9173fad12c327ffe6e177e7b9e43225fec82cd99332b2908d56748a0a09" exitCode=0 Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.306241 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6j54f" event={"ID":"792b0be2-378b-4296-87ff-ca27a76013bd","Type":"ContainerDied","Data":"a0a2a9173fad12c327ffe6e177e7b9e43225fec82cd99332b2908d56748a0a09"} Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.306312 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6j54f" event={"ID":"792b0be2-378b-4296-87ff-ca27a76013bd","Type":"ContainerStarted","Data":"a8ffd972ceae841c5e7b477d17d4c88140f9fddb9acc95214464a3d4f1c94df5"} Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.312240 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be427c4b-854d-442d-86b1-19ee1d69814e-utilities\") pod \"redhat-marketplace-bwxfc\" (UID: \"be427c4b-854d-442d-86b1-19ee1d69814e\") " pod="openshift-marketplace/redhat-marketplace-bwxfc" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.312351 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be427c4b-854d-442d-86b1-19ee1d69814e-catalog-content\") pod \"redhat-marketplace-bwxfc\" (UID: \"be427c4b-854d-442d-86b1-19ee1d69814e\") " pod="openshift-marketplace/redhat-marketplace-bwxfc" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.312426 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnd5q\" (UniqueName: \"kubernetes.io/projected/be427c4b-854d-442d-86b1-19ee1d69814e-kube-api-access-rnd5q\") pod \"redhat-marketplace-bwxfc\" (UID: \"be427c4b-854d-442d-86b1-19ee1d69814e\") " pod="openshift-marketplace/redhat-marketplace-bwxfc" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.317328 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.320360 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-64x99" event={"ID":"e6833a75-2530-4fb4-9740-af41ec49d5e5","Type":"ContainerStarted","Data":"19bc495dbfcec8d339bc54b21da25471cd4c09223ebc1a1e207e2e0bf8719737"} Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.320397 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-64x99" event={"ID":"e6833a75-2530-4fb4-9740-af41ec49d5e5","Type":"ContainerStarted","Data":"0f0c9b4b63347d6b518a08c3d54a358f1754c632220c745fbd018b24d9ae8e60"} Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.322453 4773 generic.go:334] "Generic (PLEG): container finished" podID="11240379-c011-45ef-8626-f59a8772a5ce" containerID="1680fa1addbbafab82f2329e8b2d264ddef94e9be2c284b8d044548c3b4f15fb" exitCode=0 Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.322508 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qv7sq" event={"ID":"11240379-c011-45ef-8626-f59a8772a5ce","Type":"ContainerDied","Data":"1680fa1addbbafab82f2329e8b2d264ddef94e9be2c284b8d044548c3b4f15fb"} Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.322527 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qv7sq" event={"ID":"11240379-c011-45ef-8626-f59a8772a5ce","Type":"ContainerStarted","Data":"25ffffbfae3fd3bb7a26e94568ba4fc67346e0574ec3ed59dc4f2e98a0dde41b"} Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.332705 4773 generic.go:334] "Generic (PLEG): container finished" podID="ce66454f-39e9-4aac-9887-987e15252181" containerID="b15c3239bfe824503bb47f29123a0b818c64f9fb695c849fc48317ace9cb2f35" exitCode=0 Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.333008 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cf7rt" event={"ID":"ce66454f-39e9-4aac-9887-987e15252181","Type":"ContainerDied","Data":"b15c3239bfe824503bb47f29123a0b818c64f9fb695c849fc48317ace9cb2f35"} Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.333054 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cf7rt" 
event={"ID":"ce66454f-39e9-4aac-9887-987e15252181","Type":"ContainerStarted","Data":"93bf5e16fd966159eb39bccb38a600e392f611e5c06245597802da79da2ed27b"} Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.338360 4773 generic.go:334] "Generic (PLEG): container finished" podID="c4d3a6db-50cb-4809-ac76-edb0a9d949a1" containerID="e955f895c05c8bbd32a9cb9fb095e597e242f605feee88aae6835aa5e5b2b338" exitCode=0 Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.339553 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9kn2" event={"ID":"c4d3a6db-50cb-4809-ac76-edb0a9d949a1","Type":"ContainerDied","Data":"e955f895c05c8bbd32a9cb9fb095e597e242f605feee88aae6835aa5e5b2b338"} Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.339586 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9kn2" event={"ID":"c4d3a6db-50cb-4809-ac76-edb0a9d949a1","Type":"ContainerStarted","Data":"ce52184bb0243b576769de9b67dc16cc453924a2c3dd9785079c2580e8dccf54"} Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.412785 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-64x99" podStartSLOduration=11.41277074 podStartE2EDuration="11.41277074s" podCreationTimestamp="2026-01-22 11:57:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:17.410041962 +0000 UTC m=+144.988157797" watchObservedRunningTime="2026-01-22 11:57:17.41277074 +0000 UTC m=+144.990886565" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.416939 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be427c4b-854d-442d-86b1-19ee1d69814e-catalog-content\") pod \"redhat-marketplace-bwxfc\" (UID: \"be427c4b-854d-442d-86b1-19ee1d69814e\") " pod="openshift-marketplace/redhat-marketplace-bwxfc" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.417061 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnd5q\" (UniqueName: \"kubernetes.io/projected/be427c4b-854d-442d-86b1-19ee1d69814e-kube-api-access-rnd5q\") pod \"redhat-marketplace-bwxfc\" (UID: \"be427c4b-854d-442d-86b1-19ee1d69814e\") " pod="openshift-marketplace/redhat-marketplace-bwxfc" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.417226 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be427c4b-854d-442d-86b1-19ee1d69814e-utilities\") pod \"redhat-marketplace-bwxfc\" (UID: \"be427c4b-854d-442d-86b1-19ee1d69814e\") " pod="openshift-marketplace/redhat-marketplace-bwxfc" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.418976 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be427c4b-854d-442d-86b1-19ee1d69814e-utilities\") pod \"redhat-marketplace-bwxfc\" (UID: \"be427c4b-854d-442d-86b1-19ee1d69814e\") " pod="openshift-marketplace/redhat-marketplace-bwxfc" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.419025 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be427c4b-854d-442d-86b1-19ee1d69814e-catalog-content\") pod \"redhat-marketplace-bwxfc\" (UID: \"be427c4b-854d-442d-86b1-19ee1d69814e\") " 
pod="openshift-marketplace/redhat-marketplace-bwxfc" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.436108 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.449959 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnd5q\" (UniqueName: \"kubernetes.io/projected/be427c4b-854d-442d-86b1-19ee1d69814e-kube-api-access-rnd5q\") pod \"redhat-marketplace-bwxfc\" (UID: \"be427c4b-854d-442d-86b1-19ee1d69814e\") " pod="openshift-marketplace/redhat-marketplace-bwxfc" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.524473 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bwxfc" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.585242 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zvhf9"] Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.586525 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zvhf9" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.600757 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zvhf9"] Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.659294 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 11:57:17 crc kubenswrapper[4773]: [-]has-synced failed: reason withheld Jan 22 11:57:17 crc kubenswrapper[4773]: [+]process-running ok Jan 22 11:57:17 crc kubenswrapper[4773]: healthz check failed Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.659550 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.724015 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbe13262-f7ed-4311-bf73-c2dd21ef3733-utilities\") pod \"redhat-marketplace-zvhf9\" (UID: \"fbe13262-f7ed-4311-bf73-c2dd21ef3733\") " pod="openshift-marketplace/redhat-marketplace-zvhf9" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.724209 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbfgd\" (UniqueName: \"kubernetes.io/projected/fbe13262-f7ed-4311-bf73-c2dd21ef3733-kube-api-access-jbfgd\") pod \"redhat-marketplace-zvhf9\" (UID: \"fbe13262-f7ed-4311-bf73-c2dd21ef3733\") " pod="openshift-marketplace/redhat-marketplace-zvhf9" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.724378 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbe13262-f7ed-4311-bf73-c2dd21ef3733-catalog-content\") pod \"redhat-marketplace-zvhf9\" (UID: \"fbe13262-f7ed-4311-bf73-c2dd21ef3733\") " pod="openshift-marketplace/redhat-marketplace-zvhf9" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.827866 4773 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-jbfgd\" (UniqueName: \"kubernetes.io/projected/fbe13262-f7ed-4311-bf73-c2dd21ef3733-kube-api-access-jbfgd\") pod \"redhat-marketplace-zvhf9\" (UID: \"fbe13262-f7ed-4311-bf73-c2dd21ef3733\") " pod="openshift-marketplace/redhat-marketplace-zvhf9" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.827947 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbe13262-f7ed-4311-bf73-c2dd21ef3733-catalog-content\") pod \"redhat-marketplace-zvhf9\" (UID: \"fbe13262-f7ed-4311-bf73-c2dd21ef3733\") " pod="openshift-marketplace/redhat-marketplace-zvhf9" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.827966 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbe13262-f7ed-4311-bf73-c2dd21ef3733-utilities\") pod \"redhat-marketplace-zvhf9\" (UID: \"fbe13262-f7ed-4311-bf73-c2dd21ef3733\") " pod="openshift-marketplace/redhat-marketplace-zvhf9" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.828682 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbe13262-f7ed-4311-bf73-c2dd21ef3733-utilities\") pod \"redhat-marketplace-zvhf9\" (UID: \"fbe13262-f7ed-4311-bf73-c2dd21ef3733\") " pod="openshift-marketplace/redhat-marketplace-zvhf9" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.828763 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbe13262-f7ed-4311-bf73-c2dd21ef3733-catalog-content\") pod \"redhat-marketplace-zvhf9\" (UID: \"fbe13262-f7ed-4311-bf73-c2dd21ef3733\") " pod="openshift-marketplace/redhat-marketplace-zvhf9" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.860433 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbfgd\" (UniqueName: \"kubernetes.io/projected/fbe13262-f7ed-4311-bf73-c2dd21ef3733-kube-api-access-jbfgd\") pod \"redhat-marketplace-zvhf9\" (UID: \"fbe13262-f7ed-4311-bf73-c2dd21ef3733\") " pod="openshift-marketplace/redhat-marketplace-zvhf9" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.916434 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zvhf9" Jan 22 11:57:17 crc kubenswrapper[4773]: I0122 11:57:17.940058 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-dzt2p"] Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.007220 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bwxfc"] Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.286520 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zvhf9"] Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.363423 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zvhf9" event={"ID":"fbe13262-f7ed-4311-bf73-c2dd21ef3733","Type":"ContainerStarted","Data":"69de6be4a5c5178a02c10c17637fafd7d4db90c5cf099f2a30cbc53cd71401c9"} Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.366658 4773 generic.go:334] "Generic (PLEG): container finished" podID="be427c4b-854d-442d-86b1-19ee1d69814e" containerID="c39444875f047470d82435aee5f50f4e05962e81f57fc53101598942da355328" exitCode=0 Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.366741 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bwxfc" event={"ID":"be427c4b-854d-442d-86b1-19ee1d69814e","Type":"ContainerDied","Data":"c39444875f047470d82435aee5f50f4e05962e81f57fc53101598942da355328"} Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.366766 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bwxfc" event={"ID":"be427c4b-854d-442d-86b1-19ee1d69814e","Type":"ContainerStarted","Data":"3e6fa17fa287403f9f79ffeb163c0a0ab074a513c6ebb754c6577f700af83f9c"} Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.378801 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dfvhr"] Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.380480 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dfvhr" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.382972 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.392812 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" event={"ID":"74d68a97-0d9e-4eb0-886f-a61ae22935ab","Type":"ContainerStarted","Data":"16d390b079acd88834765f02fc828860824b5fab4222befe2ede0a1f1be86d15"} Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.392859 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" event={"ID":"74d68a97-0d9e-4eb0-886f-a61ae22935ab","Type":"ContainerStarted","Data":"8944987e21a184f5989bdef1f5ffd54a1d443ccb95dffa82ebe4f007f21d6ab3"} Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.394056 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.453773 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dfvhr"] Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.502855 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" podStartSLOduration=127.502836125 podStartE2EDuration="2m7.502836125s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:18.498973757 +0000 UTC m=+146.077089592" watchObservedRunningTime="2026-01-22 11:57:18.502836125 +0000 UTC m=+146.080951960" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.519522 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-lrxk6" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.520343 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-lrxk6" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.533385 4773 patch_prober.go:28] interesting pod/console-f9d7485db-lrxk6 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.533449 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-lrxk6" podUID="139ce7fa-7d44-4233-b4e9-b7827ce4c68d" containerName="console" probeResult="failure" output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.556781 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc3f7a11-33be-4f89-abf4-62f1f13ad823-utilities\") pod \"redhat-operators-dfvhr\" (UID: \"fc3f7a11-33be-4f89-abf4-62f1f13ad823\") " pod="openshift-marketplace/redhat-operators-dfvhr" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.556888 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhclv\" (UniqueName: 
\"kubernetes.io/projected/fc3f7a11-33be-4f89-abf4-62f1f13ad823-kube-api-access-jhclv\") pod \"redhat-operators-dfvhr\" (UID: \"fc3f7a11-33be-4f89-abf4-62f1f13ad823\") " pod="openshift-marketplace/redhat-operators-dfvhr" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.556975 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc3f7a11-33be-4f89-abf4-62f1f13ad823-catalog-content\") pod \"redhat-operators-dfvhr\" (UID: \"fc3f7a11-33be-4f89-abf4-62f1f13ad823\") " pod="openshift-marketplace/redhat-operators-dfvhr" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.653139 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-zmgk2" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.660120 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 11:57:18 crc kubenswrapper[4773]: [-]has-synced failed: reason withheld Jan 22 11:57:18 crc kubenswrapper[4773]: [+]process-running ok Jan 22 11:57:18 crc kubenswrapper[4773]: healthz check failed Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.660181 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.666511 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc3f7a11-33be-4f89-abf4-62f1f13ad823-utilities\") pod \"redhat-operators-dfvhr\" (UID: \"fc3f7a11-33be-4f89-abf4-62f1f13ad823\") " pod="openshift-marketplace/redhat-operators-dfvhr" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.666599 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhclv\" (UniqueName: \"kubernetes.io/projected/fc3f7a11-33be-4f89-abf4-62f1f13ad823-kube-api-access-jhclv\") pod \"redhat-operators-dfvhr\" (UID: \"fc3f7a11-33be-4f89-abf4-62f1f13ad823\") " pod="openshift-marketplace/redhat-operators-dfvhr" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.666728 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc3f7a11-33be-4f89-abf4-62f1f13ad823-catalog-content\") pod \"redhat-operators-dfvhr\" (UID: \"fc3f7a11-33be-4f89-abf4-62f1f13ad823\") " pod="openshift-marketplace/redhat-operators-dfvhr" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.667186 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc3f7a11-33be-4f89-abf4-62f1f13ad823-catalog-content\") pod \"redhat-operators-dfvhr\" (UID: \"fc3f7a11-33be-4f89-abf4-62f1f13ad823\") " pod="openshift-marketplace/redhat-operators-dfvhr" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.667471 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc3f7a11-33be-4f89-abf4-62f1f13ad823-utilities\") pod \"redhat-operators-dfvhr\" (UID: \"fc3f7a11-33be-4f89-abf4-62f1f13ad823\") " 
pod="openshift-marketplace/redhat-operators-dfvhr" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.675961 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.700235 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhclv\" (UniqueName: \"kubernetes.io/projected/fc3f7a11-33be-4f89-abf4-62f1f13ad823-kube-api-access-jhclv\") pod \"redhat-operators-dfvhr\" (UID: \"fc3f7a11-33be-4f89-abf4-62f1f13ad823\") " pod="openshift-marketplace/redhat-operators-dfvhr" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.711768 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dfvhr" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.804000 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vfkxp"] Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.805105 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vfkxp" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.817517 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vfkxp"] Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.870042 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.870114 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44223378-fd96-41e8-9189-558b02fb1d2d-utilities\") pod \"redhat-operators-vfkxp\" (UID: \"44223378-fd96-41e8-9189-558b02fb1d2d\") " pod="openshift-marketplace/redhat-operators-vfkxp" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.870152 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.870192 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44223378-fd96-41e8-9189-558b02fb1d2d-catalog-content\") pod \"redhat-operators-vfkxp\" (UID: \"44223378-fd96-41e8-9189-558b02fb1d2d\") " pod="openshift-marketplace/redhat-operators-vfkxp" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.870212 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5v9ht\" (UniqueName: \"kubernetes.io/projected/44223378-fd96-41e8-9189-558b02fb1d2d-kube-api-access-5v9ht\") pod \"redhat-operators-vfkxp\" (UID: \"44223378-fd96-41e8-9189-558b02fb1d2d\") " pod="openshift-marketplace/redhat-operators-vfkxp" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 
11:57:18.870240 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.870305 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.874401 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.879724 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.883715 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.884817 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.928698 4773 patch_prober.go:28] interesting pod/downloads-7954f5f757-trjxm container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" start-of-body= Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.929014 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-trjxm" podUID="943bb747-f86c-4422-b99f-97a7fdb07b87" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.928885 4773 patch_prober.go:28] interesting pod/downloads-7954f5f757-trjxm container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: 
connection refused" start-of-body= Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.929234 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-trjxm" podUID="943bb747-f86c-4422-b99f-97a7fdb07b87" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.971470 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44223378-fd96-41e8-9189-558b02fb1d2d-utilities\") pod \"redhat-operators-vfkxp\" (UID: \"44223378-fd96-41e8-9189-558b02fb1d2d\") " pod="openshift-marketplace/redhat-operators-vfkxp" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.971552 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44223378-fd96-41e8-9189-558b02fb1d2d-catalog-content\") pod \"redhat-operators-vfkxp\" (UID: \"44223378-fd96-41e8-9189-558b02fb1d2d\") " pod="openshift-marketplace/redhat-operators-vfkxp" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.971574 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5v9ht\" (UniqueName: \"kubernetes.io/projected/44223378-fd96-41e8-9189-558b02fb1d2d-kube-api-access-5v9ht\") pod \"redhat-operators-vfkxp\" (UID: \"44223378-fd96-41e8-9189-558b02fb1d2d\") " pod="openshift-marketplace/redhat-operators-vfkxp" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.972653 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44223378-fd96-41e8-9189-558b02fb1d2d-utilities\") pod \"redhat-operators-vfkxp\" (UID: \"44223378-fd96-41e8-9189-558b02fb1d2d\") " pod="openshift-marketplace/redhat-operators-vfkxp" Jan 22 11:57:18 crc kubenswrapper[4773]: I0122 11:57:18.972933 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44223378-fd96-41e8-9189-558b02fb1d2d-catalog-content\") pod \"redhat-operators-vfkxp\" (UID: \"44223378-fd96-41e8-9189-558b02fb1d2d\") " pod="openshift-marketplace/redhat-operators-vfkxp" Jan 22 11:57:19 crc kubenswrapper[4773]: I0122 11:57:19.017239 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5v9ht\" (UniqueName: \"kubernetes.io/projected/44223378-fd96-41e8-9189-558b02fb1d2d-kube-api-access-5v9ht\") pod \"redhat-operators-vfkxp\" (UID: \"44223378-fd96-41e8-9189-558b02fb1d2d\") " pod="openshift-marketplace/redhat-operators-vfkxp" Jan 22 11:57:19 crc kubenswrapper[4773]: I0122 11:57:19.074722 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:57:19 crc kubenswrapper[4773]: I0122 11:57:19.081820 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 11:57:19 crc kubenswrapper[4773]: I0122 11:57:19.088992 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 11:57:19 crc kubenswrapper[4773]: I0122 11:57:19.161503 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vfkxp" Jan 22 11:57:19 crc kubenswrapper[4773]: I0122 11:57:19.342236 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dfvhr"] Jan 22 11:57:19 crc kubenswrapper[4773]: W0122 11:57:19.353529 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc3f7a11_33be_4f89_abf4_62f1f13ad823.slice/crio-496031f0617503d12839fed7b7d30c20da3f8ccd415138e771a26452f2cdec12 WatchSource:0}: Error finding container 496031f0617503d12839fed7b7d30c20da3f8ccd415138e771a26452f2cdec12: Status 404 returned error can't find the container with id 496031f0617503d12839fed7b7d30c20da3f8ccd415138e771a26452f2cdec12 Jan 22 11:57:19 crc kubenswrapper[4773]: I0122 11:57:19.406339 4773 generic.go:334] "Generic (PLEG): container finished" podID="fbe13262-f7ed-4311-bf73-c2dd21ef3733" containerID="ca663e7a2cb1b343b17a2ee8ff27547dd5717d61d207d0345650a032015a8c57" exitCode=0 Jan 22 11:57:19 crc kubenswrapper[4773]: I0122 11:57:19.407057 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zvhf9" event={"ID":"fbe13262-f7ed-4311-bf73-c2dd21ef3733","Type":"ContainerDied","Data":"ca663e7a2cb1b343b17a2ee8ff27547dd5717d61d207d0345650a032015a8c57"} Jan 22 11:57:19 crc kubenswrapper[4773]: I0122 11:57:19.411943 4773 generic.go:334] "Generic (PLEG): container finished" podID="a8dcf163-8c98-4db0-95e5-1d96e45a619c" containerID="ce0f364ec23d46638c2b8943d476778894a8a9b7faa8359e7f4764cc1c6c3d67" exitCode=0 Jan 22 11:57:19 crc kubenswrapper[4773]: I0122 11:57:19.412008 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc" event={"ID":"a8dcf163-8c98-4db0-95e5-1d96e45a619c","Type":"ContainerDied","Data":"ce0f364ec23d46638c2b8943d476778894a8a9b7faa8359e7f4764cc1c6c3d67"} Jan 22 11:57:19 crc kubenswrapper[4773]: I0122 11:57:19.414039 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dfvhr" event={"ID":"fc3f7a11-33be-4f89-abf4-62f1f13ad823","Type":"ContainerStarted","Data":"496031f0617503d12839fed7b7d30c20da3f8ccd415138e771a26452f2cdec12"} Jan 22 11:57:19 crc kubenswrapper[4773]: I0122 11:57:19.439694 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm" Jan 22 11:57:19 crc kubenswrapper[4773]: I0122 11:57:19.561495 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vfkxp"] Jan 22 11:57:19 crc kubenswrapper[4773]: W0122 11:57:19.581354 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod44223378_fd96_41e8_9189_558b02fb1d2d.slice/crio-7812948d3b5807150e8279fe5bac0853eca2d4513efb7e9b63ede5f4143d2668 WatchSource:0}: Error finding container 7812948d3b5807150e8279fe5bac0853eca2d4513efb7e9b63ede5f4143d2668: Status 404 returned error can't find the container with id 7812948d3b5807150e8279fe5bac0853eca2d4513efb7e9b63ede5f4143d2668 Jan 22 11:57:19 crc kubenswrapper[4773]: W0122 11:57:19.657754 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-a948b8172076cc92a209ed26bcaf043e22d8d06f26294f681bf15e7ac0caab4e WatchSource:0}: Error finding container 
a948b8172076cc92a209ed26bcaf043e22d8d06f26294f681bf15e7ac0caab4e: Status 404 returned error can't find the container with id a948b8172076cc92a209ed26bcaf043e22d8d06f26294f681bf15e7ac0caab4e Jan 22 11:57:19 crc kubenswrapper[4773]: I0122 11:57:19.658726 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 11:57:19 crc kubenswrapper[4773]: [-]has-synced failed: reason withheld Jan 22 11:57:19 crc kubenswrapper[4773]: [+]process-running ok Jan 22 11:57:19 crc kubenswrapper[4773]: healthz check failed Jan 22 11:57:19 crc kubenswrapper[4773]: I0122 11:57:19.658769 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 11:57:19 crc kubenswrapper[4773]: W0122 11:57:19.667002 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-b50098418f0f09b6ee796c12493ae48ad115dcb09d9d0182183668dfe9b01537 WatchSource:0}: Error finding container b50098418f0f09b6ee796c12493ae48ad115dcb09d9d0182183668dfe9b01537: Status 404 returned error can't find the container with id b50098418f0f09b6ee796c12493ae48ad115dcb09d9d0182183668dfe9b01537 Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.420656 4773 generic.go:334] "Generic (PLEG): container finished" podID="fc3f7a11-33be-4f89-abf4-62f1f13ad823" containerID="61901523bb1789714c72f838e583da57677cb051a129614d7145b78393c5961f" exitCode=0 Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.420996 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dfvhr" event={"ID":"fc3f7a11-33be-4f89-abf4-62f1f13ad823","Type":"ContainerDied","Data":"61901523bb1789714c72f838e583da57677cb051a129614d7145b78393c5961f"} Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.437607 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"b7a60e5e0d468fe62014acd0069c36340e612023e34acb7181e66606ed1566cc"} Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.437743 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"b50098418f0f09b6ee796c12493ae48ad115dcb09d9d0182183668dfe9b01537"} Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.440657 4773 generic.go:334] "Generic (PLEG): container finished" podID="44223378-fd96-41e8-9189-558b02fb1d2d" containerID="0d1ef56f0939ce050cc94f8f7be77bb067980a3501f051c1d06b949b40e20358" exitCode=0 Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.440749 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vfkxp" event={"ID":"44223378-fd96-41e8-9189-558b02fb1d2d","Type":"ContainerDied","Data":"0d1ef56f0939ce050cc94f8f7be77bb067980a3501f051c1d06b949b40e20358"} Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.440774 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-vfkxp" event={"ID":"44223378-fd96-41e8-9189-558b02fb1d2d","Type":"ContainerStarted","Data":"7812948d3b5807150e8279fe5bac0853eca2d4513efb7e9b63ede5f4143d2668"} Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.449117 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"d250614a20695ca6055e6d341ca99facb7b3cb890011ae3083ff2d53ad115d1d"} Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.449188 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"d6b3d5dd00fa66a77e88189099545f0bf9f0d44bf3d94c4eec792f70d4d24c2d"} Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.449355 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.453180 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"8e486897e21e7e5bace2f05f7b136f006762e4e6c06e2fe95ba6f0dec2efa6b4"} Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.453215 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"a948b8172076cc92a209ed26bcaf043e22d8d06f26294f681bf15e7ac0caab4e"} Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.492223 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.492650 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.503977 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.657426 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 11:57:20 crc kubenswrapper[4773]: [-]has-synced failed: reason withheld Jan 22 11:57:20 crc kubenswrapper[4773]: [+]process-running ok Jan 22 11:57:20 crc kubenswrapper[4773]: healthz check failed Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.657484 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.756167 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc" Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.769268 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 22 11:57:20 crc kubenswrapper[4773]: E0122 11:57:20.769504 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8dcf163-8c98-4db0-95e5-1d96e45a619c" containerName="collect-profiles" Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.769515 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8dcf163-8c98-4db0-95e5-1d96e45a619c" containerName="collect-profiles" Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.769608 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8dcf163-8c98-4db0-95e5-1d96e45a619c" containerName="collect-profiles" Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.769935 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.772421 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.772469 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.789201 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.802345 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a8dcf163-8c98-4db0-95e5-1d96e45a619c-config-volume\") pod \"a8dcf163-8c98-4db0-95e5-1d96e45a619c\" (UID: \"a8dcf163-8c98-4db0-95e5-1d96e45a619c\") " Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.802493 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a8dcf163-8c98-4db0-95e5-1d96e45a619c-secret-volume\") pod \"a8dcf163-8c98-4db0-95e5-1d96e45a619c\" (UID: \"a8dcf163-8c98-4db0-95e5-1d96e45a619c\") " Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.802582 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t24gj\" (UniqueName: \"kubernetes.io/projected/a8dcf163-8c98-4db0-95e5-1d96e45a619c-kube-api-access-t24gj\") pod \"a8dcf163-8c98-4db0-95e5-1d96e45a619c\" (UID: \"a8dcf163-8c98-4db0-95e5-1d96e45a619c\") " Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.803135 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a8dcf163-8c98-4db0-95e5-1d96e45a619c-config-volume" (OuterVolumeSpecName: "config-volume") pod "a8dcf163-8c98-4db0-95e5-1d96e45a619c" (UID: "a8dcf163-8c98-4db0-95e5-1d96e45a619c"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.806321 4773 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a8dcf163-8c98-4db0-95e5-1d96e45a619c-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.810221 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8dcf163-8c98-4db0-95e5-1d96e45a619c-kube-api-access-t24gj" (OuterVolumeSpecName: "kube-api-access-t24gj") pod "a8dcf163-8c98-4db0-95e5-1d96e45a619c" (UID: "a8dcf163-8c98-4db0-95e5-1d96e45a619c"). InnerVolumeSpecName "kube-api-access-t24gj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.814056 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8dcf163-8c98-4db0-95e5-1d96e45a619c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a8dcf163-8c98-4db0-95e5-1d96e45a619c" (UID: "a8dcf163-8c98-4db0-95e5-1d96e45a619c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.908376 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3e9cbef4-c960-4f6c-9daa-8a253b2479a9-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"3e9cbef4-c960-4f6c-9daa-8a253b2479a9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.908463 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3e9cbef4-c960-4f6c-9daa-8a253b2479a9-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"3e9cbef4-c960-4f6c-9daa-8a253b2479a9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.908588 4773 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a8dcf163-8c98-4db0-95e5-1d96e45a619c-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 11:57:20 crc kubenswrapper[4773]: I0122 11:57:20.908771 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t24gj\" (UniqueName: \"kubernetes.io/projected/a8dcf163-8c98-4db0-95e5-1d96e45a619c-kube-api-access-t24gj\") on node \"crc\" DevicePath \"\"" Jan 22 11:57:21 crc kubenswrapper[4773]: I0122 11:57:21.010034 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3e9cbef4-c960-4f6c-9daa-8a253b2479a9-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"3e9cbef4-c960-4f6c-9daa-8a253b2479a9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 11:57:21 crc kubenswrapper[4773]: I0122 11:57:21.010084 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3e9cbef4-c960-4f6c-9daa-8a253b2479a9-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"3e9cbef4-c960-4f6c-9daa-8a253b2479a9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 11:57:21 crc kubenswrapper[4773]: I0122 11:57:21.010169 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: 
\"kubernetes.io/host-path/3e9cbef4-c960-4f6c-9daa-8a253b2479a9-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"3e9cbef4-c960-4f6c-9daa-8a253b2479a9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 11:57:21 crc kubenswrapper[4773]: I0122 11:57:21.030975 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3e9cbef4-c960-4f6c-9daa-8a253b2479a9-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"3e9cbef4-c960-4f6c-9daa-8a253b2479a9\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 11:57:21 crc kubenswrapper[4773]: I0122 11:57:21.102218 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 11:57:21 crc kubenswrapper[4773]: I0122 11:57:21.445574 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 22 11:57:21 crc kubenswrapper[4773]: W0122 11:57:21.471402 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod3e9cbef4_c960_4f6c_9daa_8a253b2479a9.slice/crio-e0056846e47163d7299c7dad05e948cfe6c861d5d0cf0179742d66efc81adad1 WatchSource:0}: Error finding container e0056846e47163d7299c7dad05e948cfe6c861d5d0cf0179742d66efc81adad1: Status 404 returned error can't find the container with id e0056846e47163d7299c7dad05e948cfe6c861d5d0cf0179742d66efc81adad1 Jan 22 11:57:21 crc kubenswrapper[4773]: I0122 11:57:21.471871 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc" event={"ID":"a8dcf163-8c98-4db0-95e5-1d96e45a619c","Type":"ContainerDied","Data":"77c0fef9997351ad7e34f3a743454b492b1fbb26f6621a431ca0911c05c64dc4"} Jan 22 11:57:21 crc kubenswrapper[4773]: I0122 11:57:21.471924 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77c0fef9997351ad7e34f3a743454b492b1fbb26f6621a431ca0911c05c64dc4" Jan 22 11:57:21 crc kubenswrapper[4773]: I0122 11:57:21.472516 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc" Jan 22 11:57:21 crc kubenswrapper[4773]: I0122 11:57:21.484024 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-xqd5z" Jan 22 11:57:21 crc kubenswrapper[4773]: I0122 11:57:21.656833 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 11:57:21 crc kubenswrapper[4773]: [-]has-synced failed: reason withheld Jan 22 11:57:21 crc kubenswrapper[4773]: [+]process-running ok Jan 22 11:57:21 crc kubenswrapper[4773]: healthz check failed Jan 22 11:57:21 crc kubenswrapper[4773]: I0122 11:57:21.657194 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 11:57:22 crc kubenswrapper[4773]: I0122 11:57:22.491429 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"3e9cbef4-c960-4f6c-9daa-8a253b2479a9","Type":"ContainerStarted","Data":"dd0339a80ff99e1c92ee097ccbbaeb5ab47b87436d7900ea709dc7f97a53ad27"} Jan 22 11:57:22 crc kubenswrapper[4773]: I0122 11:57:22.491476 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"3e9cbef4-c960-4f6c-9daa-8a253b2479a9","Type":"ContainerStarted","Data":"e0056846e47163d7299c7dad05e948cfe6c861d5d0cf0179742d66efc81adad1"} Jan 22 11:57:22 crc kubenswrapper[4773]: I0122 11:57:22.657659 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 11:57:22 crc kubenswrapper[4773]: [-]has-synced failed: reason withheld Jan 22 11:57:22 crc kubenswrapper[4773]: [+]process-running ok Jan 22 11:57:22 crc kubenswrapper[4773]: healthz check failed Jan 22 11:57:22 crc kubenswrapper[4773]: I0122 11:57:22.657713 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 11:57:23 crc kubenswrapper[4773]: I0122 11:57:23.397587 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 22 11:57:23 crc kubenswrapper[4773]: I0122 11:57:23.398585 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 11:57:23 crc kubenswrapper[4773]: I0122 11:57:23.401445 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 22 11:57:23 crc kubenswrapper[4773]: I0122 11:57:23.401708 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 22 11:57:23 crc kubenswrapper[4773]: I0122 11:57:23.416508 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 22 11:57:23 crc kubenswrapper[4773]: I0122 11:57:23.472841 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/17d40173-22c6-45b3-a66a-a7f4ef0e9dba-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"17d40173-22c6-45b3-a66a-a7f4ef0e9dba\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 11:57:23 crc kubenswrapper[4773]: I0122 11:57:23.472910 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/17d40173-22c6-45b3-a66a-a7f4ef0e9dba-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"17d40173-22c6-45b3-a66a-a7f4ef0e9dba\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 11:57:23 crc kubenswrapper[4773]: I0122 11:57:23.518018 4773 generic.go:334] "Generic (PLEG): container finished" podID="3e9cbef4-c960-4f6c-9daa-8a253b2479a9" containerID="dd0339a80ff99e1c92ee097ccbbaeb5ab47b87436d7900ea709dc7f97a53ad27" exitCode=0 Jan 22 11:57:23 crc kubenswrapper[4773]: I0122 11:57:23.518068 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"3e9cbef4-c960-4f6c-9daa-8a253b2479a9","Type":"ContainerDied","Data":"dd0339a80ff99e1c92ee097ccbbaeb5ab47b87436d7900ea709dc7f97a53ad27"} Jan 22 11:57:23 crc kubenswrapper[4773]: I0122 11:57:23.574312 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/17d40173-22c6-45b3-a66a-a7f4ef0e9dba-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"17d40173-22c6-45b3-a66a-a7f4ef0e9dba\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 11:57:23 crc kubenswrapper[4773]: I0122 11:57:23.574397 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/17d40173-22c6-45b3-a66a-a7f4ef0e9dba-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"17d40173-22c6-45b3-a66a-a7f4ef0e9dba\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 11:57:23 crc kubenswrapper[4773]: I0122 11:57:23.574545 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/17d40173-22c6-45b3-a66a-a7f4ef0e9dba-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"17d40173-22c6-45b3-a66a-a7f4ef0e9dba\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 11:57:23 crc kubenswrapper[4773]: I0122 11:57:23.594066 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/17d40173-22c6-45b3-a66a-a7f4ef0e9dba-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"17d40173-22c6-45b3-a66a-a7f4ef0e9dba\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" 
Jan 22 11:57:23 crc kubenswrapper[4773]: I0122 11:57:23.659498 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 11:57:23 crc kubenswrapper[4773]: [-]has-synced failed: reason withheld Jan 22 11:57:23 crc kubenswrapper[4773]: [+]process-running ok Jan 22 11:57:23 crc kubenswrapper[4773]: healthz check failed Jan 22 11:57:23 crc kubenswrapper[4773]: I0122 11:57:23.659563 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 11:57:23 crc kubenswrapper[4773]: I0122 11:57:23.736615 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 11:57:24 crc kubenswrapper[4773]: I0122 11:57:24.213416 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-5ktrv" Jan 22 11:57:24 crc kubenswrapper[4773]: I0122 11:57:24.252398 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 22 11:57:24 crc kubenswrapper[4773]: I0122 11:57:24.535214 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"17d40173-22c6-45b3-a66a-a7f4ef0e9dba","Type":"ContainerStarted","Data":"7ec30f6b7a9e1c8d936c88490cda962ce54c132dca657ddc6f6b71b58fad853e"} Jan 22 11:57:24 crc kubenswrapper[4773]: I0122 11:57:24.660345 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 11:57:24 crc kubenswrapper[4773]: [-]has-synced failed: reason withheld Jan 22 11:57:24 crc kubenswrapper[4773]: [+]process-running ok Jan 22 11:57:24 crc kubenswrapper[4773]: healthz check failed Jan 22 11:57:24 crc kubenswrapper[4773]: I0122 11:57:24.660436 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 11:57:24 crc kubenswrapper[4773]: I0122 11:57:24.855197 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 11:57:25 crc kubenswrapper[4773]: I0122 11:57:25.008110 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3e9cbef4-c960-4f6c-9daa-8a253b2479a9-kube-api-access\") pod \"3e9cbef4-c960-4f6c-9daa-8a253b2479a9\" (UID: \"3e9cbef4-c960-4f6c-9daa-8a253b2479a9\") " Jan 22 11:57:25 crc kubenswrapper[4773]: I0122 11:57:25.008213 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3e9cbef4-c960-4f6c-9daa-8a253b2479a9-kubelet-dir\") pod \"3e9cbef4-c960-4f6c-9daa-8a253b2479a9\" (UID: \"3e9cbef4-c960-4f6c-9daa-8a253b2479a9\") " Jan 22 11:57:25 crc kubenswrapper[4773]: I0122 11:57:25.008303 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3e9cbef4-c960-4f6c-9daa-8a253b2479a9-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "3e9cbef4-c960-4f6c-9daa-8a253b2479a9" (UID: "3e9cbef4-c960-4f6c-9daa-8a253b2479a9"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 11:57:25 crc kubenswrapper[4773]: I0122 11:57:25.008511 4773 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/3e9cbef4-c960-4f6c-9daa-8a253b2479a9-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 22 11:57:25 crc kubenswrapper[4773]: I0122 11:57:25.034524 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e9cbef4-c960-4f6c-9daa-8a253b2479a9-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "3e9cbef4-c960-4f6c-9daa-8a253b2479a9" (UID: "3e9cbef4-c960-4f6c-9daa-8a253b2479a9"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:57:25 crc kubenswrapper[4773]: I0122 11:57:25.110206 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3e9cbef4-c960-4f6c-9daa-8a253b2479a9-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 11:57:25 crc kubenswrapper[4773]: I0122 11:57:25.555450 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"3e9cbef4-c960-4f6c-9daa-8a253b2479a9","Type":"ContainerDied","Data":"e0056846e47163d7299c7dad05e948cfe6c861d5d0cf0179742d66efc81adad1"} Jan 22 11:57:25 crc kubenswrapper[4773]: I0122 11:57:25.555487 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0056846e47163d7299c7dad05e948cfe6c861d5d0cf0179742d66efc81adad1" Jan 22 11:57:25 crc kubenswrapper[4773]: I0122 11:57:25.555506 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 11:57:25 crc kubenswrapper[4773]: I0122 11:57:25.656609 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 11:57:25 crc kubenswrapper[4773]: [-]has-synced failed: reason withheld Jan 22 11:57:25 crc kubenswrapper[4773]: [+]process-running ok Jan 22 11:57:25 crc kubenswrapper[4773]: healthz check failed Jan 22 11:57:25 crc kubenswrapper[4773]: I0122 11:57:25.656864 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 11:57:26 crc kubenswrapper[4773]: I0122 11:57:26.576115 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"17d40173-22c6-45b3-a66a-a7f4ef0e9dba","Type":"ContainerStarted","Data":"5b84ff8b8ff0b45ed1780e36cbf5f0f2d460e38ad9f0f32985c5e824317c2d57"} Jan 22 11:57:26 crc kubenswrapper[4773]: I0122 11:57:26.597864 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.597847441 podStartE2EDuration="3.597847441s" podCreationTimestamp="2026-01-22 11:57:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:57:26.596033232 +0000 UTC m=+154.174149057" watchObservedRunningTime="2026-01-22 11:57:26.597847441 +0000 UTC m=+154.175963256" Jan 22 11:57:26 crc kubenswrapper[4773]: I0122 11:57:26.661045 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 11:57:26 crc kubenswrapper[4773]: [-]has-synced failed: reason withheld Jan 22 11:57:26 crc kubenswrapper[4773]: [+]process-running ok Jan 22 11:57:26 crc kubenswrapper[4773]: healthz check failed Jan 22 11:57:26 crc kubenswrapper[4773]: I0122 11:57:26.661093 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 11:57:27 crc kubenswrapper[4773]: I0122 11:57:27.599575 4773 generic.go:334] "Generic (PLEG): container finished" podID="17d40173-22c6-45b3-a66a-a7f4ef0e9dba" containerID="5b84ff8b8ff0b45ed1780e36cbf5f0f2d460e38ad9f0f32985c5e824317c2d57" exitCode=0 Jan 22 11:57:27 crc kubenswrapper[4773]: I0122 11:57:27.599630 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"17d40173-22c6-45b3-a66a-a7f4ef0e9dba","Type":"ContainerDied","Data":"5b84ff8b8ff0b45ed1780e36cbf5f0f2d460e38ad9f0f32985c5e824317c2d57"} Jan 22 11:57:27 crc kubenswrapper[4773]: I0122 11:57:27.655149 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 11:57:27 crc 
Jan 22 11:57:28 crc kubenswrapper[4773]: I0122 11:57:28.520537 4773 patch_prober.go:28] interesting pod/console-f9d7485db-lrxk6 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body=
Jan 22 11:57:28 crc kubenswrapper[4773]: I0122 11:57:28.520910 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-lrxk6" podUID="139ce7fa-7d44-4233-b4e9-b7827ce4c68d" containerName="console" probeResult="failure" output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused"
Jan 22 11:57:28 crc kubenswrapper[4773]: I0122 11:57:28.654948 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 22 11:57:28 crc kubenswrapper[4773]: [-]has-synced failed: reason withheld
Jan 22 11:57:28 crc kubenswrapper[4773]: [+]process-running ok
Jan 22 11:57:28 crc kubenswrapper[4773]: healthz check failed
Jan 22 11:57:28 crc kubenswrapper[4773]: I0122 11:57:28.655037 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 22 11:57:28 crc kubenswrapper[4773]: I0122 11:57:28.929023 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-trjxm"
Jan 22 11:57:30 crc kubenswrapper[4773]: I0122 11:57:30.390593 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 22 11:57:30 crc kubenswrapper[4773]: [+]has-synced ok
Jan 22 11:57:30 crc kubenswrapper[4773]: [+]process-running ok
Jan 22 11:57:30 crc kubenswrapper[4773]: healthz check failed
Jan 22 11:57:30 crc kubenswrapper[4773]: I0122 11:57:30.390653 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 22 11:57:30 crc kubenswrapper[4773]: I0122 11:57:30.804781 4773 patch_prober.go:28] interesting pod/router-default-5444994796-zmgk2 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 22 11:57:30 crc kubenswrapper[4773]: [+]has-synced ok
Jan 22 11:57:30 crc kubenswrapper[4773]: [+]process-running ok
Jan 22 11:57:30 crc kubenswrapper[4773]: healthz check failed
Jan 22 11:57:30 crc kubenswrapper[4773]: I0122 11:57:30.805200 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-zmgk2" podUID="80b4c3da-59cb-4154-ade0-92772eb0fe8a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 22 11:57:31 crc kubenswrapper[4773]: I0122 11:57:31.666998 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-zmgk2"
Jan 22 11:57:31 crc kubenswrapper[4773]: I0122 11:57:31.702411 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-zmgk2"
Jan 22 11:57:33 crc kubenswrapper[4773]: I0122 11:57:33.302595 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs\") pod \"network-metrics-daemon-5tqwr\" (UID: \"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\") " pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:57:33 crc kubenswrapper[4773]: I0122 11:57:33.311446 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c1ed4b8d-def5-474b-8629-cd0bae7e49a6-metrics-certs\") pod \"network-metrics-daemon-5tqwr\" (UID: \"c1ed4b8d-def5-474b-8629-cd0bae7e49a6\") " pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:57:33 crc kubenswrapper[4773]: I0122 11:57:33.385946 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-5tqwr"
Jan 22 11:57:34 crc kubenswrapper[4773]: I0122 11:57:34.077538 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 11:57:34 crc kubenswrapper[4773]: I0122 11:57:34.077646 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 11:57:34 crc kubenswrapper[4773]: I0122 11:57:34.195871 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-r5t5b"]
Jan 22 11:57:34 crc kubenswrapper[4773]: I0122 11:57:34.197399 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" podUID="143cf6af-d9df-44fc-8281-17ee0a86807c" containerName="controller-manager" containerID="cri-o://3a9609fafdbec2f25ef045edc84b114e7a16cfe30f69618ecb2e2c38a522e1ce" gracePeriod=30
Jan 22 11:57:34 crc kubenswrapper[4773]: I0122 11:57:34.209427 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv"]
Jan 22 11:57:34 crc kubenswrapper[4773]: I0122 11:57:34.209691 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" podUID="ea377cf5-fbd8-462d-bfea-dd3aca3da018" containerName="route-controller-manager" containerID="cri-o://108029edb25e91ff04a4445e50163c7808179e8f6451ae20a9a52d248631a120" gracePeriod=30
Jan 22 11:57:34 crc kubenswrapper[4773]: I0122 11:57:34.912186 4773 generic.go:334] "Generic (PLEG): container finished" podID="ea377cf5-fbd8-462d-bfea-dd3aca3da018" containerID="108029edb25e91ff04a4445e50163c7808179e8f6451ae20a9a52d248631a120" exitCode=0
Jan 22 11:57:34 crc kubenswrapper[4773]: I0122 11:57:34.912300 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" event={"ID":"ea377cf5-fbd8-462d-bfea-dd3aca3da018","Type":"ContainerDied","Data":"108029edb25e91ff04a4445e50163c7808179e8f6451ae20a9a52d248631a120"}
Jan 22 11:57:34 crc kubenswrapper[4773]: I0122 11:57:34.914506 4773 generic.go:334] "Generic (PLEG): container finished" podID="143cf6af-d9df-44fc-8281-17ee0a86807c" containerID="3a9609fafdbec2f25ef045edc84b114e7a16cfe30f69618ecb2e2c38a522e1ce" exitCode=0
Jan 22 11:57:34 crc kubenswrapper[4773]: I0122 11:57:34.914537 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" event={"ID":"143cf6af-d9df-44fc-8281-17ee0a86807c","Type":"ContainerDied","Data":"3a9609fafdbec2f25ef045edc84b114e7a16cfe30f69618ecb2e2c38a522e1ce"}
Jan 22 11:57:36 crc kubenswrapper[4773]: I0122 11:57:36.393725 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 22 11:57:36 crc kubenswrapper[4773]: I0122 11:57:36.540267 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/17d40173-22c6-45b3-a66a-a7f4ef0e9dba-kubelet-dir\") pod \"17d40173-22c6-45b3-a66a-a7f4ef0e9dba\" (UID: \"17d40173-22c6-45b3-a66a-a7f4ef0e9dba\") "
Jan 22 11:57:36 crc kubenswrapper[4773]: I0122 11:57:36.540375 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/17d40173-22c6-45b3-a66a-a7f4ef0e9dba-kube-api-access\") pod \"17d40173-22c6-45b3-a66a-a7f4ef0e9dba\" (UID: \"17d40173-22c6-45b3-a66a-a7f4ef0e9dba\") "
Jan 22 11:57:36 crc kubenswrapper[4773]: I0122 11:57:36.540420 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/17d40173-22c6-45b3-a66a-a7f4ef0e9dba-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "17d40173-22c6-45b3-a66a-a7f4ef0e9dba" (UID: "17d40173-22c6-45b3-a66a-a7f4ef0e9dba"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 11:57:36 crc kubenswrapper[4773]: I0122 11:57:36.542510 4773 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/17d40173-22c6-45b3-a66a-a7f4ef0e9dba-kubelet-dir\") on node \"crc\" DevicePath \"\""
Jan 22 11:57:36 crc kubenswrapper[4773]: I0122 11:57:36.554618 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17d40173-22c6-45b3-a66a-a7f4ef0e9dba-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "17d40173-22c6-45b3-a66a-a7f4ef0e9dba" (UID: "17d40173-22c6-45b3-a66a-a7f4ef0e9dba"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
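The "Killing container with a grace period ... gracePeriod=30" entries followed by "ContainerDied ... exitCode=0" above are the normal termination path: the runtime delivers SIGTERM, waits up to the grace period for the process to exit, and only then escalates to SIGKILL; exit code 0 shows both controller-manager containers shut down cleanly inside the window. A sketch of that contract against a local process (illustrative; the kubelet drives this through the CRI, not os/exec):

package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// stopWithGrace sends SIGTERM, waits up to grace for the process to
// exit, then falls back to SIGKILL -- the same contract as a pod's
// terminationGracePeriodSeconds.
func stopWithGrace(cmd *exec.Cmd, grace time.Duration) error {
	_ = cmd.Process.Signal(syscall.SIGTERM)
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	select {
	case err := <-done:
		return err // exited on its own within the window
	case <-time.After(grace):
		_ = cmd.Process.Kill() // escalate to SIGKILL
		return <-done
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	_ = cmd.Start()
	fmt.Println(stopWithGrace(cmd, 30*time.Second))
}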
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:57:36 crc kubenswrapper[4773]: I0122 11:57:36.643784 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/17d40173-22c6-45b3-a66a-a7f4ef0e9dba-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 11:57:37 crc kubenswrapper[4773]: I0122 11:57:37.019819 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"17d40173-22c6-45b3-a66a-a7f4ef0e9dba","Type":"ContainerDied","Data":"7ec30f6b7a9e1c8d936c88490cda962ce54c132dca657ddc6f6b71b58fad853e"} Jan 22 11:57:37 crc kubenswrapper[4773]: I0122 11:57:37.019878 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ec30f6b7a9e1c8d936c88490cda962ce54c132dca657ddc6f6b71b58fad853e" Jan 22 11:57:37 crc kubenswrapper[4773]: I0122 11:57:37.020128 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 11:57:37 crc kubenswrapper[4773]: I0122 11:57:37.443552 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 11:57:38 crc kubenswrapper[4773]: I0122 11:57:38.525083 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-lrxk6" Jan 22 11:57:38 crc kubenswrapper[4773]: I0122 11:57:38.529468 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-lrxk6" Jan 22 11:57:38 crc kubenswrapper[4773]: I0122 11:57:38.565110 4773 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-5jllv container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Jan 22 11:57:38 crc kubenswrapper[4773]: I0122 11:57:38.565178 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" podUID="ea377cf5-fbd8-462d-bfea-dd3aca3da018" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" Jan 22 11:57:40 crc kubenswrapper[4773]: I0122 11:57:40.566073 4773 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-r5t5b container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body= Jan 22 11:57:40 crc kubenswrapper[4773]: I0122 11:57:40.566168 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" podUID="143cf6af-d9df-44fc-8281-17ee0a86807c" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" Jan 22 11:57:48 crc kubenswrapper[4773]: I0122 11:57:48.565477 4773 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-5jllv container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" 
start-of-body= Jan 22 11:57:48 crc kubenswrapper[4773]: I0122 11:57:48.566038 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" podUID="ea377cf5-fbd8-462d-bfea-dd3aca3da018" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" Jan 22 11:57:49 crc kubenswrapper[4773]: I0122 11:57:49.452618 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-tqwkj" Jan 22 11:57:51 crc kubenswrapper[4773]: I0122 11:57:51.596166 4773 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-r5t5b container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 22 11:57:51 crc kubenswrapper[4773]: I0122 11:57:51.596313 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" podUID="143cf6af-d9df-44fc-8281-17ee0a86807c" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.35:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 22 11:57:51 crc kubenswrapper[4773]: I0122 11:57:51.977046 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.053941 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-config\") pod \"143cf6af-d9df-44fc-8281-17ee0a86807c\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.053994 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-client-ca\") pod \"143cf6af-d9df-44fc-8281-17ee0a86807c\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.054021 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sg6pk\" (UniqueName: \"kubernetes.io/projected/143cf6af-d9df-44fc-8281-17ee0a86807c-kube-api-access-sg6pk\") pod \"143cf6af-d9df-44fc-8281-17ee0a86807c\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.054142 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/143cf6af-d9df-44fc-8281-17ee0a86807c-serving-cert\") pod \"143cf6af-d9df-44fc-8281-17ee0a86807c\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.054205 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-proxy-ca-bundles\") pod \"143cf6af-d9df-44fc-8281-17ee0a86807c\" (UID: \"143cf6af-d9df-44fc-8281-17ee0a86807c\") " Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.055614 4773 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "143cf6af-d9df-44fc-8281-17ee0a86807c" (UID: "143cf6af-d9df-44fc-8281-17ee0a86807c"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.055716 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-client-ca" (OuterVolumeSpecName: "client-ca") pod "143cf6af-d9df-44fc-8281-17ee0a86807c" (UID: "143cf6af-d9df-44fc-8281-17ee0a86807c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.057668 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-667964fbf9-n2jf8"] Jan 22 11:57:52 crc kubenswrapper[4773]: E0122 11:57:52.058833 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e9cbef4-c960-4f6c-9daa-8a253b2479a9" containerName="pruner" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.058875 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e9cbef4-c960-4f6c-9daa-8a253b2479a9" containerName="pruner" Jan 22 11:57:52 crc kubenswrapper[4773]: E0122 11:57:52.058921 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17d40173-22c6-45b3-a66a-a7f4ef0e9dba" containerName="pruner" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.058934 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="17d40173-22c6-45b3-a66a-a7f4ef0e9dba" containerName="pruner" Jan 22 11:57:52 crc kubenswrapper[4773]: E0122 11:57:52.058949 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="143cf6af-d9df-44fc-8281-17ee0a86807c" containerName="controller-manager" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.058960 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="143cf6af-d9df-44fc-8281-17ee0a86807c" containerName="controller-manager" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.059131 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e9cbef4-c960-4f6c-9daa-8a253b2479a9" containerName="pruner" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.059153 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="17d40173-22c6-45b3-a66a-a7f4ef0e9dba" containerName="pruner" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.059169 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="143cf6af-d9df-44fc-8281-17ee0a86807c" containerName="controller-manager" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.058364 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-config" (OuterVolumeSpecName: "config") pod "143cf6af-d9df-44fc-8281-17ee0a86807c" (UID: "143cf6af-d9df-44fc-8281-17ee0a86807c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.059964 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.061268 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/143cf6af-d9df-44fc-8281-17ee0a86807c-kube-api-access-sg6pk" (OuterVolumeSpecName: "kube-api-access-sg6pk") pod "143cf6af-d9df-44fc-8281-17ee0a86807c" (UID: "143cf6af-d9df-44fc-8281-17ee0a86807c"). InnerVolumeSpecName "kube-api-access-sg6pk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.067762 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-667964fbf9-n2jf8"] Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.067883 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/143cf6af-d9df-44fc-8281-17ee0a86807c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "143cf6af-d9df-44fc-8281-17ee0a86807c" (UID: "143cf6af-d9df-44fc-8281-17ee0a86807c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.156582 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2333d480-ea82-4a7a-89a5-c6576911a2fc-proxy-ca-bundles\") pod \"controller-manager-667964fbf9-n2jf8\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.156847 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2333d480-ea82-4a7a-89a5-c6576911a2fc-serving-cert\") pod \"controller-manager-667964fbf9-n2jf8\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.156979 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7g56b\" (UniqueName: \"kubernetes.io/projected/2333d480-ea82-4a7a-89a5-c6576911a2fc-kube-api-access-7g56b\") pod \"controller-manager-667964fbf9-n2jf8\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.157153 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2333d480-ea82-4a7a-89a5-c6576911a2fc-config\") pod \"controller-manager-667964fbf9-n2jf8\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.157254 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2333d480-ea82-4a7a-89a5-c6576911a2fc-client-ca\") pod \"controller-manager-667964fbf9-n2jf8\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.157596 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.157622 4773 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.157638 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sg6pk\" (UniqueName: \"kubernetes.io/projected/143cf6af-d9df-44fc-8281-17ee0a86807c-kube-api-access-sg6pk\") on node \"crc\" DevicePath \"\"" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.157659 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/143cf6af-d9df-44fc-8281-17ee0a86807c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.157673 4773 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/143cf6af-d9df-44fc-8281-17ee0a86807c-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.258877 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2333d480-ea82-4a7a-89a5-c6576911a2fc-config\") pod \"controller-manager-667964fbf9-n2jf8\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.258929 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2333d480-ea82-4a7a-89a5-c6576911a2fc-client-ca\") pod \"controller-manager-667964fbf9-n2jf8\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.258981 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2333d480-ea82-4a7a-89a5-c6576911a2fc-proxy-ca-bundles\") pod \"controller-manager-667964fbf9-n2jf8\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.259013 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2333d480-ea82-4a7a-89a5-c6576911a2fc-serving-cert\") pod \"controller-manager-667964fbf9-n2jf8\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.259070 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7g56b\" (UniqueName: \"kubernetes.io/projected/2333d480-ea82-4a7a-89a5-c6576911a2fc-kube-api-access-7g56b\") pod \"controller-manager-667964fbf9-n2jf8\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.260147 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2333d480-ea82-4a7a-89a5-c6576911a2fc-client-ca\") pod 
\"controller-manager-667964fbf9-n2jf8\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.260647 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2333d480-ea82-4a7a-89a5-c6576911a2fc-proxy-ca-bundles\") pod \"controller-manager-667964fbf9-n2jf8\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.260745 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2333d480-ea82-4a7a-89a5-c6576911a2fc-config\") pod \"controller-manager-667964fbf9-n2jf8\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.272932 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2333d480-ea82-4a7a-89a5-c6576911a2fc-serving-cert\") pod \"controller-manager-667964fbf9-n2jf8\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.276393 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7g56b\" (UniqueName: \"kubernetes.io/projected/2333d480-ea82-4a7a-89a5-c6576911a2fc-kube-api-access-7g56b\") pod \"controller-manager-667964fbf9-n2jf8\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.416143 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.488801 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" event={"ID":"143cf6af-d9df-44fc-8281-17ee0a86807c","Type":"ContainerDied","Data":"ba989a4aff6faef36ca241ddcea8cce4d90c678b1a2bc3d0216a7246f1ba4e0a"} Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.488889 4773 scope.go:117] "RemoveContainer" containerID="3a9609fafdbec2f25ef045edc84b114e7a16cfe30f69618ecb2e2c38a522e1ce" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.488883 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-r5t5b" Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.530972 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-r5t5b"] Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.536103 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-r5t5b"] Jan 22 11:57:52 crc kubenswrapper[4773]: I0122 11:57:52.665459 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="143cf6af-d9df-44fc-8281-17ee0a86807c" path="/var/lib/kubelet/pods/143cf6af-d9df-44fc-8281-17ee0a86807c/volumes" Jan 22 11:57:54 crc kubenswrapper[4773]: I0122 11:57:54.102366 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-667964fbf9-n2jf8"] Jan 22 11:57:59 crc kubenswrapper[4773]: I0122 11:57:59.190243 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 11:57:59 crc kubenswrapper[4773]: I0122 11:57:59.565820 4773 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-5jllv container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: i/o timeout" start-of-body= Jan 22 11:57:59 crc kubenswrapper[4773]: I0122 11:57:59.566122 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" podUID="ea377cf5-fbd8-462d-bfea-dd3aca3da018" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: i/o timeout" Jan 22 11:58:00 crc kubenswrapper[4773]: I0122 11:58:00.606670 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 22 11:58:00 crc kubenswrapper[4773]: I0122 11:58:00.607621 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 11:58:00 crc kubenswrapper[4773]: I0122 11:58:00.610635 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 22 11:58:00 crc kubenswrapper[4773]: I0122 11:58:00.611043 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 22 11:58:00 crc kubenswrapper[4773]: I0122 11:58:00.613305 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 22 11:58:00 crc kubenswrapper[4773]: I0122 11:58:00.776838 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cb1e7d62-6878-4cd6-8e11-ba246971e0ef-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"cb1e7d62-6878-4cd6-8e11-ba246971e0ef\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 11:58:00 crc kubenswrapper[4773]: I0122 11:58:00.776992 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cb1e7d62-6878-4cd6-8e11-ba246971e0ef-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"cb1e7d62-6878-4cd6-8e11-ba246971e0ef\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 11:58:00 crc kubenswrapper[4773]: I0122 11:58:00.882172 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cb1e7d62-6878-4cd6-8e11-ba246971e0ef-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"cb1e7d62-6878-4cd6-8e11-ba246971e0ef\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 11:58:00 crc kubenswrapper[4773]: I0122 11:58:00.882268 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cb1e7d62-6878-4cd6-8e11-ba246971e0ef-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"cb1e7d62-6878-4cd6-8e11-ba246971e0ef\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 11:58:00 crc kubenswrapper[4773]: I0122 11:58:00.882371 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cb1e7d62-6878-4cd6-8e11-ba246971e0ef-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"cb1e7d62-6878-4cd6-8e11-ba246971e0ef\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 11:58:00 crc kubenswrapper[4773]: I0122 11:58:00.905467 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cb1e7d62-6878-4cd6-8e11-ba246971e0ef-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"cb1e7d62-6878-4cd6-8e11-ba246971e0ef\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 11:58:00 crc kubenswrapper[4773]: I0122 11:58:00.934098 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 11:58:04 crc kubenswrapper[4773]: I0122 11:58:04.094748 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 11:58:04 crc kubenswrapper[4773]: I0122 11:58:04.095090 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 11:58:04 crc kubenswrapper[4773]: E0122 11:58:04.136685 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 22 11:58:04 crc kubenswrapper[4773]: E0122 11:58:04.137175 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5v9ht,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-vfkxp_openshift-marketplace(44223378-fd96-41e8-9189-558b02fb1d2d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 11:58:04 crc kubenswrapper[4773]: E0122 11:58:04.138389 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-vfkxp" podUID="44223378-fd96-41e8-9189-558b02fb1d2d" Jan 22 11:58:04 crc kubenswrapper[4773]: I0122 11:58:04.809786 4773 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-kube-apiserver/installer-9-crc"] Jan 22 11:58:04 crc kubenswrapper[4773]: I0122 11:58:04.812748 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 11:58:04 crc kubenswrapper[4773]: I0122 11:58:04.815930 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 22 11:58:04 crc kubenswrapper[4773]: I0122 11:58:04.928759 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/59e7aa07-30e5-4e9b-9cd8-f80054e9d41c-kube-api-access\") pod \"installer-9-crc\" (UID: \"59e7aa07-30e5-4e9b-9cd8-f80054e9d41c\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 11:58:04 crc kubenswrapper[4773]: I0122 11:58:04.928830 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/59e7aa07-30e5-4e9b-9cd8-f80054e9d41c-var-lock\") pod \"installer-9-crc\" (UID: \"59e7aa07-30e5-4e9b-9cd8-f80054e9d41c\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 11:58:04 crc kubenswrapper[4773]: I0122 11:58:04.928868 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/59e7aa07-30e5-4e9b-9cd8-f80054e9d41c-kubelet-dir\") pod \"installer-9-crc\" (UID: \"59e7aa07-30e5-4e9b-9cd8-f80054e9d41c\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 11:58:05 crc kubenswrapper[4773]: I0122 11:58:05.029980 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/59e7aa07-30e5-4e9b-9cd8-f80054e9d41c-kube-api-access\") pod \"installer-9-crc\" (UID: \"59e7aa07-30e5-4e9b-9cd8-f80054e9d41c\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 11:58:05 crc kubenswrapper[4773]: I0122 11:58:05.030049 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/59e7aa07-30e5-4e9b-9cd8-f80054e9d41c-var-lock\") pod \"installer-9-crc\" (UID: \"59e7aa07-30e5-4e9b-9cd8-f80054e9d41c\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 11:58:05 crc kubenswrapper[4773]: I0122 11:58:05.030103 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/59e7aa07-30e5-4e9b-9cd8-f80054e9d41c-kubelet-dir\") pod \"installer-9-crc\" (UID: \"59e7aa07-30e5-4e9b-9cd8-f80054e9d41c\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 11:58:05 crc kubenswrapper[4773]: I0122 11:58:05.030229 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/59e7aa07-30e5-4e9b-9cd8-f80054e9d41c-kubelet-dir\") pod \"installer-9-crc\" (UID: \"59e7aa07-30e5-4e9b-9cd8-f80054e9d41c\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 11:58:05 crc kubenswrapper[4773]: I0122 11:58:05.030231 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/59e7aa07-30e5-4e9b-9cd8-f80054e9d41c-var-lock\") pod \"installer-9-crc\" (UID: \"59e7aa07-30e5-4e9b-9cd8-f80054e9d41c\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 11:58:05 crc kubenswrapper[4773]: I0122 11:58:05.048905 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/59e7aa07-30e5-4e9b-9cd8-f80054e9d41c-kube-api-access\") pod \"installer-9-crc\" (UID: \"59e7aa07-30e5-4e9b-9cd8-f80054e9d41c\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 11:58:05 crc kubenswrapper[4773]: I0122 11:58:05.138923 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 11:58:05 crc kubenswrapper[4773]: E0122 11:58:05.723694 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-vfkxp" podUID="44223378-fd96-41e8-9189-558b02fb1d2d" Jan 22 11:58:05 crc kubenswrapper[4773]: E0122 11:58:05.786505 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 22 11:58:05 crc kubenswrapper[4773]: E0122 11:58:05.786670 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z2x9c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-qv7sq_openshift-marketplace(11240379-c011-45ef-8626-f59a8772a5ce): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 11:58:05 crc kubenswrapper[4773]: E0122 11:58:05.788590 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-qv7sq" podUID="11240379-c011-45ef-8626-f59a8772a5ce" Jan 22 11:58:07 crc kubenswrapper[4773]: E0122 11:58:07.365826 4773 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-qv7sq" podUID="11240379-c011-45ef-8626-f59a8772a5ce" Jan 22 11:58:07 crc kubenswrapper[4773]: E0122 11:58:07.444813 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 22 11:58:07 crc kubenswrapper[4773]: E0122 11:58:07.445508 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cpttn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-cf7rt_openshift-marketplace(ce66454f-39e9-4aac-9887-987e15252181): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 11:58:07 crc kubenswrapper[4773]: E0122 11:58:07.447233 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-cf7rt" podUID="ce66454f-39e9-4aac-9887-987e15252181" Jan 22 11:58:07 crc kubenswrapper[4773]: E0122 11:58:07.461555 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 22 11:58:07 crc kubenswrapper[4773]: E0122 11:58:07.461832 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hgqmm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-6j54f_openshift-marketplace(792b0be2-378b-4296-87ff-ca27a76013bd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 11:58:07 crc kubenswrapper[4773]: E0122 11:58:07.463025 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-6j54f" podUID="792b0be2-378b-4296-87ff-ca27a76013bd" Jan 22 11:58:07 crc kubenswrapper[4773]: E0122 11:58:07.477600 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 22 11:58:07 crc kubenswrapper[4773]: E0122 11:58:07.477744 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8p99s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-s9kn2_openshift-marketplace(c4d3a6db-50cb-4809-ac76-edb0a9d949a1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 11:58:07 crc kubenswrapper[4773]: E0122 11:58:07.479133 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-s9kn2" podUID="c4d3a6db-50cb-4809-ac76-edb0a9d949a1" Jan 22 11:58:09 crc kubenswrapper[4773]: E0122 11:58:09.535037 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-cf7rt" podUID="ce66454f-39e9-4aac-9887-987e15252181" Jan 22 11:58:09 crc kubenswrapper[4773]: E0122 11:58:09.535080 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-s9kn2" podUID="c4d3a6db-50cb-4809-ac76-edb0a9d949a1" Jan 22 11:58:09 crc kubenswrapper[4773]: E0122 11:58:09.535105 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-6j54f" podUID="792b0be2-378b-4296-87ff-ca27a76013bd" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.565207 4773 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-5jllv container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while 
awaiting headers)" start-of-body= Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.565301 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" podUID="ea377cf5-fbd8-462d-bfea-dd3aca3da018" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.585960 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" Jan 22 11:58:09 crc kubenswrapper[4773]: E0122 11:58:09.593212 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 22 11:58:09 crc kubenswrapper[4773]: E0122 11:58:09.593417 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rnd5q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-bwxfc_openshift-marketplace(be427c4b-854d-442d-86b1-19ee1d69814e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 11:58:09 crc kubenswrapper[4773]: E0122 11:58:09.594533 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-bwxfc" podUID="be427c4b-854d-442d-86b1-19ee1d69814e" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.616912 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98"] 
Jan 22 11:58:09 crc kubenswrapper[4773]: E0122 11:58:09.617186 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea377cf5-fbd8-462d-bfea-dd3aca3da018" containerName="route-controller-manager" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.617201 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea377cf5-fbd8-462d-bfea-dd3aca3da018" containerName="route-controller-manager" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.617365 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea377cf5-fbd8-462d-bfea-dd3aca3da018" containerName="route-controller-manager" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.618272 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.624169 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98"] Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.642679 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea377cf5-fbd8-462d-bfea-dd3aca3da018-client-ca\") pod \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\" (UID: \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\") " Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.642801 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gn2jj\" (UniqueName: \"kubernetes.io/projected/ea377cf5-fbd8-462d-bfea-dd3aca3da018-kube-api-access-gn2jj\") pod \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\" (UID: \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\") " Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.642921 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea377cf5-fbd8-462d-bfea-dd3aca3da018-config\") pod \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\" (UID: \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\") " Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.642947 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea377cf5-fbd8-462d-bfea-dd3aca3da018-serving-cert\") pod \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\" (UID: \"ea377cf5-fbd8-462d-bfea-dd3aca3da018\") " Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.643138 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/919880cf-149a-414f-91f0-db3a34ef8ab4-client-ca\") pod \"route-controller-manager-78c85c6445-pss98\" (UID: \"919880cf-149a-414f-91f0-db3a34ef8ab4\") " pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.643259 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/919880cf-149a-414f-91f0-db3a34ef8ab4-serving-cert\") pod \"route-controller-manager-78c85c6445-pss98\" (UID: \"919880cf-149a-414f-91f0-db3a34ef8ab4\") " pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.644024 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/919880cf-149a-414f-91f0-db3a34ef8ab4-config\") pod \"route-controller-manager-78c85c6445-pss98\" (UID: \"919880cf-149a-414f-91f0-db3a34ef8ab4\") " pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.644070 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6jjl\" (UniqueName: \"kubernetes.io/projected/919880cf-149a-414f-91f0-db3a34ef8ab4-kube-api-access-w6jjl\") pod \"route-controller-manager-78c85c6445-pss98\" (UID: \"919880cf-149a-414f-91f0-db3a34ef8ab4\") " pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.643982 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea377cf5-fbd8-462d-bfea-dd3aca3da018-config" (OuterVolumeSpecName: "config") pod "ea377cf5-fbd8-462d-bfea-dd3aca3da018" (UID: "ea377cf5-fbd8-462d-bfea-dd3aca3da018"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.647860 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea377cf5-fbd8-462d-bfea-dd3aca3da018-client-ca" (OuterVolumeSpecName: "client-ca") pod "ea377cf5-fbd8-462d-bfea-dd3aca3da018" (UID: "ea377cf5-fbd8-462d-bfea-dd3aca3da018"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.660952 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea377cf5-fbd8-462d-bfea-dd3aca3da018-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ea377cf5-fbd8-462d-bfea-dd3aca3da018" (UID: "ea377cf5-fbd8-462d-bfea-dd3aca3da018"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.667535 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea377cf5-fbd8-462d-bfea-dd3aca3da018-kube-api-access-gn2jj" (OuterVolumeSpecName: "kube-api-access-gn2jj") pod "ea377cf5-fbd8-462d-bfea-dd3aca3da018" (UID: "ea377cf5-fbd8-462d-bfea-dd3aca3da018"). InnerVolumeSpecName "kube-api-access-gn2jj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:58:09 crc kubenswrapper[4773]: E0122 11:58:09.703018 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 22 11:58:09 crc kubenswrapper[4773]: E0122 11:58:09.703170 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jhclv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-dfvhr_openshift-marketplace(fc3f7a11-33be-4f89-abf4-62f1f13ad823): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 11:58:09 crc kubenswrapper[4773]: E0122 11:58:09.704788 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-dfvhr" podUID="fc3f7a11-33be-4f89-abf4-62f1f13ad823" Jan 22 11:58:09 crc kubenswrapper[4773]: E0122 11:58:09.716161 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 22 11:58:09 crc kubenswrapper[4773]: E0122 11:58:09.716678 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jbfgd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-zvhf9_openshift-marketplace(fbe13262-f7ed-4311-bf73-c2dd21ef3733): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 11:58:09 crc kubenswrapper[4773]: E0122 11:58:09.717854 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-zvhf9" podUID="fbe13262-f7ed-4311-bf73-c2dd21ef3733" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.745971 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/919880cf-149a-414f-91f0-db3a34ef8ab4-serving-cert\") pod \"route-controller-manager-78c85c6445-pss98\" (UID: \"919880cf-149a-414f-91f0-db3a34ef8ab4\") " pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.746014 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/919880cf-149a-414f-91f0-db3a34ef8ab4-config\") pod \"route-controller-manager-78c85c6445-pss98\" (UID: \"919880cf-149a-414f-91f0-db3a34ef8ab4\") " pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.746041 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6jjl\" (UniqueName: \"kubernetes.io/projected/919880cf-149a-414f-91f0-db3a34ef8ab4-kube-api-access-w6jjl\") pod \"route-controller-manager-78c85c6445-pss98\" (UID: \"919880cf-149a-414f-91f0-db3a34ef8ab4\") " pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.746075 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/919880cf-149a-414f-91f0-db3a34ef8ab4-client-ca\") pod 
\"route-controller-manager-78c85c6445-pss98\" (UID: \"919880cf-149a-414f-91f0-db3a34ef8ab4\") " pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.746120 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gn2jj\" (UniqueName: \"kubernetes.io/projected/ea377cf5-fbd8-462d-bfea-dd3aca3da018-kube-api-access-gn2jj\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.746131 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea377cf5-fbd8-462d-bfea-dd3aca3da018-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.746141 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea377cf5-fbd8-462d-bfea-dd3aca3da018-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.746149 4773 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea377cf5-fbd8-462d-bfea-dd3aca3da018-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.748124 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/919880cf-149a-414f-91f0-db3a34ef8ab4-client-ca\") pod \"route-controller-manager-78c85c6445-pss98\" (UID: \"919880cf-149a-414f-91f0-db3a34ef8ab4\") " pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.752337 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/919880cf-149a-414f-91f0-db3a34ef8ab4-config\") pod \"route-controller-manager-78c85c6445-pss98\" (UID: \"919880cf-149a-414f-91f0-db3a34ef8ab4\") " pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.754944 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" event={"ID":"ea377cf5-fbd8-462d-bfea-dd3aca3da018","Type":"ContainerDied","Data":"93fc36f8e0a73f53db3a16b3a3eb6c6486f5fa7d5aaf2145151ca528cc9569f7"} Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.755037 4773 scope.go:117] "RemoveContainer" containerID="108029edb25e91ff04a4445e50163c7808179e8f6451ae20a9a52d248631a120" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.755274 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv" Jan 22 11:58:09 crc kubenswrapper[4773]: E0122 11:58:09.763653 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-dfvhr" podUID="fc3f7a11-33be-4f89-abf4-62f1f13ad823" Jan 22 11:58:09 crc kubenswrapper[4773]: E0122 11:58:09.766996 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-bwxfc" podUID="be427c4b-854d-442d-86b1-19ee1d69814e" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.767642 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6jjl\" (UniqueName: \"kubernetes.io/projected/919880cf-149a-414f-91f0-db3a34ef8ab4-kube-api-access-w6jjl\") pod \"route-controller-manager-78c85c6445-pss98\" (UID: \"919880cf-149a-414f-91f0-db3a34ef8ab4\") " pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" Jan 22 11:58:09 crc kubenswrapper[4773]: E0122 11:58:09.770800 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-zvhf9" podUID="fbe13262-f7ed-4311-bf73-c2dd21ef3733" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.787825 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/919880cf-149a-414f-91f0-db3a34ef8ab4-serving-cert\") pod \"route-controller-manager-78c85c6445-pss98\" (UID: \"919880cf-149a-414f-91f0-db3a34ef8ab4\") " pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.849828 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv"] Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.854396 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5jllv"] Jan 22 11:58:09 crc kubenswrapper[4773]: I0122 11:58:09.991014 4773 util.go:30] "No sandbox for pod can be found. 
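
The ErrImagePull -> ImagePullBackOff transitions above are driven by the kubelet's per-image retry backoff. As a rough model (a sketch only; the 10s initial delay and 5m cap are assumed defaults, not values read from this log), the wait before each retry doubles until it hits the cap:

    package main

    import (
        "fmt"
        "time"
    )

    // Hypothetical model of image-pull backoff: an initial delay that
    // doubles per failed attempt up to a cap. The 10s/5m values are
    // assumptions matching commonly cited kubelet defaults.
    func backoffDelay(attempt int) time.Duration {
        const (
            initial  = 10 * time.Second
            maxDelay = 5 * time.Minute
        )
        d := initial
        for i := 1; i < attempt; i++ {
            d *= 2
            if d >= maxDelay {
                return maxDelay
            }
        }
        return d
    }

    func main() {
        for n := 1; n <= 6; n++ {
            fmt.Printf("attempt %d: wait %v\n", n, backoffDelay(n))
        }
    }

This is why the "Back-off pulling image" messages recur at widening intervals rather than immediately after every failed pull.
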
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" Jan 22 11:58:10 crc kubenswrapper[4773]: I0122 11:58:10.197411 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-5tqwr"] Jan 22 11:58:10 crc kubenswrapper[4773]: I0122 11:58:10.204611 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-667964fbf9-n2jf8"] Jan 22 11:58:10 crc kubenswrapper[4773]: I0122 11:58:10.225569 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 22 11:58:10 crc kubenswrapper[4773]: W0122 11:58:10.229025 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2333d480_ea82_4a7a_89a5_c6576911a2fc.slice/crio-ad3b85f89a1136fe8a33d496f762a61bbac9d1b00406e9ee9643378ba11dee4b WatchSource:0}: Error finding container ad3b85f89a1136fe8a33d496f762a61bbac9d1b00406e9ee9643378ba11dee4b: Status 404 returned error can't find the container with id ad3b85f89a1136fe8a33d496f762a61bbac9d1b00406e9ee9643378ba11dee4b Jan 22 11:58:10 crc kubenswrapper[4773]: W0122 11:58:10.229240 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1ed4b8d_def5_474b_8629_cd0bae7e49a6.slice/crio-a2a0a7745e18777c79d62ab970c0512c9db9d6b61dc91a08b9440b64cf2aabab WatchSource:0}: Error finding container a2a0a7745e18777c79d62ab970c0512c9db9d6b61dc91a08b9440b64cf2aabab: Status 404 returned error can't find the container with id a2a0a7745e18777c79d62ab970c0512c9db9d6b61dc91a08b9440b64cf2aabab Jan 22 11:58:10 crc kubenswrapper[4773]: I0122 11:58:10.240554 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 22 11:58:10 crc kubenswrapper[4773]: W0122 11:58:10.251947 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podcb1e7d62_6878_4cd6_8e11_ba246971e0ef.slice/crio-b6bcb6b55b6950dc879d7a07de43736274bf067ef91d84aff6a763795656d9b9 WatchSource:0}: Error finding container b6bcb6b55b6950dc879d7a07de43736274bf067ef91d84aff6a763795656d9b9: Status 404 returned error can't find the container with id b6bcb6b55b6950dc879d7a07de43736274bf067ef91d84aff6a763795656d9b9 Jan 22 11:58:10 crc kubenswrapper[4773]: I0122 11:58:10.327150 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98"] Jan 22 11:58:10 crc kubenswrapper[4773]: W0122 11:58:10.333002 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod919880cf_149a_414f_91f0_db3a34ef8ab4.slice/crio-79244f5cb922273ffd8f87b71f6e98d992cf1da0691daad0c8ec5dc286a108da WatchSource:0}: Error finding container 79244f5cb922273ffd8f87b71f6e98d992cf1da0691daad0c8ec5dc286a108da: Status 404 returned error can't find the container with id 79244f5cb922273ffd8f87b71f6e98d992cf1da0691daad0c8ec5dc286a108da Jan 22 11:58:10 crc kubenswrapper[4773]: I0122 11:58:10.664669 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea377cf5-fbd8-462d-bfea-dd3aca3da018" path="/var/lib/kubelet/pods/ea377cf5-fbd8-462d-bfea-dd3aca3da018/volumes" Jan 22 11:58:10 crc kubenswrapper[4773]: I0122 11:58:10.761872 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" 
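
Each record here is a klog header (severity letter, MMDD date, wall time, PID, file:line) followed by a structured message, all wrapped in journald framing ("Jan 22 11:58:09 crc kubenswrapper[4773]: ..."). A sketch of pulling one apart; the regex is illustrative rather than the canonical klog grammar:

    package main

    import (
        "fmt"
        "regexp"
    )

    // Matches klog headers such as:
    //   I0122 11:58:10.240554 4773 kubelet.go:2428] "SyncLoop UPDATE" ...
    var klogRe = regexp.MustCompile(
        `^([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+) ([\w.]+:\d+)\] (.*)$`)

    func main() {
        line := `I0122 11:58:10.240554 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api"`
        if m := klogRe.FindStringSubmatch(line); m != nil {
            fmt.Printf("severity=%s date=%s time=%s pid=%s src=%s msg=%s\n",
                m[1], m[2], m[3], m[4], m[5], m[6])
        }
    }
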
event={"ID":"cb1e7d62-6878-4cd6-8e11-ba246971e0ef","Type":"ContainerStarted","Data":"b6bcb6b55b6950dc879d7a07de43736274bf067ef91d84aff6a763795656d9b9"} Jan 22 11:58:10 crc kubenswrapper[4773]: I0122 11:58:10.762810 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" event={"ID":"c1ed4b8d-def5-474b-8629-cd0bae7e49a6","Type":"ContainerStarted","Data":"a2a0a7745e18777c79d62ab970c0512c9db9d6b61dc91a08b9440b64cf2aabab"} Jan 22 11:58:10 crc kubenswrapper[4773]: I0122 11:58:10.763711 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" event={"ID":"919880cf-149a-414f-91f0-db3a34ef8ab4","Type":"ContainerStarted","Data":"79244f5cb922273ffd8f87b71f6e98d992cf1da0691daad0c8ec5dc286a108da"} Jan 22 11:58:10 crc kubenswrapper[4773]: I0122 11:58:10.765308 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"59e7aa07-30e5-4e9b-9cd8-f80054e9d41c","Type":"ContainerStarted","Data":"2eb383b986d6a3f2d70bb97abb3ae2eb219c7234ae6ec5b6eca0e1f31e520cfc"} Jan 22 11:58:10 crc kubenswrapper[4773]: I0122 11:58:10.767975 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" event={"ID":"2333d480-ea82-4a7a-89a5-c6576911a2fc","Type":"ContainerStarted","Data":"ad3b85f89a1136fe8a33d496f762a61bbac9d1b00406e9ee9643378ba11dee4b"} Jan 22 11:58:11 crc kubenswrapper[4773]: I0122 11:58:11.773700 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" event={"ID":"919880cf-149a-414f-91f0-db3a34ef8ab4","Type":"ContainerStarted","Data":"7c79b81ebda074bc26703b67a00f3456ea0b47d798468a50e92da00c8c4c8eee"} Jan 22 11:58:11 crc kubenswrapper[4773]: I0122 11:58:11.773952 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" Jan 22 11:58:11 crc kubenswrapper[4773]: I0122 11:58:11.775374 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"59e7aa07-30e5-4e9b-9cd8-f80054e9d41c","Type":"ContainerStarted","Data":"78e4428fc53257f229a6dc671d3f3bfd258be98b0ae80ee2a21a57a16d09aeee"} Jan 22 11:58:11 crc kubenswrapper[4773]: I0122 11:58:11.776628 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" event={"ID":"2333d480-ea82-4a7a-89a5-c6576911a2fc","Type":"ContainerStarted","Data":"0c1bc66865e98f80cace9769c182057ce227307c20f9fa8b5e6648a64a65a60e"} Jan 22 11:58:11 crc kubenswrapper[4773]: I0122 11:58:11.776715 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" podUID="2333d480-ea82-4a7a-89a5-c6576911a2fc" containerName="controller-manager" containerID="cri-o://0c1bc66865e98f80cace9769c182057ce227307c20f9fa8b5e6648a64a65a60e" gracePeriod=30 Jan 22 11:58:11 crc kubenswrapper[4773]: I0122 11:58:11.776851 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:58:11 crc kubenswrapper[4773]: I0122 11:58:11.778771 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" 
event={"ID":"cb1e7d62-6878-4cd6-8e11-ba246971e0ef","Type":"ContainerStarted","Data":"277d68184bcc4d55c9df5becc13a19bf69fde5acbec5f59d9446121d0769bb99"} Jan 22 11:58:11 crc kubenswrapper[4773]: I0122 11:58:11.783058 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" event={"ID":"c1ed4b8d-def5-474b-8629-cd0bae7e49a6","Type":"ContainerStarted","Data":"f9715317752341b231515a4bea933466e4b28742bf07c37c0f086c0761a1f502"} Jan 22 11:58:11 crc kubenswrapper[4773]: I0122 11:58:11.784234 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" Jan 22 11:58:11 crc kubenswrapper[4773]: I0122 11:58:11.791339 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:58:11 crc kubenswrapper[4773]: I0122 11:58:11.813935 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" podStartSLOduration=37.813922431 podStartE2EDuration="37.813922431s" podCreationTimestamp="2026-01-22 11:57:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:58:11.813442089 +0000 UTC m=+199.391557914" watchObservedRunningTime="2026-01-22 11:58:11.813922431 +0000 UTC m=+199.392038256" Jan 22 11:58:11 crc kubenswrapper[4773]: I0122 11:58:11.814529 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" podStartSLOduration=17.814523116 podStartE2EDuration="17.814523116s" podCreationTimestamp="2026-01-22 11:57:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:58:11.801678777 +0000 UTC m=+199.379794602" watchObservedRunningTime="2026-01-22 11:58:11.814523116 +0000 UTC m=+199.392638941" Jan 22 11:58:11 crc kubenswrapper[4773]: I0122 11:58:11.835326 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=7.8353088589999995 podStartE2EDuration="7.835308859s" podCreationTimestamp="2026-01-22 11:58:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:58:11.833435161 +0000 UTC m=+199.411550986" watchObservedRunningTime="2026-01-22 11:58:11.835308859 +0000 UTC m=+199.413424684" Jan 22 11:58:11 crc kubenswrapper[4773]: I0122 11:58:11.870190 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=11.870174504 podStartE2EDuration="11.870174504s" podCreationTimestamp="2026-01-22 11:58:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:58:11.868580163 +0000 UTC m=+199.446695988" watchObservedRunningTime="2026-01-22 11:58:11.870174504 +0000 UTC m=+199.448290319" Jan 22 11:58:12 crc kubenswrapper[4773]: I0122 11:58:12.417690 4773 patch_prober.go:28] interesting pod/controller-manager-667964fbf9-n2jf8 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.54:8443/healthz\": 
dial tcp 10.217.0.54:8443: connect: connection refused" start-of-body= Jan 22 11:58:12 crc kubenswrapper[4773]: I0122 11:58:12.418014 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" podUID="2333d480-ea82-4a7a-89a5-c6576911a2fc" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.54:8443/healthz\": dial tcp 10.217.0.54:8443: connect: connection refused" Jan 22 11:58:12 crc kubenswrapper[4773]: I0122 11:58:12.790825 4773 generic.go:334] "Generic (PLEG): container finished" podID="2333d480-ea82-4a7a-89a5-c6576911a2fc" containerID="0c1bc66865e98f80cace9769c182057ce227307c20f9fa8b5e6648a64a65a60e" exitCode=0 Jan 22 11:58:12 crc kubenswrapper[4773]: I0122 11:58:12.790891 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" event={"ID":"2333d480-ea82-4a7a-89a5-c6576911a2fc","Type":"ContainerDied","Data":"0c1bc66865e98f80cace9769c182057ce227307c20f9fa8b5e6648a64a65a60e"} Jan 22 11:58:12 crc kubenswrapper[4773]: I0122 11:58:12.793664 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-5tqwr" event={"ID":"c1ed4b8d-def5-474b-8629-cd0bae7e49a6","Type":"ContainerStarted","Data":"7b7580f8c6587933850d0ee0f620b9e2d9a61bb5b4b946564b06a0641106d5cd"} Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.470318 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.490159 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-5tqwr" podStartSLOduration=182.490138574 podStartE2EDuration="3m2.490138574s" podCreationTimestamp="2026-01-22 11:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:58:12.810965563 +0000 UTC m=+200.389081398" watchObservedRunningTime="2026-01-22 11:58:13.490138574 +0000 UTC m=+201.068254399" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.511411 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7555cd99b7-mhxj5"] Jan 22 11:58:13 crc kubenswrapper[4773]: E0122 11:58:13.511655 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2333d480-ea82-4a7a-89a5-c6576911a2fc" containerName="controller-manager" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.511665 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2333d480-ea82-4a7a-89a5-c6576911a2fc" containerName="controller-manager" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.511764 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="2333d480-ea82-4a7a-89a5-c6576911a2fc" containerName="controller-manager" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.512128 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.522760 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7555cd99b7-mhxj5"] Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.577007 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7g56b\" (UniqueName: \"kubernetes.io/projected/2333d480-ea82-4a7a-89a5-c6576911a2fc-kube-api-access-7g56b\") pod \"2333d480-ea82-4a7a-89a5-c6576911a2fc\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.577097 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2333d480-ea82-4a7a-89a5-c6576911a2fc-config\") pod \"2333d480-ea82-4a7a-89a5-c6576911a2fc\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.577150 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2333d480-ea82-4a7a-89a5-c6576911a2fc-proxy-ca-bundles\") pod \"2333d480-ea82-4a7a-89a5-c6576911a2fc\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.577172 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2333d480-ea82-4a7a-89a5-c6576911a2fc-client-ca\") pod \"2333d480-ea82-4a7a-89a5-c6576911a2fc\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.577199 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2333d480-ea82-4a7a-89a5-c6576911a2fc-serving-cert\") pod \"2333d480-ea82-4a7a-89a5-c6576911a2fc\" (UID: \"2333d480-ea82-4a7a-89a5-c6576911a2fc\") " Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.577951 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2333d480-ea82-4a7a-89a5-c6576911a2fc-client-ca" (OuterVolumeSpecName: "client-ca") pod "2333d480-ea82-4a7a-89a5-c6576911a2fc" (UID: "2333d480-ea82-4a7a-89a5-c6576911a2fc"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.577969 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2333d480-ea82-4a7a-89a5-c6576911a2fc-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "2333d480-ea82-4a7a-89a5-c6576911a2fc" (UID: "2333d480-ea82-4a7a-89a5-c6576911a2fc"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.578022 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2333d480-ea82-4a7a-89a5-c6576911a2fc-config" (OuterVolumeSpecName: "config") pod "2333d480-ea82-4a7a-89a5-c6576911a2fc" (UID: "2333d480-ea82-4a7a-89a5-c6576911a2fc"). InnerVolumeSpecName "config". 
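
The controller-manager readiness failure a few records above is an ordinary HTTPS GET against the pod IP that is refused at the TCP layer. A minimal sketch of that kind of probe (not the kubelet's actual prober; the URL and 1s timeout mirror the log):

    package main

    import (
        "crypto/tls"
        "fmt"
        "net/http"
        "time"
    )

    func main() {
        client := &http.Client{
            Timeout: time.Second, // probe timeoutSeconds defaults to 1
            Transport: &http.Transport{
                // Probes against pod-local serving certs typically skip verification.
                TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
            },
        }
        resp, err := client.Get("https://10.217.0.54:8443/healthz")
        if err != nil {
            fmt.Println("probe failure:", err) // e.g. "connect: connection refused"
            return
        }
        defer resp.Body.Close()
        fmt.Println("probe status:", resp.Status)
    }

A refused connection, as seen here, means nothing is listening on the port, which is expected: the old controller-manager container had just been told to terminate.
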
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.581750 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2333d480-ea82-4a7a-89a5-c6576911a2fc-kube-api-access-7g56b" (OuterVolumeSpecName: "kube-api-access-7g56b") pod "2333d480-ea82-4a7a-89a5-c6576911a2fc" (UID: "2333d480-ea82-4a7a-89a5-c6576911a2fc"). InnerVolumeSpecName "kube-api-access-7g56b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.587385 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2333d480-ea82-4a7a-89a5-c6576911a2fc-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "2333d480-ea82-4a7a-89a5-c6576911a2fc" (UID: "2333d480-ea82-4a7a-89a5-c6576911a2fc"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.678870 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6eabfaa-9738-47a5-b602-7092658b9661-serving-cert\") pod \"controller-manager-7555cd99b7-mhxj5\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.678940 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c6eabfaa-9738-47a5-b602-7092658b9661-client-ca\") pod \"controller-manager-7555cd99b7-mhxj5\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.678980 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2vp2\" (UniqueName: \"kubernetes.io/projected/c6eabfaa-9738-47a5-b602-7092658b9661-kube-api-access-h2vp2\") pod \"controller-manager-7555cd99b7-mhxj5\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.679150 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c6eabfaa-9738-47a5-b602-7092658b9661-proxy-ca-bundles\") pod \"controller-manager-7555cd99b7-mhxj5\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.679209 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6eabfaa-9738-47a5-b602-7092658b9661-config\") pod \"controller-manager-7555cd99b7-mhxj5\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.679312 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2333d480-ea82-4a7a-89a5-c6576911a2fc-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.679336 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7g56b\" 
(UniqueName: \"kubernetes.io/projected/2333d480-ea82-4a7a-89a5-c6576911a2fc-kube-api-access-7g56b\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.679350 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2333d480-ea82-4a7a-89a5-c6576911a2fc-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.679359 4773 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2333d480-ea82-4a7a-89a5-c6576911a2fc-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.679367 4773 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2333d480-ea82-4a7a-89a5-c6576911a2fc-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.780136 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c6eabfaa-9738-47a5-b602-7092658b9661-client-ca\") pod \"controller-manager-7555cd99b7-mhxj5\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.780200 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2vp2\" (UniqueName: \"kubernetes.io/projected/c6eabfaa-9738-47a5-b602-7092658b9661-kube-api-access-h2vp2\") pod \"controller-manager-7555cd99b7-mhxj5\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.780246 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c6eabfaa-9738-47a5-b602-7092658b9661-proxy-ca-bundles\") pod \"controller-manager-7555cd99b7-mhxj5\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.780270 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6eabfaa-9738-47a5-b602-7092658b9661-config\") pod \"controller-manager-7555cd99b7-mhxj5\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.780294 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6eabfaa-9738-47a5-b602-7092658b9661-serving-cert\") pod \"controller-manager-7555cd99b7-mhxj5\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.781531 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c6eabfaa-9738-47a5-b602-7092658b9661-client-ca\") pod \"controller-manager-7555cd99b7-mhxj5\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.781685 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/c6eabfaa-9738-47a5-b602-7092658b9661-config\") pod \"controller-manager-7555cd99b7-mhxj5\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.782602 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c6eabfaa-9738-47a5-b602-7092658b9661-proxy-ca-bundles\") pod \"controller-manager-7555cd99b7-mhxj5\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.785146 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6eabfaa-9738-47a5-b602-7092658b9661-serving-cert\") pod \"controller-manager-7555cd99b7-mhxj5\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.817324 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" event={"ID":"2333d480-ea82-4a7a-89a5-c6576911a2fc","Type":"ContainerDied","Data":"ad3b85f89a1136fe8a33d496f762a61bbac9d1b00406e9ee9643378ba11dee4b"} Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.817367 4773 scope.go:117] "RemoveContainer" containerID="0c1bc66865e98f80cace9769c182057ce227307c20f9fa8b5e6648a64a65a60e" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.817459 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-667964fbf9-n2jf8" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.830104 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2vp2\" (UniqueName: \"kubernetes.io/projected/c6eabfaa-9738-47a5-b602-7092658b9661-kube-api-access-h2vp2\") pod \"controller-manager-7555cd99b7-mhxj5\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.836451 4773 generic.go:334] "Generic (PLEG): container finished" podID="cb1e7d62-6878-4cd6-8e11-ba246971e0ef" containerID="277d68184bcc4d55c9df5becc13a19bf69fde5acbec5f59d9446121d0769bb99" exitCode=0 Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.837260 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"cb1e7d62-6878-4cd6-8e11-ba246971e0ef","Type":"ContainerDied","Data":"277d68184bcc4d55c9df5becc13a19bf69fde5acbec5f59d9446121d0769bb99"} Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.893869 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-667964fbf9-n2jf8"] Jan 22 11:58:13 crc kubenswrapper[4773]: I0122 11:58:13.901212 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-667964fbf9-n2jf8"] Jan 22 11:58:14 crc kubenswrapper[4773]: I0122 11:58:14.130154 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:14 crc kubenswrapper[4773]: I0122 11:58:14.354973 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7555cd99b7-mhxj5"] Jan 22 11:58:14 crc kubenswrapper[4773]: I0122 11:58:14.665676 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2333d480-ea82-4a7a-89a5-c6576911a2fc" path="/var/lib/kubelet/pods/2333d480-ea82-4a7a-89a5-c6576911a2fc/volumes" Jan 22 11:58:14 crc kubenswrapper[4773]: I0122 11:58:14.847645 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" event={"ID":"c6eabfaa-9738-47a5-b602-7092658b9661","Type":"ContainerStarted","Data":"e22d510fe3de26f73628f0533e67664212d53e4cab211ad08328a865c5911d54"} Jan 22 11:58:14 crc kubenswrapper[4773]: I0122 11:58:14.847690 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" event={"ID":"c6eabfaa-9738-47a5-b602-7092658b9661","Type":"ContainerStarted","Data":"8bcd6997cfe9a92a6c37d0a714bbf27b4967e2cc0521cbd08f254d33bbec7e46"} Jan 22 11:58:14 crc kubenswrapper[4773]: I0122 11:58:14.918964 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" podStartSLOduration=20.918945781 podStartE2EDuration="20.918945781s" podCreationTimestamp="2026-01-22 11:57:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:58:14.917164486 +0000 UTC m=+202.495280311" watchObservedRunningTime="2026-01-22 11:58:14.918945781 +0000 UTC m=+202.497061606" Jan 22 11:58:15 crc kubenswrapper[4773]: I0122 11:58:15.480347 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 11:58:15 crc kubenswrapper[4773]: I0122 11:58:15.627205 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cb1e7d62-6878-4cd6-8e11-ba246971e0ef-kubelet-dir\") pod \"cb1e7d62-6878-4cd6-8e11-ba246971e0ef\" (UID: \"cb1e7d62-6878-4cd6-8e11-ba246971e0ef\") " Jan 22 11:58:15 crc kubenswrapper[4773]: I0122 11:58:15.627252 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cb1e7d62-6878-4cd6-8e11-ba246971e0ef-kube-api-access\") pod \"cb1e7d62-6878-4cd6-8e11-ba246971e0ef\" (UID: \"cb1e7d62-6878-4cd6-8e11-ba246971e0ef\") " Jan 22 11:58:15 crc kubenswrapper[4773]: I0122 11:58:15.627384 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cb1e7d62-6878-4cd6-8e11-ba246971e0ef-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "cb1e7d62-6878-4cd6-8e11-ba246971e0ef" (UID: "cb1e7d62-6878-4cd6-8e11-ba246971e0ef"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 11:58:15 crc kubenswrapper[4773]: I0122 11:58:15.627576 4773 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/cb1e7d62-6878-4cd6-8e11-ba246971e0ef-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:15 crc kubenswrapper[4773]: I0122 11:58:15.639477 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb1e7d62-6878-4cd6-8e11-ba246971e0ef-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "cb1e7d62-6878-4cd6-8e11-ba246971e0ef" (UID: "cb1e7d62-6878-4cd6-8e11-ba246971e0ef"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:58:15 crc kubenswrapper[4773]: I0122 11:58:15.728574 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cb1e7d62-6878-4cd6-8e11-ba246971e0ef-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:15 crc kubenswrapper[4773]: I0122 11:58:15.854344 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 11:58:15 crc kubenswrapper[4773]: I0122 11:58:15.854336 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"cb1e7d62-6878-4cd6-8e11-ba246971e0ef","Type":"ContainerDied","Data":"b6bcb6b55b6950dc879d7a07de43736274bf067ef91d84aff6a763795656d9b9"} Jan 22 11:58:15 crc kubenswrapper[4773]: I0122 11:58:15.854485 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b6bcb6b55b6950dc879d7a07de43736274bf067ef91d84aff6a763795656d9b9" Jan 22 11:58:15 crc kubenswrapper[4773]: I0122 11:58:15.854650 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:15 crc kubenswrapper[4773]: I0122 11:58:15.860204 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:22 crc kubenswrapper[4773]: I0122 11:58:22.893155 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qv7sq" event={"ID":"11240379-c011-45ef-8626-f59a8772a5ce","Type":"ContainerStarted","Data":"839d9030b9d80a47d6e55bd9eab857f5fb9e2e72c8691cb0148b9164282c9cd1"} Jan 22 11:58:23 crc kubenswrapper[4773]: I0122 11:58:23.899607 4773 generic.go:334] "Generic (PLEG): container finished" podID="11240379-c011-45ef-8626-f59a8772a5ce" containerID="839d9030b9d80a47d6e55bd9eab857f5fb9e2e72c8691cb0148b9164282c9cd1" exitCode=0 Jan 22 11:58:23 crc kubenswrapper[4773]: I0122 11:58:23.899786 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qv7sq" event={"ID":"11240379-c011-45ef-8626-f59a8772a5ce","Type":"ContainerDied","Data":"839d9030b9d80a47d6e55bd9eab857f5fb9e2e72c8691cb0148b9164282c9cd1"} Jan 22 11:58:24 crc kubenswrapper[4773]: I0122 11:58:24.907994 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qv7sq" event={"ID":"11240379-c011-45ef-8626-f59a8772a5ce","Type":"ContainerStarted","Data":"e10bfdc3ee3d45288b364da8a51d7428f4179dd19d8e3bce3e1baebaf8ed6939"} Jan 22 11:58:24 crc kubenswrapper[4773]: I0122 11:58:24.909864 4773 generic.go:334] "Generic (PLEG): container finished" 
podID="be427c4b-854d-442d-86b1-19ee1d69814e" containerID="b7af6c902a8041dc520299f455cfc07339ee9b0ec1578f1763ebdd2a20ce074b" exitCode=0 Jan 22 11:58:24 crc kubenswrapper[4773]: I0122 11:58:24.909920 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bwxfc" event={"ID":"be427c4b-854d-442d-86b1-19ee1d69814e","Type":"ContainerDied","Data":"b7af6c902a8041dc520299f455cfc07339ee9b0ec1578f1763ebdd2a20ce074b"} Jan 22 11:58:24 crc kubenswrapper[4773]: I0122 11:58:24.916458 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dfvhr" event={"ID":"fc3f7a11-33be-4f89-abf4-62f1f13ad823","Type":"ContainerStarted","Data":"e15c007f331e242c3e5ee9488875d308e67876e9110be201ea553c6dc2022fa4"} Jan 22 11:58:24 crc kubenswrapper[4773]: I0122 11:58:24.921930 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9kn2" event={"ID":"c4d3a6db-50cb-4809-ac76-edb0a9d949a1","Type":"ContainerStarted","Data":"f6ec90961018306ad0b59694bca11d548a1d7cfad475e070054166f0dc94e1e7"} Jan 22 11:58:24 crc kubenswrapper[4773]: I0122 11:58:24.929252 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qv7sq" podStartSLOduration=2.889580962 podStartE2EDuration="1m9.929230744s" podCreationTimestamp="2026-01-22 11:57:15 +0000 UTC" firstStartedPulling="2026-01-22 11:57:17.324395025 +0000 UTC m=+144.902510850" lastFinishedPulling="2026-01-22 11:58:24.364044807 +0000 UTC m=+211.942160632" observedRunningTime="2026-01-22 11:58:24.925535219 +0000 UTC m=+212.503651084" watchObservedRunningTime="2026-01-22 11:58:24.929230744 +0000 UTC m=+212.507346579" Jan 22 11:58:25 crc kubenswrapper[4773]: I0122 11:58:25.904343 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qv7sq" Jan 22 11:58:25 crc kubenswrapper[4773]: I0122 11:58:25.904401 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qv7sq" Jan 22 11:58:25 crc kubenswrapper[4773]: I0122 11:58:25.935480 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bwxfc" event={"ID":"be427c4b-854d-442d-86b1-19ee1d69814e","Type":"ContainerStarted","Data":"e69f40cd0af4f5aade28a8494d9fe3931576f6ad9506b6efac93669285ee9e2e"} Jan 22 11:58:25 crc kubenswrapper[4773]: I0122 11:58:25.939425 4773 generic.go:334] "Generic (PLEG): container finished" podID="ce66454f-39e9-4aac-9887-987e15252181" containerID="28d24b07f0edd42766c4a96fa695b2500c47162c0abdb5ef1c2e3fc144a0506c" exitCode=0 Jan 22 11:58:25 crc kubenswrapper[4773]: I0122 11:58:25.939469 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cf7rt" event={"ID":"ce66454f-39e9-4aac-9887-987e15252181","Type":"ContainerDied","Data":"28d24b07f0edd42766c4a96fa695b2500c47162c0abdb5ef1c2e3fc144a0506c"} Jan 22 11:58:25 crc kubenswrapper[4773]: I0122 11:58:25.942384 4773 generic.go:334] "Generic (PLEG): container finished" podID="44223378-fd96-41e8-9189-558b02fb1d2d" containerID="7eeb7e594ab3502daae4f46ef9240936aa7d59abfbe5f749a3b4b37706af1a31" exitCode=0 Jan 22 11:58:25 crc kubenswrapper[4773]: I0122 11:58:25.942429 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vfkxp" 
event={"ID":"44223378-fd96-41e8-9189-558b02fb1d2d","Type":"ContainerDied","Data":"7eeb7e594ab3502daae4f46ef9240936aa7d59abfbe5f749a3b4b37706af1a31"} Jan 22 11:58:25 crc kubenswrapper[4773]: I0122 11:58:25.947266 4773 generic.go:334] "Generic (PLEG): container finished" podID="c4d3a6db-50cb-4809-ac76-edb0a9d949a1" containerID="f6ec90961018306ad0b59694bca11d548a1d7cfad475e070054166f0dc94e1e7" exitCode=0 Jan 22 11:58:25 crc kubenswrapper[4773]: I0122 11:58:25.947330 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9kn2" event={"ID":"c4d3a6db-50cb-4809-ac76-edb0a9d949a1","Type":"ContainerDied","Data":"f6ec90961018306ad0b59694bca11d548a1d7cfad475e070054166f0dc94e1e7"} Jan 22 11:58:25 crc kubenswrapper[4773]: I0122 11:58:25.950716 4773 generic.go:334] "Generic (PLEG): container finished" podID="fbe13262-f7ed-4311-bf73-c2dd21ef3733" containerID="fd7eaea32a621ffeb0ee30c9fd5d38e85bbd775e7f7dd869f3c11ab572875446" exitCode=0 Jan 22 11:58:25 crc kubenswrapper[4773]: I0122 11:58:25.950774 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zvhf9" event={"ID":"fbe13262-f7ed-4311-bf73-c2dd21ef3733","Type":"ContainerDied","Data":"fd7eaea32a621ffeb0ee30c9fd5d38e85bbd775e7f7dd869f3c11ab572875446"} Jan 22 11:58:25 crc kubenswrapper[4773]: I0122 11:58:25.955122 4773 generic.go:334] "Generic (PLEG): container finished" podID="792b0be2-378b-4296-87ff-ca27a76013bd" containerID="0ab14b22150b0504fda02d63f5235cd6c1eef50178b09be940458cc0b7c6a984" exitCode=0 Jan 22 11:58:25 crc kubenswrapper[4773]: I0122 11:58:25.955675 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6j54f" event={"ID":"792b0be2-378b-4296-87ff-ca27a76013bd","Type":"ContainerDied","Data":"0ab14b22150b0504fda02d63f5235cd6c1eef50178b09be940458cc0b7c6a984"} Jan 22 11:58:25 crc kubenswrapper[4773]: I0122 11:58:25.976153 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bwxfc" podStartSLOduration=1.98032337 podStartE2EDuration="1m8.976133496s" podCreationTimestamp="2026-01-22 11:57:17 +0000 UTC" firstStartedPulling="2026-01-22 11:57:18.38262232 +0000 UTC m=+145.960738145" lastFinishedPulling="2026-01-22 11:58:25.378432446 +0000 UTC m=+212.956548271" observedRunningTime="2026-01-22 11:58:25.962110156 +0000 UTC m=+213.540225981" watchObservedRunningTime="2026-01-22 11:58:25.976133496 +0000 UTC m=+213.554249331" Jan 22 11:58:26 crc kubenswrapper[4773]: I0122 11:58:26.962953 4773 generic.go:334] "Generic (PLEG): container finished" podID="fc3f7a11-33be-4f89-abf4-62f1f13ad823" containerID="e15c007f331e242c3e5ee9488875d308e67876e9110be201ea553c6dc2022fa4" exitCode=0 Jan 22 11:58:26 crc kubenswrapper[4773]: I0122 11:58:26.963042 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dfvhr" event={"ID":"fc3f7a11-33be-4f89-abf4-62f1f13ad823","Type":"ContainerDied","Data":"e15c007f331e242c3e5ee9488875d308e67876e9110be201ea553c6dc2022fa4"} Jan 22 11:58:26 crc kubenswrapper[4773]: I0122 11:58:26.998947 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-qv7sq" podUID="11240379-c011-45ef-8626-f59a8772a5ce" containerName="registry-server" probeResult="failure" output=< Jan 22 11:58:26 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 11:58:26 crc kubenswrapper[4773]: > Jan 22 11:58:27 crc kubenswrapper[4773]: I0122 
11:58:27.525751 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bwxfc" Jan 22 11:58:27 crc kubenswrapper[4773]: I0122 11:58:27.525843 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bwxfc" Jan 22 11:58:27 crc kubenswrapper[4773]: I0122 11:58:27.973926 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cf7rt" event={"ID":"ce66454f-39e9-4aac-9887-987e15252181","Type":"ContainerStarted","Data":"5b19a86913d1a6cf02c8b7079ca0552552145d6b68436ef94e7910202daab85a"} Jan 22 11:58:27 crc kubenswrapper[4773]: I0122 11:58:27.976680 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vfkxp" event={"ID":"44223378-fd96-41e8-9189-558b02fb1d2d","Type":"ContainerStarted","Data":"a4cfd3cb69a3d9bb684fcb81df94582025d315d689175025889b9a93923b2b9b"} Jan 22 11:58:27 crc kubenswrapper[4773]: I0122 11:58:27.980259 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9kn2" event={"ID":"c4d3a6db-50cb-4809-ac76-edb0a9d949a1","Type":"ContainerStarted","Data":"a9ce7fd1f6dba8f6bfc286f790e5fd0707b4e25764f738e7176b776bd3154f70"} Jan 22 11:58:27 crc kubenswrapper[4773]: I0122 11:58:27.983146 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zvhf9" event={"ID":"fbe13262-f7ed-4311-bf73-c2dd21ef3733","Type":"ContainerStarted","Data":"3e6b6181aab96a0c24b1c40b010aa21ebc1b1f30243fb84c8b6691b93f89645f"} Jan 22 11:58:27 crc kubenswrapper[4773]: I0122 11:58:27.985360 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6j54f" event={"ID":"792b0be2-378b-4296-87ff-ca27a76013bd","Type":"ContainerStarted","Data":"512e03e1615a5bea55745c4e82c483373687a6aaa1e64c111a4fb9d58549b55f"} Jan 22 11:58:28 crc kubenswrapper[4773]: I0122 11:58:28.026903 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zvhf9" podStartSLOduration=3.643031041 podStartE2EDuration="1m11.026875085s" podCreationTimestamp="2026-01-22 11:57:17 +0000 UTC" firstStartedPulling="2026-01-22 11:57:19.43361239 +0000 UTC m=+147.011728215" lastFinishedPulling="2026-01-22 11:58:26.817456434 +0000 UTC m=+214.395572259" observedRunningTime="2026-01-22 11:58:28.024591026 +0000 UTC m=+215.602706881" watchObservedRunningTime="2026-01-22 11:58:28.026875085 +0000 UTC m=+215.604990910" Jan 22 11:58:28 crc kubenswrapper[4773]: I0122 11:58:28.027456 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cf7rt" podStartSLOduration=4.046056263 podStartE2EDuration="1m13.027446019s" podCreationTimestamp="2026-01-22 11:57:15 +0000 UTC" firstStartedPulling="2026-01-22 11:57:17.333977061 +0000 UTC m=+144.912092886" lastFinishedPulling="2026-01-22 11:58:26.315366827 +0000 UTC m=+213.893482642" observedRunningTime="2026-01-22 11:58:27.996506916 +0000 UTC m=+215.574622741" watchObservedRunningTime="2026-01-22 11:58:28.027446019 +0000 UTC m=+215.605561844" Jan 22 11:58:28 crc kubenswrapper[4773]: I0122 11:58:28.368337 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vfkxp" podStartSLOduration=4.444549437 podStartE2EDuration="1m10.368314543s" podCreationTimestamp="2026-01-22 11:57:18 +0000 UTC" firstStartedPulling="2026-01-22 
11:57:20.443272347 +0000 UTC m=+148.021388172" lastFinishedPulling="2026-01-22 11:58:26.367037453 +0000 UTC m=+213.945153278" observedRunningTime="2026-01-22 11:58:28.360226035 +0000 UTC m=+215.938341860" watchObservedRunningTime="2026-01-22 11:58:28.368314543 +0000 UTC m=+215.946430368" Jan 22 11:58:28 crc kubenswrapper[4773]: I0122 11:58:28.395675 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6j54f" podStartSLOduration=4.368272797 podStartE2EDuration="1m13.395655144s" podCreationTimestamp="2026-01-22 11:57:15 +0000 UTC" firstStartedPulling="2026-01-22 11:57:17.317006455 +0000 UTC m=+144.895122270" lastFinishedPulling="2026-01-22 11:58:26.344388792 +0000 UTC m=+213.922504617" observedRunningTime="2026-01-22 11:58:28.39316862 +0000 UTC m=+215.971284445" watchObservedRunningTime="2026-01-22 11:58:28.395655144 +0000 UTC m=+215.973770969" Jan 22 11:58:28 crc kubenswrapper[4773]: I0122 11:58:28.438230 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-s9kn2" podStartSLOduration=4.23621755 podStartE2EDuration="1m13.438208055s" podCreationTimestamp="2026-01-22 11:57:15 +0000 UTC" firstStartedPulling="2026-01-22 11:57:17.343475413 +0000 UTC m=+144.921591238" lastFinishedPulling="2026-01-22 11:58:26.545465918 +0000 UTC m=+214.123581743" observedRunningTime="2026-01-22 11:58:28.433220227 +0000 UTC m=+216.011336052" watchObservedRunningTime="2026-01-22 11:58:28.438208055 +0000 UTC m=+216.016323880" Jan 22 11:58:29 crc kubenswrapper[4773]: I0122 11:58:29.358902 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vfkxp" Jan 22 11:58:29 crc kubenswrapper[4773]: I0122 11:58:29.359152 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vfkxp" Jan 22 11:58:29 crc kubenswrapper[4773]: I0122 11:58:29.390351 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-bwxfc" podUID="be427c4b-854d-442d-86b1-19ee1d69814e" containerName="registry-server" probeResult="failure" output=< Jan 22 11:58:29 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 11:58:29 crc kubenswrapper[4773]: > Jan 22 11:58:29 crc kubenswrapper[4773]: I0122 11:58:29.417317 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dfvhr" event={"ID":"fc3f7a11-33be-4f89-abf4-62f1f13ad823","Type":"ContainerStarted","Data":"822ab159fdfcb964b2bd0790e913ff65ba684b63eba1a9bff1efe0fbd713d1e2"} Jan 22 11:58:30 crc kubenswrapper[4773]: I0122 11:58:30.446852 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dfvhr" podStartSLOduration=5.040282296 podStartE2EDuration="1m12.446832534s" podCreationTimestamp="2026-01-22 11:57:18 +0000 UTC" firstStartedPulling="2026-01-22 11:57:20.422346359 +0000 UTC m=+148.000462184" lastFinishedPulling="2026-01-22 11:58:27.828896597 +0000 UTC m=+215.407012422" observedRunningTime="2026-01-22 11:58:30.446584177 +0000 UTC m=+218.024700012" watchObservedRunningTime="2026-01-22 11:58:30.446832534 +0000 UTC m=+218.024948359" Jan 22 11:58:30 crc kubenswrapper[4773]: I0122 11:58:30.566038 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vfkxp" podUID="44223378-fd96-41e8-9189-558b02fb1d2d" containerName="registry-server" 
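
The registry-server startup probes in these records fail with 'timeout: failed to connect service ":50051" within 1s', which matches the output of grpc_health_probe run against the catalog pod's gRPC port. An equivalent check in Go against the standard gRPC health service (a sketch; the address and 1s budget mirror the probe):

    package main

    import (
        "context"
        "fmt"
        "time"

        "google.golang.org/grpc"
        "google.golang.org/grpc/credentials/insecure"
        healthpb "google.golang.org/grpc/health/grpc_health_v1"
    )

    func main() {
        ctx, cancel := context.WithTimeout(context.Background(), time.Second)
        defer cancel()

        // Block until connected or the 1s budget expires, like the probe does.
        conn, err := grpc.DialContext(ctx, "localhost:50051",
            grpc.WithTransportCredentials(insecure.NewCredentials()),
            grpc.WithBlock())
        if err != nil {
            fmt.Println(`timeout: failed to connect service ":50051" within 1s`)
            return
        }
        defer conn.Close()

        resp, err := healthpb.NewHealthClient(conn).Check(ctx, &healthpb.HealthCheckRequest{})
        if err != nil {
            fmt.Println("health check failed:", err)
            return
        }
        fmt.Println("status:", resp.GetStatus()) // SERVING once the registry is ready
    }

The repeated failures are benign while the freshly pulled catalog image is still building its index; the startup probe flips to "started" once :50051 begins serving, as the later records show.
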
probeResult="failure" output=< Jan 22 11:58:30 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 11:58:30 crc kubenswrapper[4773]: > Jan 22 11:58:34 crc kubenswrapper[4773]: I0122 11:58:34.074668 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 11:58:34 crc kubenswrapper[4773]: I0122 11:58:34.075011 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 11:58:34 crc kubenswrapper[4773]: I0122 11:58:34.075064 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 11:58:34 crc kubenswrapper[4773]: I0122 11:58:34.075704 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 11:58:34 crc kubenswrapper[4773]: I0122 11:58:34.075805 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6" gracePeriod=600 Jan 22 11:58:34 crc kubenswrapper[4773]: I0122 11:58:34.127266 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7555cd99b7-mhxj5"] Jan 22 11:58:34 crc kubenswrapper[4773]: I0122 11:58:34.127829 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" podUID="c6eabfaa-9738-47a5-b602-7092658b9661" containerName="controller-manager" containerID="cri-o://e22d510fe3de26f73628f0533e67664212d53e4cab211ad08328a865c5911d54" gracePeriod=30 Jan 22 11:58:34 crc kubenswrapper[4773]: I0122 11:58:34.242642 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98"] Jan 22 11:58:34 crc kubenswrapper[4773]: I0122 11:58:34.243117 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" podUID="919880cf-149a-414f-91f0-db3a34ef8ab4" containerName="route-controller-manager" containerID="cri-o://7c79b81ebda074bc26703b67a00f3456ea0b47d798468a50e92da00c8c4c8eee" gracePeriod=30 Jan 22 11:58:35 crc kubenswrapper[4773]: I0122 11:58:35.510500 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-s9kn2" Jan 22 11:58:35 crc kubenswrapper[4773]: I0122 11:58:35.510912 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-s9kn2" Jan 22 11:58:35 crc 
kubenswrapper[4773]: I0122 11:58:35.684332 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-s9kn2" Jan 22 11:58:35 crc kubenswrapper[4773]: I0122 11:58:35.707566 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cf7rt" Jan 22 11:58:35 crc kubenswrapper[4773]: I0122 11:58:35.707808 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cf7rt" Jan 22 11:58:35 crc kubenswrapper[4773]: I0122 11:58:35.756939 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cf7rt" Jan 22 11:58:35 crc kubenswrapper[4773]: I0122 11:58:35.952763 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qv7sq" Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.002481 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qv7sq" Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.154114 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6j54f" Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.154156 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6j54f" Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.211699 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6j54f" Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.464650 4773 generic.go:334] "Generic (PLEG): container finished" podID="c6eabfaa-9738-47a5-b602-7092658b9661" containerID="e22d510fe3de26f73628f0533e67664212d53e4cab211ad08328a865c5911d54" exitCode=0 Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.464718 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" event={"ID":"c6eabfaa-9738-47a5-b602-7092658b9661","Type":"ContainerDied","Data":"e22d510fe3de26f73628f0533e67664212d53e4cab211ad08328a865c5911d54"} Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.466426 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6" exitCode=0 Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.466513 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6"} Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.509872 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cf7rt" Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.521945 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6j54f" Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.533488 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-s9kn2" Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.929163 4773 util.go:48] "No ready sandbox for 
Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.957114 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8"]
Jan 22 11:58:36 crc kubenswrapper[4773]: E0122 11:58:36.957397 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb1e7d62-6878-4cd6-8e11-ba246971e0ef" containerName="pruner"
Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.957413 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb1e7d62-6878-4cd6-8e11-ba246971e0ef" containerName="pruner"
Jan 22 11:58:36 crc kubenswrapper[4773]: E0122 11:58:36.957436 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6eabfaa-9738-47a5-b602-7092658b9661" containerName="controller-manager"
Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.957445 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6eabfaa-9738-47a5-b602-7092658b9661" containerName="controller-manager"
Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.957562 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6eabfaa-9738-47a5-b602-7092658b9661" containerName="controller-manager"
Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.957574 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb1e7d62-6878-4cd6-8e11-ba246971e0ef" containerName="pruner"
Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.959024 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8"
Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.963173 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a82d4a4-46ea-49de-89bb-9e8057ca5487-serving-cert\") pod \"controller-manager-7c5bc7b89d-hgvp8\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8"
Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.963257 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pslg7\" (UniqueName: \"kubernetes.io/projected/8a82d4a4-46ea-49de-89bb-9e8057ca5487-kube-api-access-pslg7\") pod \"controller-manager-7c5bc7b89d-hgvp8\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8"
Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.963346 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a82d4a4-46ea-49de-89bb-9e8057ca5487-config\") pod \"controller-manager-7c5bc7b89d-hgvp8\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8"
Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.963395 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8a82d4a4-46ea-49de-89bb-9e8057ca5487-proxy-ca-bundles\") pod \"controller-manager-7c5bc7b89d-hgvp8\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8"
Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.963423 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8a82d4a4-46ea-49de-89bb-9e8057ca5487-client-ca\") pod \"controller-manager-7c5bc7b89d-hgvp8\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8"
"operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8a82d4a4-46ea-49de-89bb-9e8057ca5487-client-ca\") pod \"controller-manager-7c5bc7b89d-hgvp8\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8" Jan 22 11:58:36 crc kubenswrapper[4773]: I0122 11:58:36.969320 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8"] Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.064428 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c6eabfaa-9738-47a5-b602-7092658b9661-proxy-ca-bundles\") pod \"c6eabfaa-9738-47a5-b602-7092658b9661\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.064567 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6eabfaa-9738-47a5-b602-7092658b9661-serving-cert\") pod \"c6eabfaa-9738-47a5-b602-7092658b9661\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.064604 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6eabfaa-9738-47a5-b602-7092658b9661-config\") pod \"c6eabfaa-9738-47a5-b602-7092658b9661\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.064775 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c6eabfaa-9738-47a5-b602-7092658b9661-client-ca\") pod \"c6eabfaa-9738-47a5-b602-7092658b9661\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.064850 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2vp2\" (UniqueName: \"kubernetes.io/projected/c6eabfaa-9738-47a5-b602-7092658b9661-kube-api-access-h2vp2\") pod \"c6eabfaa-9738-47a5-b602-7092658b9661\" (UID: \"c6eabfaa-9738-47a5-b602-7092658b9661\") " Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.065067 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a82d4a4-46ea-49de-89bb-9e8057ca5487-serving-cert\") pod \"controller-manager-7c5bc7b89d-hgvp8\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.065129 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pslg7\" (UniqueName: \"kubernetes.io/projected/8a82d4a4-46ea-49de-89bb-9e8057ca5487-kube-api-access-pslg7\") pod \"controller-manager-7c5bc7b89d-hgvp8\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.065169 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a82d4a4-46ea-49de-89bb-9e8057ca5487-config\") pod \"controller-manager-7c5bc7b89d-hgvp8\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8" Jan 22 11:58:37 
Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.065214 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8a82d4a4-46ea-49de-89bb-9e8057ca5487-proxy-ca-bundles\") pod \"controller-manager-7c5bc7b89d-hgvp8\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8"
Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.065243 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8a82d4a4-46ea-49de-89bb-9e8057ca5487-client-ca\") pod \"controller-manager-7c5bc7b89d-hgvp8\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8"
Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.065453 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6eabfaa-9738-47a5-b602-7092658b9661-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "c6eabfaa-9738-47a5-b602-7092658b9661" (UID: "c6eabfaa-9738-47a5-b602-7092658b9661"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.066501 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8a82d4a4-46ea-49de-89bb-9e8057ca5487-client-ca\") pod \"controller-manager-7c5bc7b89d-hgvp8\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8"
Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.066847 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6eabfaa-9738-47a5-b602-7092658b9661-config" (OuterVolumeSpecName: "config") pod "c6eabfaa-9738-47a5-b602-7092658b9661" (UID: "c6eabfaa-9738-47a5-b602-7092658b9661"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.067612 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6eabfaa-9738-47a5-b602-7092658b9661-client-ca" (OuterVolumeSpecName: "client-ca") pod "c6eabfaa-9738-47a5-b602-7092658b9661" (UID: "c6eabfaa-9738-47a5-b602-7092658b9661"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.068352 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a82d4a4-46ea-49de-89bb-9e8057ca5487-config\") pod \"controller-manager-7c5bc7b89d-hgvp8\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.069272 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8a82d4a4-46ea-49de-89bb-9e8057ca5487-proxy-ca-bundles\") pod \"controller-manager-7c5bc7b89d-hgvp8\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.072873 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a82d4a4-46ea-49de-89bb-9e8057ca5487-serving-cert\") pod \"controller-manager-7c5bc7b89d-hgvp8\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.082630 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6eabfaa-9738-47a5-b602-7092658b9661-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c6eabfaa-9738-47a5-b602-7092658b9661" (UID: "c6eabfaa-9738-47a5-b602-7092658b9661"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.082631 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6eabfaa-9738-47a5-b602-7092658b9661-kube-api-access-h2vp2" (OuterVolumeSpecName: "kube-api-access-h2vp2") pod "c6eabfaa-9738-47a5-b602-7092658b9661" (UID: "c6eabfaa-9738-47a5-b602-7092658b9661"). InnerVolumeSpecName "kube-api-access-h2vp2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.094349 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pslg7\" (UniqueName: \"kubernetes.io/projected/8a82d4a4-46ea-49de-89bb-9e8057ca5487-kube-api-access-pslg7\") pod \"controller-manager-7c5bc7b89d-hgvp8\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.166343 4773 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c6eabfaa-9738-47a5-b602-7092658b9661-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.166403 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6eabfaa-9738-47a5-b602-7092658b9661-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.166416 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6eabfaa-9738-47a5-b602-7092658b9661-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.166428 4773 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c6eabfaa-9738-47a5-b602-7092658b9661-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.166442 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2vp2\" (UniqueName: \"kubernetes.io/projected/c6eabfaa-9738-47a5-b602-7092658b9661-kube-api-access-h2vp2\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.275119 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.476178 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" event={"ID":"c6eabfaa-9738-47a5-b602-7092658b9661","Type":"ContainerDied","Data":"8bcd6997cfe9a92a6c37d0a714bbf27b4967e2cc0521cbd08f254d33bbec7e46"} Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.476585 4773 scope.go:117] "RemoveContainer" containerID="e22d510fe3de26f73628f0533e67664212d53e4cab211ad08328a865c5911d54" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.476764 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7555cd99b7-mhxj5" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.478550 4773 generic.go:334] "Generic (PLEG): container finished" podID="919880cf-149a-414f-91f0-db3a34ef8ab4" containerID="7c79b81ebda074bc26703b67a00f3456ea0b47d798468a50e92da00c8c4c8eee" exitCode=0 Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.479569 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" event={"ID":"919880cf-149a-414f-91f0-db3a34ef8ab4","Type":"ContainerDied","Data":"7c79b81ebda074bc26703b67a00f3456ea0b47d798468a50e92da00c8c4c8eee"} Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.481478 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8"] Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.512162 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7555cd99b7-mhxj5"] Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.514717 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7555cd99b7-mhxj5"] Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.596478 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bwxfc" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.607092 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qv7sq"] Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.607340 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qv7sq" podUID="11240379-c011-45ef-8626-f59a8772a5ce" containerName="registry-server" containerID="cri-o://e10bfdc3ee3d45288b364da8a51d7428f4179dd19d8e3bce3e1baebaf8ed6939" gracePeriod=2 Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.642642 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bwxfc" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.917849 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zvhf9" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.917928 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zvhf9" Jan 22 11:58:37 crc kubenswrapper[4773]: I0122 11:58:37.955807 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zvhf9" Jan 22 11:58:38 crc kubenswrapper[4773]: I0122 11:58:38.485998 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8" event={"ID":"8a82d4a4-46ea-49de-89bb-9e8057ca5487","Type":"ContainerStarted","Data":"ddf08adc0536e8cf4731e36aa668602b6892cb2d40bd833bdc0eabb2d876733b"} Jan 22 11:58:38 crc kubenswrapper[4773]: I0122 11:58:38.526546 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zvhf9" Jan 22 11:58:38 crc kubenswrapper[4773]: I0122 11:58:38.607652 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6j54f"] Jan 22 11:58:38 crc kubenswrapper[4773]: I0122 11:58:38.607924 4773 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6j54f" podUID="792b0be2-378b-4296-87ff-ca27a76013bd" containerName="registry-server" containerID="cri-o://512e03e1615a5bea55745c4e82c483373687a6aaa1e64c111a4fb9d58549b55f" gracePeriod=2 Jan 22 11:58:38 crc kubenswrapper[4773]: I0122 11:58:38.666676 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6eabfaa-9738-47a5-b602-7092658b9661" path="/var/lib/kubelet/pods/c6eabfaa-9738-47a5-b602-7092658b9661/volumes" Jan 22 11:58:38 crc kubenswrapper[4773]: I0122 11:58:38.713255 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dfvhr" Jan 22 11:58:38 crc kubenswrapper[4773]: I0122 11:58:38.713414 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dfvhr" Jan 22 11:58:38 crc kubenswrapper[4773]: I0122 11:58:38.754350 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dfvhr" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.055366 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.062984 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6j54f" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.085325 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw"] Jan 22 11:58:39 crc kubenswrapper[4773]: E0122 11:58:39.085585 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="792b0be2-378b-4296-87ff-ca27a76013bd" containerName="extract-utilities" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.085605 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="792b0be2-378b-4296-87ff-ca27a76013bd" containerName="extract-utilities" Jan 22 11:58:39 crc kubenswrapper[4773]: E0122 11:58:39.085622 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="792b0be2-378b-4296-87ff-ca27a76013bd" containerName="extract-content" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.085629 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="792b0be2-378b-4296-87ff-ca27a76013bd" containerName="extract-content" Jan 22 11:58:39 crc kubenswrapper[4773]: E0122 11:58:39.085641 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="919880cf-149a-414f-91f0-db3a34ef8ab4" containerName="route-controller-manager" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.085647 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="919880cf-149a-414f-91f0-db3a34ef8ab4" containerName="route-controller-manager" Jan 22 11:58:39 crc kubenswrapper[4773]: E0122 11:58:39.085659 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="792b0be2-378b-4296-87ff-ca27a76013bd" containerName="registry-server" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.085664 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="792b0be2-378b-4296-87ff-ca27a76013bd" containerName="registry-server" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.085777 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="919880cf-149a-414f-91f0-db3a34ef8ab4" 
containerName="route-controller-manager" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.085800 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="792b0be2-378b-4296-87ff-ca27a76013bd" containerName="registry-server" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.086207 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.093862 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw"] Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.116395 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qv7sq" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.193628 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6jjl\" (UniqueName: \"kubernetes.io/projected/919880cf-149a-414f-91f0-db3a34ef8ab4-kube-api-access-w6jjl\") pod \"919880cf-149a-414f-91f0-db3a34ef8ab4\" (UID: \"919880cf-149a-414f-91f0-db3a34ef8ab4\") " Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.193682 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/919880cf-149a-414f-91f0-db3a34ef8ab4-client-ca\") pod \"919880cf-149a-414f-91f0-db3a34ef8ab4\" (UID: \"919880cf-149a-414f-91f0-db3a34ef8ab4\") " Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.193728 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/919880cf-149a-414f-91f0-db3a34ef8ab4-serving-cert\") pod \"919880cf-149a-414f-91f0-db3a34ef8ab4\" (UID: \"919880cf-149a-414f-91f0-db3a34ef8ab4\") " Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.193764 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgqmm\" (UniqueName: \"kubernetes.io/projected/792b0be2-378b-4296-87ff-ca27a76013bd-kube-api-access-hgqmm\") pod \"792b0be2-378b-4296-87ff-ca27a76013bd\" (UID: \"792b0be2-378b-4296-87ff-ca27a76013bd\") " Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.193816 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/792b0be2-378b-4296-87ff-ca27a76013bd-catalog-content\") pod \"792b0be2-378b-4296-87ff-ca27a76013bd\" (UID: \"792b0be2-378b-4296-87ff-ca27a76013bd\") " Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.193944 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/792b0be2-378b-4296-87ff-ca27a76013bd-utilities\") pod \"792b0be2-378b-4296-87ff-ca27a76013bd\" (UID: \"792b0be2-378b-4296-87ff-ca27a76013bd\") " Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.193976 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/919880cf-149a-414f-91f0-db3a34ef8ab4-config\") pod \"919880cf-149a-414f-91f0-db3a34ef8ab4\" (UID: \"919880cf-149a-414f-91f0-db3a34ef8ab4\") " Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.196067 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/919880cf-149a-414f-91f0-db3a34ef8ab4-client-ca" 
(OuterVolumeSpecName: "client-ca") pod "919880cf-149a-414f-91f0-db3a34ef8ab4" (UID: "919880cf-149a-414f-91f0-db3a34ef8ab4"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.196197 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hwcm\" (UniqueName: \"kubernetes.io/projected/7e5c856b-d1a2-49d3-ba56-58711763552c-kube-api-access-5hwcm\") pod \"route-controller-manager-7967db8687-l68jw\" (UID: \"7e5c856b-d1a2-49d3-ba56-58711763552c\") " pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.196322 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e5c856b-d1a2-49d3-ba56-58711763552c-config\") pod \"route-controller-manager-7967db8687-l68jw\" (UID: \"7e5c856b-d1a2-49d3-ba56-58711763552c\") " pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.196490 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7e5c856b-d1a2-49d3-ba56-58711763552c-client-ca\") pod \"route-controller-manager-7967db8687-l68jw\" (UID: \"7e5c856b-d1a2-49d3-ba56-58711763552c\") " pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.196547 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e5c856b-d1a2-49d3-ba56-58711763552c-serving-cert\") pod \"route-controller-manager-7967db8687-l68jw\" (UID: \"7e5c856b-d1a2-49d3-ba56-58711763552c\") " pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.196718 4773 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/919880cf-149a-414f-91f0-db3a34ef8ab4-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.197417 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/919880cf-149a-414f-91f0-db3a34ef8ab4-config" (OuterVolumeSpecName: "config") pod "919880cf-149a-414f-91f0-db3a34ef8ab4" (UID: "919880cf-149a-414f-91f0-db3a34ef8ab4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.201902 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/792b0be2-378b-4296-87ff-ca27a76013bd-utilities" (OuterVolumeSpecName: "utilities") pod "792b0be2-378b-4296-87ff-ca27a76013bd" (UID: "792b0be2-378b-4296-87ff-ca27a76013bd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.207650 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/919880cf-149a-414f-91f0-db3a34ef8ab4-kube-api-access-w6jjl" (OuterVolumeSpecName: "kube-api-access-w6jjl") pod "919880cf-149a-414f-91f0-db3a34ef8ab4" (UID: "919880cf-149a-414f-91f0-db3a34ef8ab4"). InnerVolumeSpecName "kube-api-access-w6jjl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.209813 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/919880cf-149a-414f-91f0-db3a34ef8ab4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "919880cf-149a-414f-91f0-db3a34ef8ab4" (UID: "919880cf-149a-414f-91f0-db3a34ef8ab4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.221041 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/792b0be2-378b-4296-87ff-ca27a76013bd-kube-api-access-hgqmm" (OuterVolumeSpecName: "kube-api-access-hgqmm") pod "792b0be2-378b-4296-87ff-ca27a76013bd" (UID: "792b0be2-378b-4296-87ff-ca27a76013bd"). InnerVolumeSpecName "kube-api-access-hgqmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.233954 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vfkxp" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.279357 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vfkxp" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.297224 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11240379-c011-45ef-8626-f59a8772a5ce-utilities\") pod \"11240379-c011-45ef-8626-f59a8772a5ce\" (UID: \"11240379-c011-45ef-8626-f59a8772a5ce\") " Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.297303 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2x9c\" (UniqueName: \"kubernetes.io/projected/11240379-c011-45ef-8626-f59a8772a5ce-kube-api-access-z2x9c\") pod \"11240379-c011-45ef-8626-f59a8772a5ce\" (UID: \"11240379-c011-45ef-8626-f59a8772a5ce\") " Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.297339 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11240379-c011-45ef-8626-f59a8772a5ce-catalog-content\") pod \"11240379-c011-45ef-8626-f59a8772a5ce\" (UID: \"11240379-c011-45ef-8626-f59a8772a5ce\") " Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.297536 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e5c856b-d1a2-49d3-ba56-58711763552c-serving-cert\") pod \"route-controller-manager-7967db8687-l68jw\" (UID: \"7e5c856b-d1a2-49d3-ba56-58711763552c\") " pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.297597 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hwcm\" (UniqueName: \"kubernetes.io/projected/7e5c856b-d1a2-49d3-ba56-58711763552c-kube-api-access-5hwcm\") pod \"route-controller-manager-7967db8687-l68jw\" (UID: \"7e5c856b-d1a2-49d3-ba56-58711763552c\") " pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.297627 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e5c856b-d1a2-49d3-ba56-58711763552c-config\") pod 
\"route-controller-manager-7967db8687-l68jw\" (UID: \"7e5c856b-d1a2-49d3-ba56-58711763552c\") " pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.298017 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7e5c856b-d1a2-49d3-ba56-58711763552c-client-ca\") pod \"route-controller-manager-7967db8687-l68jw\" (UID: \"7e5c856b-d1a2-49d3-ba56-58711763552c\") " pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.298173 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/792b0be2-378b-4296-87ff-ca27a76013bd-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.298192 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/919880cf-149a-414f-91f0-db3a34ef8ab4-config\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.298207 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6jjl\" (UniqueName: \"kubernetes.io/projected/919880cf-149a-414f-91f0-db3a34ef8ab4-kube-api-access-w6jjl\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.298232 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/919880cf-149a-414f-91f0-db3a34ef8ab4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.298254 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgqmm\" (UniqueName: \"kubernetes.io/projected/792b0be2-378b-4296-87ff-ca27a76013bd-kube-api-access-hgqmm\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.299557 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7e5c856b-d1a2-49d3-ba56-58711763552c-client-ca\") pod \"route-controller-manager-7967db8687-l68jw\" (UID: \"7e5c856b-d1a2-49d3-ba56-58711763552c\") " pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.303608 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e5c856b-d1a2-49d3-ba56-58711763552c-config\") pod \"route-controller-manager-7967db8687-l68jw\" (UID: \"7e5c856b-d1a2-49d3-ba56-58711763552c\") " pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.304331 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e5c856b-d1a2-49d3-ba56-58711763552c-serving-cert\") pod \"route-controller-manager-7967db8687-l68jw\" (UID: \"7e5c856b-d1a2-49d3-ba56-58711763552c\") " pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.304954 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11240379-c011-45ef-8626-f59a8772a5ce-utilities" (OuterVolumeSpecName: "utilities") pod "11240379-c011-45ef-8626-f59a8772a5ce" (UID: "11240379-c011-45ef-8626-f59a8772a5ce"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.306332 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11240379-c011-45ef-8626-f59a8772a5ce-kube-api-access-z2x9c" (OuterVolumeSpecName: "kube-api-access-z2x9c") pod "11240379-c011-45ef-8626-f59a8772a5ce" (UID: "11240379-c011-45ef-8626-f59a8772a5ce"). InnerVolumeSpecName "kube-api-access-z2x9c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.325510 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hwcm\" (UniqueName: \"kubernetes.io/projected/7e5c856b-d1a2-49d3-ba56-58711763552c-kube-api-access-5hwcm\") pod \"route-controller-manager-7967db8687-l68jw\" (UID: \"7e5c856b-d1a2-49d3-ba56-58711763552c\") " pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.333742 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/792b0be2-378b-4296-87ff-ca27a76013bd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "792b0be2-378b-4296-87ff-ca27a76013bd" (UID: "792b0be2-378b-4296-87ff-ca27a76013bd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.367395 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11240379-c011-45ef-8626-f59a8772a5ce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "11240379-c011-45ef-8626-f59a8772a5ce" (UID: "11240379-c011-45ef-8626-f59a8772a5ce"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.400571 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11240379-c011-45ef-8626-f59a8772a5ce-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.400606 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2x9c\" (UniqueName: \"kubernetes.io/projected/11240379-c011-45ef-8626-f59a8772a5ce-kube-api-access-z2x9c\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.400621 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11240379-c011-45ef-8626-f59a8772a5ce-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.400632 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/792b0be2-378b-4296-87ff-ca27a76013bd-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.428371 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.495848 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" event={"ID":"919880cf-149a-414f-91f0-db3a34ef8ab4","Type":"ContainerDied","Data":"79244f5cb922273ffd8f87b71f6e98d992cf1da0691daad0c8ec5dc286a108da"} Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.495927 4773 scope.go:117] "RemoveContainer" containerID="7c79b81ebda074bc26703b67a00f3456ea0b47d798468a50e92da00c8c4c8eee" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.495877 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.502744 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"267acbc2d90752d5471c6ac9652aabd9b488a23103e0c15252485cd6a27ff8d2"} Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.507633 4773 generic.go:334] "Generic (PLEG): container finished" podID="11240379-c011-45ef-8626-f59a8772a5ce" containerID="e10bfdc3ee3d45288b364da8a51d7428f4179dd19d8e3bce3e1baebaf8ed6939" exitCode=0 Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.507707 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qv7sq" event={"ID":"11240379-c011-45ef-8626-f59a8772a5ce","Type":"ContainerDied","Data":"e10bfdc3ee3d45288b364da8a51d7428f4179dd19d8e3bce3e1baebaf8ed6939"} Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.507982 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qv7sq" event={"ID":"11240379-c011-45ef-8626-f59a8772a5ce","Type":"ContainerDied","Data":"25ffffbfae3fd3bb7a26e94568ba4fc67346e0574ec3ed59dc4f2e98a0dde41b"} Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.507765 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qv7sq" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.510268 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8" event={"ID":"8a82d4a4-46ea-49de-89bb-9e8057ca5487","Type":"ContainerStarted","Data":"2cba6f70f621c434bbad5b08b9aeb795d5e07ce6d67fc4de753640f0206a7c7f"} Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.510429 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.516851 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.518663 4773 generic.go:334] "Generic (PLEG): container finished" podID="792b0be2-378b-4296-87ff-ca27a76013bd" containerID="512e03e1615a5bea55745c4e82c483373687a6aaa1e64c111a4fb9d58549b55f" exitCode=0 Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.518749 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6j54f" event={"ID":"792b0be2-378b-4296-87ff-ca27a76013bd","Type":"ContainerDied","Data":"512e03e1615a5bea55745c4e82c483373687a6aaa1e64c111a4fb9d58549b55f"} Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.518801 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6j54f" event={"ID":"792b0be2-378b-4296-87ff-ca27a76013bd","Type":"ContainerDied","Data":"a8ffd972ceae841c5e7b477d17d4c88140f9fddb9acc95214464a3d4f1c94df5"} Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.519506 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6j54f" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.525655 4773 scope.go:117] "RemoveContainer" containerID="e10bfdc3ee3d45288b364da8a51d7428f4179dd19d8e3bce3e1baebaf8ed6939" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.537055 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98"] Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.554114 4773 scope.go:117] "RemoveContainer" containerID="839d9030b9d80a47d6e55bd9eab857f5fb9e2e72c8691cb0148b9164282c9cd1" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.555617 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-78c85c6445-pss98"] Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.573544 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8" podStartSLOduration=5.57259517 podStartE2EDuration="5.57259517s" podCreationTimestamp="2026-01-22 11:58:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:58:39.569923092 +0000 UTC m=+227.148038937" watchObservedRunningTime="2026-01-22 11:58:39.57259517 +0000 UTC m=+227.150710995" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.576748 4773 scope.go:117] "RemoveContainer" containerID="1680fa1addbbafab82f2329e8b2d264ddef94e9be2c284b8d044548c3b4f15fb" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.591719 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dfvhr" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.593348 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qv7sq"] Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.597593 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qv7sq"] Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.605103 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6j54f"] Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.610133 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6j54f"] Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.633078 4773 scope.go:117] "RemoveContainer" containerID="e10bfdc3ee3d45288b364da8a51d7428f4179dd19d8e3bce3e1baebaf8ed6939" Jan 22 11:58:39 crc kubenswrapper[4773]: E0122 11:58:39.633856 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e10bfdc3ee3d45288b364da8a51d7428f4179dd19d8e3bce3e1baebaf8ed6939\": container with ID starting with e10bfdc3ee3d45288b364da8a51d7428f4179dd19d8e3bce3e1baebaf8ed6939 not found: ID does not exist" containerID="e10bfdc3ee3d45288b364da8a51d7428f4179dd19d8e3bce3e1baebaf8ed6939" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.633897 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e10bfdc3ee3d45288b364da8a51d7428f4179dd19d8e3bce3e1baebaf8ed6939"} err="failed to get container status \"e10bfdc3ee3d45288b364da8a51d7428f4179dd19d8e3bce3e1baebaf8ed6939\": rpc error: code = NotFound desc = could not find container 
\"e10bfdc3ee3d45288b364da8a51d7428f4179dd19d8e3bce3e1baebaf8ed6939\": container with ID starting with e10bfdc3ee3d45288b364da8a51d7428f4179dd19d8e3bce3e1baebaf8ed6939 not found: ID does not exist" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.633926 4773 scope.go:117] "RemoveContainer" containerID="839d9030b9d80a47d6e55bd9eab857f5fb9e2e72c8691cb0148b9164282c9cd1" Jan 22 11:58:39 crc kubenswrapper[4773]: E0122 11:58:39.635118 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"839d9030b9d80a47d6e55bd9eab857f5fb9e2e72c8691cb0148b9164282c9cd1\": container with ID starting with 839d9030b9d80a47d6e55bd9eab857f5fb9e2e72c8691cb0148b9164282c9cd1 not found: ID does not exist" containerID="839d9030b9d80a47d6e55bd9eab857f5fb9e2e72c8691cb0148b9164282c9cd1" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.635149 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"839d9030b9d80a47d6e55bd9eab857f5fb9e2e72c8691cb0148b9164282c9cd1"} err="failed to get container status \"839d9030b9d80a47d6e55bd9eab857f5fb9e2e72c8691cb0148b9164282c9cd1\": rpc error: code = NotFound desc = could not find container \"839d9030b9d80a47d6e55bd9eab857f5fb9e2e72c8691cb0148b9164282c9cd1\": container with ID starting with 839d9030b9d80a47d6e55bd9eab857f5fb9e2e72c8691cb0148b9164282c9cd1 not found: ID does not exist" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.635173 4773 scope.go:117] "RemoveContainer" containerID="1680fa1addbbafab82f2329e8b2d264ddef94e9be2c284b8d044548c3b4f15fb" Jan 22 11:58:39 crc kubenswrapper[4773]: E0122 11:58:39.635458 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1680fa1addbbafab82f2329e8b2d264ddef94e9be2c284b8d044548c3b4f15fb\": container with ID starting with 1680fa1addbbafab82f2329e8b2d264ddef94e9be2c284b8d044548c3b4f15fb not found: ID does not exist" containerID="1680fa1addbbafab82f2329e8b2d264ddef94e9be2c284b8d044548c3b4f15fb" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.635479 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1680fa1addbbafab82f2329e8b2d264ddef94e9be2c284b8d044548c3b4f15fb"} err="failed to get container status \"1680fa1addbbafab82f2329e8b2d264ddef94e9be2c284b8d044548c3b4f15fb\": rpc error: code = NotFound desc = could not find container \"1680fa1addbbafab82f2329e8b2d264ddef94e9be2c284b8d044548c3b4f15fb\": container with ID starting with 1680fa1addbbafab82f2329e8b2d264ddef94e9be2c284b8d044548c3b4f15fb not found: ID does not exist" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.635495 4773 scope.go:117] "RemoveContainer" containerID="512e03e1615a5bea55745c4e82c483373687a6aaa1e64c111a4fb9d58549b55f" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.668727 4773 scope.go:117] "RemoveContainer" containerID="0ab14b22150b0504fda02d63f5235cd6c1eef50178b09be940458cc0b7c6a984" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.683357 4773 scope.go:117] "RemoveContainer" containerID="a0a2a9173fad12c327ffe6e177e7b9e43225fec82cd99332b2908d56748a0a09" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.701242 4773 scope.go:117] "RemoveContainer" containerID="512e03e1615a5bea55745c4e82c483373687a6aaa1e64c111a4fb9d58549b55f" Jan 22 11:58:39 crc kubenswrapper[4773]: E0122 11:58:39.701763 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"512e03e1615a5bea55745c4e82c483373687a6aaa1e64c111a4fb9d58549b55f\": container with ID starting with 512e03e1615a5bea55745c4e82c483373687a6aaa1e64c111a4fb9d58549b55f not found: ID does not exist" containerID="512e03e1615a5bea55745c4e82c483373687a6aaa1e64c111a4fb9d58549b55f" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.701805 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"512e03e1615a5bea55745c4e82c483373687a6aaa1e64c111a4fb9d58549b55f"} err="failed to get container status \"512e03e1615a5bea55745c4e82c483373687a6aaa1e64c111a4fb9d58549b55f\": rpc error: code = NotFound desc = could not find container \"512e03e1615a5bea55745c4e82c483373687a6aaa1e64c111a4fb9d58549b55f\": container with ID starting with 512e03e1615a5bea55745c4e82c483373687a6aaa1e64c111a4fb9d58549b55f not found: ID does not exist" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.701839 4773 scope.go:117] "RemoveContainer" containerID="0ab14b22150b0504fda02d63f5235cd6c1eef50178b09be940458cc0b7c6a984" Jan 22 11:58:39 crc kubenswrapper[4773]: E0122 11:58:39.702339 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ab14b22150b0504fda02d63f5235cd6c1eef50178b09be940458cc0b7c6a984\": container with ID starting with 0ab14b22150b0504fda02d63f5235cd6c1eef50178b09be940458cc0b7c6a984 not found: ID does not exist" containerID="0ab14b22150b0504fda02d63f5235cd6c1eef50178b09be940458cc0b7c6a984" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.702427 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ab14b22150b0504fda02d63f5235cd6c1eef50178b09be940458cc0b7c6a984"} err="failed to get container status \"0ab14b22150b0504fda02d63f5235cd6c1eef50178b09be940458cc0b7c6a984\": rpc error: code = NotFound desc = could not find container \"0ab14b22150b0504fda02d63f5235cd6c1eef50178b09be940458cc0b7c6a984\": container with ID starting with 0ab14b22150b0504fda02d63f5235cd6c1eef50178b09be940458cc0b7c6a984 not found: ID does not exist" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.702482 4773 scope.go:117] "RemoveContainer" containerID="a0a2a9173fad12c327ffe6e177e7b9e43225fec82cd99332b2908d56748a0a09" Jan 22 11:58:39 crc kubenswrapper[4773]: E0122 11:58:39.702973 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0a2a9173fad12c327ffe6e177e7b9e43225fec82cd99332b2908d56748a0a09\": container with ID starting with a0a2a9173fad12c327ffe6e177e7b9e43225fec82cd99332b2908d56748a0a09 not found: ID does not exist" containerID="a0a2a9173fad12c327ffe6e177e7b9e43225fec82cd99332b2908d56748a0a09" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.703006 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0a2a9173fad12c327ffe6e177e7b9e43225fec82cd99332b2908d56748a0a09"} err="failed to get container status \"a0a2a9173fad12c327ffe6e177e7b9e43225fec82cd99332b2908d56748a0a09\": rpc error: code = NotFound desc = could not find container \"a0a2a9173fad12c327ffe6e177e7b9e43225fec82cd99332b2908d56748a0a09\": container with ID starting with a0a2a9173fad12c327ffe6e177e7b9e43225fec82cd99332b2908d56748a0a09 not found: ID does not exist" Jan 22 11:58:39 crc kubenswrapper[4773]: I0122 11:58:39.888304 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw"] Jan 22 11:58:39 crc kubenswrapper[4773]: W0122 11:58:39.893823 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7e5c856b_d1a2_49d3_ba56_58711763552c.slice/crio-1a4efa7665d808fde935b3d94570243127a6a439057a1a49dc9b921aad5359cd WatchSource:0}: Error finding container 1a4efa7665d808fde935b3d94570243127a6a439057a1a49dc9b921aad5359cd: Status 404 returned error can't find the container with id 1a4efa7665d808fde935b3d94570243127a6a439057a1a49dc9b921aad5359cd Jan 22 11:58:40 crc kubenswrapper[4773]: I0122 11:58:40.527443 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" event={"ID":"7e5c856b-d1a2-49d3-ba56-58711763552c","Type":"ContainerStarted","Data":"1a4efa7665d808fde935b3d94570243127a6a439057a1a49dc9b921aad5359cd"} Jan 22 11:58:40 crc kubenswrapper[4773]: I0122 11:58:40.666927 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11240379-c011-45ef-8626-f59a8772a5ce" path="/var/lib/kubelet/pods/11240379-c011-45ef-8626-f59a8772a5ce/volumes" Jan 22 11:58:40 crc kubenswrapper[4773]: I0122 11:58:40.667933 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="792b0be2-378b-4296-87ff-ca27a76013bd" path="/var/lib/kubelet/pods/792b0be2-378b-4296-87ff-ca27a76013bd/volumes" Jan 22 11:58:40 crc kubenswrapper[4773]: I0122 11:58:40.668909 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="919880cf-149a-414f-91f0-db3a34ef8ab4" path="/var/lib/kubelet/pods/919880cf-149a-414f-91f0-db3a34ef8ab4/volumes" Jan 22 11:58:41 crc kubenswrapper[4773]: I0122 11:58:41.006712 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zvhf9"] Jan 22 11:58:41 crc kubenswrapper[4773]: I0122 11:58:41.006929 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zvhf9" podUID="fbe13262-f7ed-4311-bf73-c2dd21ef3733" containerName="registry-server" containerID="cri-o://3e6b6181aab96a0c24b1c40b010aa21ebc1b1f30243fb84c8b6691b93f89645f" gracePeriod=2 Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.425977 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zvhf9" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.546580 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" event={"ID":"7e5c856b-d1a2-49d3-ba56-58711763552c","Type":"ContainerStarted","Data":"617f903d507fc7bc8f13ea1767d5ceebb1eb69d7a4ab496ab1f147f4ce164dd1"} Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.546847 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.549437 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbe13262-f7ed-4311-bf73-c2dd21ef3733-catalog-content\") pod \"fbe13262-f7ed-4311-bf73-c2dd21ef3733\" (UID: \"fbe13262-f7ed-4311-bf73-c2dd21ef3733\") " Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.549536 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbe13262-f7ed-4311-bf73-c2dd21ef3733-utilities\") pod \"fbe13262-f7ed-4311-bf73-c2dd21ef3733\" (UID: \"fbe13262-f7ed-4311-bf73-c2dd21ef3733\") " Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.549632 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbfgd\" (UniqueName: \"kubernetes.io/projected/fbe13262-f7ed-4311-bf73-c2dd21ef3733-kube-api-access-jbfgd\") pod \"fbe13262-f7ed-4311-bf73-c2dd21ef3733\" (UID: \"fbe13262-f7ed-4311-bf73-c2dd21ef3733\") " Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.550398 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbe13262-f7ed-4311-bf73-c2dd21ef3733-utilities" (OuterVolumeSpecName: "utilities") pod "fbe13262-f7ed-4311-bf73-c2dd21ef3733" (UID: "fbe13262-f7ed-4311-bf73-c2dd21ef3733"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.551045 4773 generic.go:334] "Generic (PLEG): container finished" podID="fbe13262-f7ed-4311-bf73-c2dd21ef3733" containerID="3e6b6181aab96a0c24b1c40b010aa21ebc1b1f30243fb84c8b6691b93f89645f" exitCode=0 Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.551076 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zvhf9" event={"ID":"fbe13262-f7ed-4311-bf73-c2dd21ef3733","Type":"ContainerDied","Data":"3e6b6181aab96a0c24b1c40b010aa21ebc1b1f30243fb84c8b6691b93f89645f"} Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.551100 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zvhf9" event={"ID":"fbe13262-f7ed-4311-bf73-c2dd21ef3733","Type":"ContainerDied","Data":"69de6be4a5c5178a02c10c17637fafd7d4db90c5cf099f2a30cbc53cd71401c9"} Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.551118 4773 scope.go:117] "RemoveContainer" containerID="3e6b6181aab96a0c24b1c40b010aa21ebc1b1f30243fb84c8b6691b93f89645f" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.551233 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zvhf9" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.560471 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbe13262-f7ed-4311-bf73-c2dd21ef3733-kube-api-access-jbfgd" (OuterVolumeSpecName: "kube-api-access-jbfgd") pod "fbe13262-f7ed-4311-bf73-c2dd21ef3733" (UID: "fbe13262-f7ed-4311-bf73-c2dd21ef3733"). InnerVolumeSpecName "kube-api-access-jbfgd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.570111 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" podStartSLOduration=8.570091713 podStartE2EDuration="8.570091713s" podCreationTimestamp="2026-01-22 11:58:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:58:42.567038484 +0000 UTC m=+230.145154319" watchObservedRunningTime="2026-01-22 11:58:42.570091713 +0000 UTC m=+230.148207548" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.580407 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.582677 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbe13262-f7ed-4311-bf73-c2dd21ef3733-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fbe13262-f7ed-4311-bf73-c2dd21ef3733" (UID: "fbe13262-f7ed-4311-bf73-c2dd21ef3733"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.584832 4773 scope.go:117] "RemoveContainer" containerID="fd7eaea32a621ffeb0ee30c9fd5d38e85bbd775e7f7dd869f3c11ab572875446" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.622497 4773 scope.go:117] "RemoveContainer" containerID="ca663e7a2cb1b343b17a2ee8ff27547dd5717d61d207d0345650a032015a8c57" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.638537 4773 scope.go:117] "RemoveContainer" containerID="3e6b6181aab96a0c24b1c40b010aa21ebc1b1f30243fb84c8b6691b93f89645f" Jan 22 11:58:42 crc kubenswrapper[4773]: E0122 11:58:42.642821 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e6b6181aab96a0c24b1c40b010aa21ebc1b1f30243fb84c8b6691b93f89645f\": container with ID starting with 3e6b6181aab96a0c24b1c40b010aa21ebc1b1f30243fb84c8b6691b93f89645f not found: ID does not exist" containerID="3e6b6181aab96a0c24b1c40b010aa21ebc1b1f30243fb84c8b6691b93f89645f" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.642887 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e6b6181aab96a0c24b1c40b010aa21ebc1b1f30243fb84c8b6691b93f89645f"} err="failed to get container status \"3e6b6181aab96a0c24b1c40b010aa21ebc1b1f30243fb84c8b6691b93f89645f\": rpc error: code = NotFound desc = could not find container \"3e6b6181aab96a0c24b1c40b010aa21ebc1b1f30243fb84c8b6691b93f89645f\": container with ID starting with 3e6b6181aab96a0c24b1c40b010aa21ebc1b1f30243fb84c8b6691b93f89645f not found: ID does not exist" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.642922 4773 scope.go:117] "RemoveContainer" 
containerID="fd7eaea32a621ffeb0ee30c9fd5d38e85bbd775e7f7dd869f3c11ab572875446" Jan 22 11:58:42 crc kubenswrapper[4773]: E0122 11:58:42.643473 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd7eaea32a621ffeb0ee30c9fd5d38e85bbd775e7f7dd869f3c11ab572875446\": container with ID starting with fd7eaea32a621ffeb0ee30c9fd5d38e85bbd775e7f7dd869f3c11ab572875446 not found: ID does not exist" containerID="fd7eaea32a621ffeb0ee30c9fd5d38e85bbd775e7f7dd869f3c11ab572875446" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.643517 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd7eaea32a621ffeb0ee30c9fd5d38e85bbd775e7f7dd869f3c11ab572875446"} err="failed to get container status \"fd7eaea32a621ffeb0ee30c9fd5d38e85bbd775e7f7dd869f3c11ab572875446\": rpc error: code = NotFound desc = could not find container \"fd7eaea32a621ffeb0ee30c9fd5d38e85bbd775e7f7dd869f3c11ab572875446\": container with ID starting with fd7eaea32a621ffeb0ee30c9fd5d38e85bbd775e7f7dd869f3c11ab572875446 not found: ID does not exist" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.643536 4773 scope.go:117] "RemoveContainer" containerID="ca663e7a2cb1b343b17a2ee8ff27547dd5717d61d207d0345650a032015a8c57" Jan 22 11:58:42 crc kubenswrapper[4773]: E0122 11:58:42.643761 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca663e7a2cb1b343b17a2ee8ff27547dd5717d61d207d0345650a032015a8c57\": container with ID starting with ca663e7a2cb1b343b17a2ee8ff27547dd5717d61d207d0345650a032015a8c57 not found: ID does not exist" containerID="ca663e7a2cb1b343b17a2ee8ff27547dd5717d61d207d0345650a032015a8c57" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.643806 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca663e7a2cb1b343b17a2ee8ff27547dd5717d61d207d0345650a032015a8c57"} err="failed to get container status \"ca663e7a2cb1b343b17a2ee8ff27547dd5717d61d207d0345650a032015a8c57\": rpc error: code = NotFound desc = could not find container \"ca663e7a2cb1b343b17a2ee8ff27547dd5717d61d207d0345650a032015a8c57\": container with ID starting with ca663e7a2cb1b343b17a2ee8ff27547dd5717d61d207d0345650a032015a8c57 not found: ID does not exist" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.652327 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fbe13262-f7ed-4311-bf73-c2dd21ef3733-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.652356 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbfgd\" (UniqueName: \"kubernetes.io/projected/fbe13262-f7ed-4311-bf73-c2dd21ef3733-kube-api-access-jbfgd\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.652396 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fbe13262-f7ed-4311-bf73-c2dd21ef3733-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.973896 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zvhf9"] Jan 22 11:58:42 crc kubenswrapper[4773]: I0122 11:58:42.981245 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zvhf9"] Jan 22 11:58:43 crc 
kubenswrapper[4773]: I0122 11:58:43.405804 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vfkxp"] Jan 22 11:58:43 crc kubenswrapper[4773]: I0122 11:58:43.406381 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vfkxp" podUID="44223378-fd96-41e8-9189-558b02fb1d2d" containerName="registry-server" containerID="cri-o://a4cfd3cb69a3d9bb684fcb81df94582025d315d689175025889b9a93923b2b9b" gracePeriod=2 Jan 22 11:58:43 crc kubenswrapper[4773]: I0122 11:58:43.560364 4773 generic.go:334] "Generic (PLEG): container finished" podID="44223378-fd96-41e8-9189-558b02fb1d2d" containerID="a4cfd3cb69a3d9bb684fcb81df94582025d315d689175025889b9a93923b2b9b" exitCode=0 Jan 22 11:58:43 crc kubenswrapper[4773]: I0122 11:58:43.560437 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vfkxp" event={"ID":"44223378-fd96-41e8-9189-558b02fb1d2d","Type":"ContainerDied","Data":"a4cfd3cb69a3d9bb684fcb81df94582025d315d689175025889b9a93923b2b9b"} Jan 22 11:58:43 crc kubenswrapper[4773]: I0122 11:58:43.831832 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vfkxp" Jan 22 11:58:43 crc kubenswrapper[4773]: I0122 11:58:43.862803 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44223378-fd96-41e8-9189-558b02fb1d2d-utilities\") pod \"44223378-fd96-41e8-9189-558b02fb1d2d\" (UID: \"44223378-fd96-41e8-9189-558b02fb1d2d\") " Jan 22 11:58:43 crc kubenswrapper[4773]: I0122 11:58:43.862875 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44223378-fd96-41e8-9189-558b02fb1d2d-catalog-content\") pod \"44223378-fd96-41e8-9189-558b02fb1d2d\" (UID: \"44223378-fd96-41e8-9189-558b02fb1d2d\") " Jan 22 11:58:43 crc kubenswrapper[4773]: I0122 11:58:43.862920 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5v9ht\" (UniqueName: \"kubernetes.io/projected/44223378-fd96-41e8-9189-558b02fb1d2d-kube-api-access-5v9ht\") pod \"44223378-fd96-41e8-9189-558b02fb1d2d\" (UID: \"44223378-fd96-41e8-9189-558b02fb1d2d\") " Jan 22 11:58:43 crc kubenswrapper[4773]: I0122 11:58:43.863659 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44223378-fd96-41e8-9189-558b02fb1d2d-utilities" (OuterVolumeSpecName: "utilities") pod "44223378-fd96-41e8-9189-558b02fb1d2d" (UID: "44223378-fd96-41e8-9189-558b02fb1d2d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:58:43 crc kubenswrapper[4773]: I0122 11:58:43.867483 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44223378-fd96-41e8-9189-558b02fb1d2d-kube-api-access-5v9ht" (OuterVolumeSpecName: "kube-api-access-5v9ht") pod "44223378-fd96-41e8-9189-558b02fb1d2d" (UID: "44223378-fd96-41e8-9189-558b02fb1d2d"). InnerVolumeSpecName "kube-api-access-5v9ht". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:58:43 crc kubenswrapper[4773]: I0122 11:58:43.964071 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44223378-fd96-41e8-9189-558b02fb1d2d-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:43 crc kubenswrapper[4773]: I0122 11:58:43.964119 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5v9ht\" (UniqueName: \"kubernetes.io/projected/44223378-fd96-41e8-9189-558b02fb1d2d-kube-api-access-5v9ht\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:43 crc kubenswrapper[4773]: I0122 11:58:43.978971 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44223378-fd96-41e8-9189-558b02fb1d2d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "44223378-fd96-41e8-9189-558b02fb1d2d" (UID: "44223378-fd96-41e8-9189-558b02fb1d2d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:58:44 crc kubenswrapper[4773]: I0122 11:58:44.064955 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44223378-fd96-41e8-9189-558b02fb1d2d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:44 crc kubenswrapper[4773]: I0122 11:58:44.569949 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vfkxp" Jan 22 11:58:44 crc kubenswrapper[4773]: I0122 11:58:44.569951 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vfkxp" event={"ID":"44223378-fd96-41e8-9189-558b02fb1d2d","Type":"ContainerDied","Data":"7812948d3b5807150e8279fe5bac0853eca2d4513efb7e9b63ede5f4143d2668"} Jan 22 11:58:44 crc kubenswrapper[4773]: I0122 11:58:44.570239 4773 scope.go:117] "RemoveContainer" containerID="a4cfd3cb69a3d9bb684fcb81df94582025d315d689175025889b9a93923b2b9b" Jan 22 11:58:44 crc kubenswrapper[4773]: I0122 11:58:44.588143 4773 scope.go:117] "RemoveContainer" containerID="7eeb7e594ab3502daae4f46ef9240936aa7d59abfbe5f749a3b4b37706af1a31" Jan 22 11:58:44 crc kubenswrapper[4773]: I0122 11:58:44.602417 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vfkxp"] Jan 22 11:58:44 crc kubenswrapper[4773]: I0122 11:58:44.602498 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vfkxp"] Jan 22 11:58:44 crc kubenswrapper[4773]: I0122 11:58:44.623610 4773 scope.go:117] "RemoveContainer" containerID="0d1ef56f0939ce050cc94f8f7be77bb067980a3501f051c1d06b949b40e20358" Jan 22 11:58:44 crc kubenswrapper[4773]: I0122 11:58:44.666551 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44223378-fd96-41e8-9189-558b02fb1d2d" path="/var/lib/kubelet/pods/44223378-fd96-41e8-9189-558b02fb1d2d/volumes" Jan 22 11:58:44 crc kubenswrapper[4773]: I0122 11:58:44.667133 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbe13262-f7ed-4311-bf73-c2dd21ef3733" path="/var/lib/kubelet/pods/fbe13262-f7ed-4311-bf73-c2dd21ef3733/volumes" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.901061 4773 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 22 11:58:48 crc kubenswrapper[4773]: E0122 11:58:48.901842 4773 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="44223378-fd96-41e8-9189-558b02fb1d2d" containerName="extract-content" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.901855 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="44223378-fd96-41e8-9189-558b02fb1d2d" containerName="extract-content" Jan 22 11:58:48 crc kubenswrapper[4773]: E0122 11:58:48.901894 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11240379-c011-45ef-8626-f59a8772a5ce" containerName="extract-utilities" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.901902 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="11240379-c011-45ef-8626-f59a8772a5ce" containerName="extract-utilities" Jan 22 11:58:48 crc kubenswrapper[4773]: E0122 11:58:48.901910 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44223378-fd96-41e8-9189-558b02fb1d2d" containerName="extract-utilities" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.901915 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="44223378-fd96-41e8-9189-558b02fb1d2d" containerName="extract-utilities" Jan 22 11:58:48 crc kubenswrapper[4773]: E0122 11:58:48.901929 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbe13262-f7ed-4311-bf73-c2dd21ef3733" containerName="extract-content" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.901936 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbe13262-f7ed-4311-bf73-c2dd21ef3733" containerName="extract-content" Jan 22 11:58:48 crc kubenswrapper[4773]: E0122 11:58:48.901944 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbe13262-f7ed-4311-bf73-c2dd21ef3733" containerName="extract-utilities" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.901950 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbe13262-f7ed-4311-bf73-c2dd21ef3733" containerName="extract-utilities" Jan 22 11:58:48 crc kubenswrapper[4773]: E0122 11:58:48.901962 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbe13262-f7ed-4311-bf73-c2dd21ef3733" containerName="registry-server" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.901968 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbe13262-f7ed-4311-bf73-c2dd21ef3733" containerName="registry-server" Jan 22 11:58:48 crc kubenswrapper[4773]: E0122 11:58:48.901974 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44223378-fd96-41e8-9189-558b02fb1d2d" containerName="registry-server" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.901980 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="44223378-fd96-41e8-9189-558b02fb1d2d" containerName="registry-server" Jan 22 11:58:48 crc kubenswrapper[4773]: E0122 11:58:48.901992 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11240379-c011-45ef-8626-f59a8772a5ce" containerName="registry-server" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.901999 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="11240379-c011-45ef-8626-f59a8772a5ce" containerName="registry-server" Jan 22 11:58:48 crc kubenswrapper[4773]: E0122 11:58:48.902015 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11240379-c011-45ef-8626-f59a8772a5ce" containerName="extract-content" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.902024 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="11240379-c011-45ef-8626-f59a8772a5ce" containerName="extract-content" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.902375 4773 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="fbe13262-f7ed-4311-bf73-c2dd21ef3733" containerName="registry-server" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.902395 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="11240379-c011-45ef-8626-f59a8772a5ce" containerName="registry-server" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.902410 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="44223378-fd96-41e8-9189-558b02fb1d2d" containerName="registry-server" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.902822 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.904351 4773 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.904691 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592" gracePeriod=15 Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.904741 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828" gracePeriod=15 Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.904772 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b" gracePeriod=15 Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.904783 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56" gracePeriod=15 Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.904792 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b" gracePeriod=15 Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.907339 4773 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 11:58:48 crc kubenswrapper[4773]: E0122 11:58:48.907697 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.907727 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 22 11:58:48 crc kubenswrapper[4773]: E0122 11:58:48.907742 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 
22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.907751 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 22 11:58:48 crc kubenswrapper[4773]: E0122 11:58:48.907761 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.907768 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 11:58:48 crc kubenswrapper[4773]: E0122 11:58:48.907777 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.907784 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 22 11:58:48 crc kubenswrapper[4773]: E0122 11:58:48.907794 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.907801 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 22 11:58:48 crc kubenswrapper[4773]: E0122 11:58:48.907816 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.907823 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 22 11:58:48 crc kubenswrapper[4773]: E0122 11:58:48.907835 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.907844 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.908102 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.908125 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.908137 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.908148 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.908161 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 22 11:58:48 crc kubenswrapper[4773]: I0122 11:58:48.908170 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 22 11:58:49 
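The RemoveStaleState / "Deleted CPUSet assignment" run above is the resource managers reconciling their checkpoints as the static pod is replaced: any per-container CPU or memory assignment whose pod is no longer in the active set is dropped before the new kube-apiserver pod is admitted. A toy Go sketch of that sweep, using illustrative types rather than the kubelet's state objects:

package main

import "fmt"

type key struct{ podUID, containerName string }

// removeStaleState drops assignments for pods that are no longer active,
// logging each removal the way cpu_manager.go does in the entries above.
func removeStaleState(assignments map[key]string, activePods map[string]bool) {
	for k := range assignments { // deleting during range is safe in Go
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
				k.podUID, k.containerName)
			delete(assignments, k)
		}
	}
}

func main() {
	assignments := map[key]string{
		{"f4b27818a5e8e43d0dc095d08835c792", "kube-apiserver"}:             "cpuset 0-3",
		{"f4b27818a5e8e43d0dc095d08835c792", "kube-apiserver-cert-syncer"}: "cpuset 0-3",
	}
	removeStaleState(assignments, map[string]bool{}) // old static pod already gone
	fmt.Println("remaining:", len(assignments))      // remaining: 0
}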
crc kubenswrapper[4773]: I0122 11:58:49.026499 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.026592 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.026777 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.026934 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.027041 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.127839 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.128213 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.128240 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.127955 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod 
\"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.128270 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.128322 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.128365 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.128376 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.128387 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.128419 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.128497 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.128547 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.128574 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod 
\"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.229660 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.229726 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.229749 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.229793 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.229828 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.229793 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:58:49 crc kubenswrapper[4773]: E0122 11:58:49.449664 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:58:49Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:58:49Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:58:49Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:58:49Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:27cf3abbf8fd467e0024e29f4a1590ade73c4e616041027fc414be0d345fbddc\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:61565de83851ce1a60a7f5484dc89d16992896eb24005c0196eed44fc53d8e6a\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1671130350},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:0934f30eb8f9333151bdb8fb7ad24fe19bb186a20d28b0541182f909fb8f0145\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:dac313fa046b5a0965a26ce6996a51a0a3a77668fdbe4a5e5beea707e8024a2f\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1202844902},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:2b72e40c5d5b36b681f40c16ebf3dcac6520ed0c79f174ba87f673ab7afd209a\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:d83ee77ad07e06451a84205ac4c85c69e912a1c975e1a8a95095d79218028dce\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1178956511},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:f5cc57bade9e356b6af4211c07e49cde20c7cb921769b00c2cf9bf1a17bf76fc\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:f6f94e2a83937ff48dd2dc14f55325f6ee2d688985dc375d44cb7ae105f593d7\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1169599210},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\
\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:49 crc kubenswrapper[4773]: E0122 11:58:49.450096 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get 
\"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:49 crc kubenswrapper[4773]: E0122 11:58:49.450486 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:49 crc kubenswrapper[4773]: E0122 11:58:49.450778 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:49 crc kubenswrapper[4773]: E0122 11:58:49.450942 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:49 crc kubenswrapper[4773]: E0122 11:58:49.450954 4773 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.600863 4773 generic.go:334] "Generic (PLEG): container finished" podID="59e7aa07-30e5-4e9b-9cd8-f80054e9d41c" containerID="78e4428fc53257f229a6dc671d3f3bfd258be98b0ae80ee2a21a57a16d09aeee" exitCode=0 Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.600937 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"59e7aa07-30e5-4e9b-9cd8-f80054e9d41c","Type":"ContainerDied","Data":"78e4428fc53257f229a6dc671d3f3bfd258be98b0ae80ee2a21a57a16d09aeee"} Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.601982 4773 status_manager.go:851] "Failed to get status for pod" podUID="59e7aa07-30e5-4e9b-9cd8-f80054e9d41c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.602382 4773 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.603766 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.605137 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.605774 4773 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828" exitCode=0 Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.605888 4773 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56" exitCode=0 Jan 22 11:58:49 crc kubenswrapper[4773]: 
I0122 11:58:49.605971 4773 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b" exitCode=0 Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.606056 4773 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b" exitCode=2 Jan 22 11:58:49 crc kubenswrapper[4773]: I0122 11:58:49.605841 4773 scope.go:117] "RemoveContainer" containerID="40a5d1580f80970121c6646df0ed93d33eeb8a7d98a5f0ccc486c995c8fe0fa8" Jan 22 11:58:50 crc kubenswrapper[4773]: I0122 11:58:50.614628 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.147493 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.148763 4773 status_manager.go:851] "Failed to get status for pod" podUID="59e7aa07-30e5-4e9b-9cd8-f80054e9d41c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.252190 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/59e7aa07-30e5-4e9b-9cd8-f80054e9d41c-var-lock\") pod \"59e7aa07-30e5-4e9b-9cd8-f80054e9d41c\" (UID: \"59e7aa07-30e5-4e9b-9cd8-f80054e9d41c\") " Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.252367 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/59e7aa07-30e5-4e9b-9cd8-f80054e9d41c-kubelet-dir\") pod \"59e7aa07-30e5-4e9b-9cd8-f80054e9d41c\" (UID: \"59e7aa07-30e5-4e9b-9cd8-f80054e9d41c\") " Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.252409 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/59e7aa07-30e5-4e9b-9cd8-f80054e9d41c-kube-api-access\") pod \"59e7aa07-30e5-4e9b-9cd8-f80054e9d41c\" (UID: \"59e7aa07-30e5-4e9b-9cd8-f80054e9d41c\") " Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.252989 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/59e7aa07-30e5-4e9b-9cd8-f80054e9d41c-var-lock" (OuterVolumeSpecName: "var-lock") pod "59e7aa07-30e5-4e9b-9cd8-f80054e9d41c" (UID: "59e7aa07-30e5-4e9b-9cd8-f80054e9d41c"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.253038 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/59e7aa07-30e5-4e9b-9cd8-f80054e9d41c-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "59e7aa07-30e5-4e9b-9cd8-f80054e9d41c" (UID: "59e7aa07-30e5-4e9b-9cd8-f80054e9d41c"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.257990 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59e7aa07-30e5-4e9b-9cd8-f80054e9d41c-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "59e7aa07-30e5-4e9b-9cd8-f80054e9d41c" (UID: "59e7aa07-30e5-4e9b-9cd8-f80054e9d41c"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.289938 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.290521 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.291039 4773 status_manager.go:851] "Failed to get status for pod" podUID="59e7aa07-30e5-4e9b-9cd8-f80054e9d41c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.291198 4773 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.353617 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/59e7aa07-30e5-4e9b-9cd8-f80054e9d41c-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.353654 4773 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/59e7aa07-30e5-4e9b-9cd8-f80054e9d41c-var-lock\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.353669 4773 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/59e7aa07-30e5-4e9b-9cd8-f80054e9d41c-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.455083 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.455211 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.455217 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.455245 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.455271 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.455345 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.455549 4773 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.455563 4773 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.455573 4773 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.623892 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.625368 4773 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592" exitCode=0 Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.625484 4773 scope.go:117] "RemoveContainer" containerID="b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.625525 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.627643 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"59e7aa07-30e5-4e9b-9cd8-f80054e9d41c","Type":"ContainerDied","Data":"2eb383b986d6a3f2d70bb97abb3ae2eb219c7234ae6ec5b6eca0e1f31e520cfc"} Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.627677 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2eb383b986d6a3f2d70bb97abb3ae2eb219c7234ae6ec5b6eca0e1f31e520cfc" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.627730 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.642126 4773 scope.go:117] "RemoveContainer" containerID="85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.646260 4773 status_manager.go:851] "Failed to get status for pod" podUID="59e7aa07-30e5-4e9b-9cd8-f80054e9d41c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.646471 4773 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.646749 4773 status_manager.go:851] "Failed to get status for pod" podUID="59e7aa07-30e5-4e9b-9cd8-f80054e9d41c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.646952 4773 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.656277 4773 scope.go:117] "RemoveContainer" containerID="666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.669222 4773 scope.go:117] "RemoveContainer" containerID="cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.684031 4773 scope.go:117] "RemoveContainer" containerID="6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.701690 4773 scope.go:117] "RemoveContainer" containerID="cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.725321 4773 scope.go:117] "RemoveContainer" containerID="b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828" Jan 22 11:58:51 crc kubenswrapper[4773]: E0122 11:58:51.725834 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code 
= NotFound desc = could not find container \"b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\": container with ID starting with b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828 not found: ID does not exist" containerID="b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.725876 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828"} err="failed to get container status \"b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\": rpc error: code = NotFound desc = could not find container \"b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828\": container with ID starting with b382c94d310616518d2769fd4545dca185f68f0260ad8cafb3810c15863a9828 not found: ID does not exist" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.725904 4773 scope.go:117] "RemoveContainer" containerID="85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56" Jan 22 11:58:51 crc kubenswrapper[4773]: E0122 11:58:51.726578 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\": container with ID starting with 85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56 not found: ID does not exist" containerID="85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.726614 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56"} err="failed to get container status \"85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\": rpc error: code = NotFound desc = could not find container \"85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56\": container with ID starting with 85ece709c04d7d29749e684076ef1612f2c7ebd39ed6bd667bc4800b94953a56 not found: ID does not exist" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.726637 4773 scope.go:117] "RemoveContainer" containerID="666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b" Jan 22 11:58:51 crc kubenswrapper[4773]: E0122 11:58:51.727133 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\": container with ID starting with 666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b not found: ID does not exist" containerID="666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.727164 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b"} err="failed to get container status \"666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\": rpc error: code = NotFound desc = could not find container \"666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b\": container with ID starting with 666c9df977166d50cf1f96e5cb3e7164c560fb75f815b9353d2018d47e7d9c9b not found: ID does not exist" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.727183 4773 scope.go:117] "RemoveContainer" 
containerID="cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b" Jan 22 11:58:51 crc kubenswrapper[4773]: E0122 11:58:51.727532 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\": container with ID starting with cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b not found: ID does not exist" containerID="cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.727567 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b"} err="failed to get container status \"cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\": rpc error: code = NotFound desc = could not find container \"cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b\": container with ID starting with cca8061fc72f74680269aea54936f8a78bfe597000ead357788dcd731e64f46b not found: ID does not exist" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.727590 4773 scope.go:117] "RemoveContainer" containerID="6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592" Jan 22 11:58:51 crc kubenswrapper[4773]: E0122 11:58:51.727996 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\": container with ID starting with 6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592 not found: ID does not exist" containerID="6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.728031 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592"} err="failed to get container status \"6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\": rpc error: code = NotFound desc = could not find container \"6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592\": container with ID starting with 6f83d87c4d597cbc4f9f9618dd4739fb15885cccb9786339358dca142e752592 not found: ID does not exist" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.728050 4773 scope.go:117] "RemoveContainer" containerID="cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b" Jan 22 11:58:51 crc kubenswrapper[4773]: E0122 11:58:51.728463 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\": container with ID starting with cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b not found: ID does not exist" containerID="cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b" Jan 22 11:58:51 crc kubenswrapper[4773]: I0122 11:58:51.728499 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b"} err="failed to get container status \"cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\": rpc error: code = NotFound desc = could not find container \"cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b\": container with ID starting with 
cc78e5ccfa64f3f4be8ae29edcfc33d7a4642a1073b9889e3ec2693c3e26c19b not found: ID does not exist" Jan 22 11:58:52 crc kubenswrapper[4773]: I0122 11:58:52.661214 4773 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:52 crc kubenswrapper[4773]: I0122 11:58:52.662022 4773 status_manager.go:851] "Failed to get status for pod" podUID="59e7aa07-30e5-4e9b-9cd8-f80054e9d41c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:52 crc kubenswrapper[4773]: I0122 11:58:52.671011 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 22 11:58:53 crc kubenswrapper[4773]: E0122 11:58:53.938463 4773 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.97:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 11:58:53 crc kubenswrapper[4773]: I0122 11:58:53.938992 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 11:58:53 crc kubenswrapper[4773]: W0122 11:58:53.961950 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-4d15a404f938166324858f59a2e06aeae8cced9bff98ad02e77cf4b8d6d4d638 WatchSource:0}: Error finding container 4d15a404f938166324858f59a2e06aeae8cced9bff98ad02e77cf4b8d6d4d638: Status 404 returned error can't find the container with id 4d15a404f938166324858f59a2e06aeae8cced9bff98ad02e77cf4b8d6d4d638 Jan 22 11:58:53 crc kubenswrapper[4773]: E0122 11:58:53.965609 4773 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.97:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188d0bbcc21f4d23 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-22 11:58:53.965061411 +0000 UTC m=+241.543177236,LastTimestamp:2026-01-22 11:58:53.965061411 +0000 UTC m=+241.543177236,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 22 11:58:54 crc kubenswrapper[4773]: I0122 11:58:54.652001 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
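The RemoveContainer / NotFound pairs above are the benign double-delete path: the kubelet asks the runtime for the status of a container it has just removed, CRI-O answers with gRPC code NotFound, and the kubelet logs "DeleteContainer returned error" and carries on, because "already gone" is a success for a delete. A hedged sketch of that idempotent pattern; the runtimeService interface is a stand-in for the CRI client, while the status/codes check is the standard gRPC idiom:

package main

import (
	"context"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// runtimeService is a stand-in for the CRI runtime client (illustrative only).
type runtimeService interface {
	ContainerStatus(ctx context.Context, id string) error
	RemoveContainer(ctx context.Context, id string) error
}

// removeIfPresent treats gRPC NotFound as success: a container that no longer
// exists does not need removing, which is how the kubelet shrugs off the
// "container with ID starting with ... not found" errors in the log above.
func removeIfPresent(ctx context.Context, rt runtimeService, id string) error {
	if err := rt.ContainerStatus(ctx, id); err != nil {
		if status.Code(err) == codes.NotFound {
			fmt.Printf("container %s already gone, nothing to do\n", id)
			return nil
		}
		return fmt.Errorf("failed to get container status %q: %w", id, err)
	}
	return rt.RemoveContainer(ctx, id)
}

// fakeRT simulates a runtime that has already forgotten the container.
type fakeRT struct{}

func (fakeRT) ContainerStatus(_ context.Context, id string) error {
	return status.Error(codes.NotFound, "could not find container "+id)
}

func (fakeRT) RemoveContainer(context.Context, string) error { return nil }

func main() {
	_ = removeIfPresent(context.Background(), fakeRT{}, "b382c94d3106")
}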
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"9539caa1087a83a7cf3aac1b526b0b8eb9b11ddde95e1352fcb29e340ecb6b9f"} Jan 22 11:58:54 crc kubenswrapper[4773]: I0122 11:58:54.652063 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"4d15a404f938166324858f59a2e06aeae8cced9bff98ad02e77cf4b8d6d4d638"} Jan 22 11:58:54 crc kubenswrapper[4773]: I0122 11:58:54.652587 4773 status_manager.go:851] "Failed to get status for pod" podUID="59e7aa07-30e5-4e9b-9cd8-f80054e9d41c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:54 crc kubenswrapper[4773]: E0122 11:58:54.652712 4773 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.97:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 11:58:55 crc kubenswrapper[4773]: E0122 11:58:55.531253 4773 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.97:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188d0bbcc21f4d23 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-22 11:58:53.965061411 +0000 UTC m=+241.543177236,LastTimestamp:2026-01-22 11:58:53.965061411 +0000 UTC m=+241.543177236,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 22 11:58:58 crc kubenswrapper[4773]: E0122 11:58:58.883631 4773 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:58 crc kubenswrapper[4773]: E0122 11:58:58.884434 4773 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:58 crc kubenswrapper[4773]: E0122 11:58:58.884729 4773 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:58 crc kubenswrapper[4773]: E0122 11:58:58.884999 4773 controller.go:195] "Failed to update lease" err="Put 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:58 crc kubenswrapper[4773]: E0122 11:58:58.885199 4773 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:58 crc kubenswrapper[4773]: I0122 11:58:58.885218 4773 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 22 11:58:58 crc kubenswrapper[4773]: E0122 11:58:58.885468 4773 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" interval="200ms" Jan 22 11:58:59 crc kubenswrapper[4773]: E0122 11:58:59.086953 4773 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" interval="400ms" Jan 22 11:58:59 crc kubenswrapper[4773]: E0122 11:58:59.488106 4773 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" interval="800ms" Jan 22 11:58:59 crc kubenswrapper[4773]: E0122 11:58:59.720421 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:58:59Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:58:59Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:58:59Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T11:58:59Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:27cf3abbf8fd467e0024e29f4a1590ade73c4e616041027fc414be0d345fbddc\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:61565de83851ce1a60a7f5484dc89d16992896eb24005c0196eed44fc53d8e6a\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1671130350},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:0934f30eb8f9333151bdb8fb7ad24fe19bb186a20d28b0541182f909fb8f0145\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:dac313fa046b5a0965a26ce6996a51a0a3a77668fdbe4a5e5beea707e8024a2f\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1202844902},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:2b72e40c5d5b36b681f40c16ebf3dcac6520ed0c79f174ba87f673ab7afd209a\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:d83ee77ad07e06451a84205ac4c85c69e912a1c975e1a8a95095d79218028dce\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1178956511},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:f5cc57bade9e356b6af4211c07e49cde20c7cb921769b00c2cf9bf1a17bf76fc\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:f6f94e2a83937ff48dd2dc14f55325f6ee2d688985dc375d44cb7ae105f593d7\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1169599210},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\
\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:59 crc kubenswrapper[4773]: E0122 11:58:59.721058 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get 
\"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:59 crc kubenswrapper[4773]: E0122 11:58:59.721406 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:59 crc kubenswrapper[4773]: E0122 11:58:59.721994 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:59 crc kubenswrapper[4773]: E0122 11:58:59.722541 4773 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:58:59 crc kubenswrapper[4773]: E0122 11:58:59.722560 4773 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 11:59:00 crc kubenswrapper[4773]: E0122 11:59:00.289912 4773 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" interval="1.6s" Jan 22 11:59:00 crc kubenswrapper[4773]: I0122 11:59:00.657833 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:59:00 crc kubenswrapper[4773]: I0122 11:59:00.659373 4773 status_manager.go:851] "Failed to get status for pod" podUID="59e7aa07-30e5-4e9b-9cd8-f80054e9d41c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:59:00 crc kubenswrapper[4773]: I0122 11:59:00.672197 4773 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="839760ed-cfeb-44fa-b15d-0a8c26187acc" Jan 22 11:59:00 crc kubenswrapper[4773]: I0122 11:59:00.672226 4773 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="839760ed-cfeb-44fa-b15d-0a8c26187acc" Jan 22 11:59:00 crc kubenswrapper[4773]: E0122 11:59:00.672463 4773 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.97:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:59:00 crc kubenswrapper[4773]: I0122 11:59:00.672844 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:59:01 crc kubenswrapper[4773]: I0122 11:59:01.690804 4773 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="46eb932fcc9689c1291ed0bd524b0238d3c59ca7c92862c5ed8842795453c1fb" exitCode=0 Jan 22 11:59:01 crc kubenswrapper[4773]: I0122 11:59:01.690979 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"46eb932fcc9689c1291ed0bd524b0238d3c59ca7c92862c5ed8842795453c1fb"} Jan 22 11:59:01 crc kubenswrapper[4773]: I0122 11:59:01.691488 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"830a69d86ccbbbda7b64df2e4220e217645cb799fe7899155cf761bcdcb88d20"} Jan 22 11:59:01 crc kubenswrapper[4773]: I0122 11:59:01.692175 4773 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="839760ed-cfeb-44fa-b15d-0a8c26187acc" Jan 22 11:59:01 crc kubenswrapper[4773]: I0122 11:59:01.692200 4773 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="839760ed-cfeb-44fa-b15d-0a8c26187acc" Jan 22 11:59:01 crc kubenswrapper[4773]: E0122 11:59:01.692702 4773 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.97:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:59:01 crc kubenswrapper[4773]: I0122 11:59:01.692728 4773 status_manager.go:851] "Failed to get status for pod" podUID="59e7aa07-30e5-4e9b-9cd8-f80054e9d41c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:59:01 crc kubenswrapper[4773]: I0122 11:59:01.696045 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 22 11:59:01 crc kubenswrapper[4773]: I0122 11:59:01.696117 4773 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa" exitCode=1 Jan 22 11:59:01 crc kubenswrapper[4773]: I0122 11:59:01.696156 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa"} Jan 22 11:59:01 crc kubenswrapper[4773]: I0122 11:59:01.696904 4773 scope.go:117] "RemoveContainer" containerID="eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa" Jan 22 11:59:01 crc kubenswrapper[4773]: I0122 11:59:01.697042 4773 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:59:01 crc 
kubenswrapper[4773]: I0122 11:59:01.697435 4773 status_manager.go:851] "Failed to get status for pod" podUID="59e7aa07-30e5-4e9b-9cd8-f80054e9d41c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.97:6443: connect: connection refused" Jan 22 11:59:01 crc kubenswrapper[4773]: E0122 11:59:01.890786 4773 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.97:6443: connect: connection refused" interval="3.2s" Jan 22 11:59:02 crc kubenswrapper[4773]: I0122 11:59:02.722462 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 22 11:59:02 crc kubenswrapper[4773]: I0122 11:59:02.722549 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"49dfd5d1636b97f6d8e6578fef0f0e3409e7191ec32c5849a67938c3c15585e5"} Jan 22 11:59:02 crc kubenswrapper[4773]: I0122 11:59:02.730519 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"70b601cbefb9926f5087c953039726962266a27d957b054f24360e4b0db09e39"} Jan 22 11:59:02 crc kubenswrapper[4773]: I0122 11:59:02.730563 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"458dee2c9a178e04e116e81ed0e31543eec039b246ed3a54c9bb07459bd6b733"} Jan 22 11:59:02 crc kubenswrapper[4773]: I0122 11:59:02.730573 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"8f575c93258166b533acfc3dedbc14dbd5cfa2b3ea60bdc4d8a19ffe8d35a258"} Jan 22 11:59:02 crc kubenswrapper[4773]: I0122 11:59:02.730581 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"04a71cdb9ca7af4e8f1e706a47eee51dfdb82b6245385df1b47c07d278293fb8"} Jan 22 11:59:03 crc kubenswrapper[4773]: I0122 11:59:03.738968 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d72ea04c814a51f686ff4821fce0ccd6c3b7258a28b85eae5c860d2c976e96b6"} Jan 22 11:59:03 crc kubenswrapper[4773]: I0122 11:59:03.739340 4773 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="839760ed-cfeb-44fa-b15d-0a8c26187acc" Jan 22 11:59:03 crc kubenswrapper[4773]: I0122 11:59:03.739357 4773 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="839760ed-cfeb-44fa-b15d-0a8c26187acc" Jan 22 11:59:03 crc kubenswrapper[4773]: I0122 11:59:03.739616 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:59:05 crc kubenswrapper[4773]: I0122 11:59:05.673952 4773 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:59:05 crc kubenswrapper[4773]: I0122 11:59:05.674306 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:59:05 crc kubenswrapper[4773]: I0122 11:59:05.679966 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:59:06 crc kubenswrapper[4773]: I0122 11:59:06.301777 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 11:59:06 crc kubenswrapper[4773]: I0122 11:59:06.302025 4773 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 22 11:59:06 crc kubenswrapper[4773]: I0122 11:59:06.302088 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 22 11:59:08 crc kubenswrapper[4773]: I0122 11:59:08.082903 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 11:59:08 crc kubenswrapper[4773]: I0122 11:59:08.749624 4773 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:59:09 crc kubenswrapper[4773]: I0122 11:59:09.768022 4773 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="839760ed-cfeb-44fa-b15d-0a8c26187acc" Jan 22 11:59:09 crc kubenswrapper[4773]: I0122 11:59:09.768055 4773 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="839760ed-cfeb-44fa-b15d-0a8c26187acc" Jan 22 11:59:09 crc kubenswrapper[4773]: I0122 11:59:09.773843 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 11:59:09 crc kubenswrapper[4773]: I0122 11:59:09.777599 4773 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="81d8f818-df4f-4097-b5d7-b5b89e24d773" Jan 22 11:59:10 crc kubenswrapper[4773]: I0122 11:59:10.772383 4773 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="839760ed-cfeb-44fa-b15d-0a8c26187acc" Jan 22 11:59:10 crc kubenswrapper[4773]: I0122 11:59:10.772713 4773 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="839760ed-cfeb-44fa-b15d-0a8c26187acc" Jan 22 11:59:12 crc kubenswrapper[4773]: I0122 11:59:12.671878 4773 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="81d8f818-df4f-4097-b5d7-b5b89e24d773" Jan 22 11:59:16 crc kubenswrapper[4773]: I0122 11:59:16.302404 4773 patch_prober.go:28] interesting pod/kube-controller-manager-crc 
container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 22 11:59:16 crc kubenswrapper[4773]: I0122 11:59:16.303127 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 22 11:59:18 crc kubenswrapper[4773]: I0122 11:59:18.111558 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 22 11:59:18 crc kubenswrapper[4773]: I0122 11:59:18.767009 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 22 11:59:19 crc kubenswrapper[4773]: I0122 11:59:19.506905 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 22 11:59:19 crc kubenswrapper[4773]: I0122 11:59:19.625112 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 22 11:59:20 crc kubenswrapper[4773]: I0122 11:59:20.080766 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 22 11:59:20 crc kubenswrapper[4773]: I0122 11:59:20.201867 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 22 11:59:20 crc kubenswrapper[4773]: I0122 11:59:20.506929 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 22 11:59:20 crc kubenswrapper[4773]: I0122 11:59:20.591735 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 22 11:59:20 crc kubenswrapper[4773]: I0122 11:59:20.614979 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 22 11:59:20 crc kubenswrapper[4773]: I0122 11:59:20.683377 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 22 11:59:20 crc kubenswrapper[4773]: I0122 11:59:20.821105 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 22 11:59:20 crc kubenswrapper[4773]: I0122 11:59:20.987870 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 22 11:59:21 crc kubenswrapper[4773]: I0122 11:59:21.253663 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 22 11:59:21 crc kubenswrapper[4773]: I0122 11:59:21.271316 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 22 11:59:21 crc kubenswrapper[4773]: I0122 11:59:21.338602 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 22 11:59:21 crc kubenswrapper[4773]: I0122 11:59:21.338807 4773 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 22 11:59:21 crc kubenswrapper[4773]: I0122 11:59:21.486674 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 22 11:59:21 crc kubenswrapper[4773]: I0122 11:59:21.494464 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 22 11:59:21 crc kubenswrapper[4773]: I0122 11:59:21.601758 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 22 11:59:21 crc kubenswrapper[4773]: I0122 11:59:21.640030 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 22 11:59:21 crc kubenswrapper[4773]: I0122 11:59:21.748233 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 22 11:59:21 crc kubenswrapper[4773]: I0122 11:59:21.748492 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 22 11:59:21 crc kubenswrapper[4773]: I0122 11:59:21.967356 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 22 11:59:22 crc kubenswrapper[4773]: I0122 11:59:22.257172 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 22 11:59:22 crc kubenswrapper[4773]: I0122 11:59:22.639416 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 22 11:59:22 crc kubenswrapper[4773]: I0122 11:59:22.916444 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 22 11:59:22 crc kubenswrapper[4773]: I0122 11:59:22.945254 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 22 11:59:23 crc kubenswrapper[4773]: I0122 11:59:23.029267 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 22 11:59:23 crc kubenswrapper[4773]: I0122 11:59:23.056321 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 22 11:59:23 crc kubenswrapper[4773]: I0122 11:59:23.151560 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 22 11:59:23 crc kubenswrapper[4773]: I0122 11:59:23.168497 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 22 11:59:23 crc kubenswrapper[4773]: I0122 11:59:23.280362 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 22 11:59:23 crc kubenswrapper[4773]: I0122 11:59:23.289214 4773 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 22 11:59:23 crc kubenswrapper[4773]: I0122 11:59:23.289861 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 22 11:59:23 crc kubenswrapper[4773]: I0122 11:59:23.331896 4773 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-dns"/"dns-default-metrics-tls" Jan 22 11:59:23 crc kubenswrapper[4773]: I0122 11:59:23.428195 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 22 11:59:23 crc kubenswrapper[4773]: I0122 11:59:23.470002 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 22 11:59:23 crc kubenswrapper[4773]: I0122 11:59:23.490308 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 22 11:59:23 crc kubenswrapper[4773]: I0122 11:59:23.541169 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 22 11:59:23 crc kubenswrapper[4773]: I0122 11:59:23.547972 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 22 11:59:23 crc kubenswrapper[4773]: I0122 11:59:23.552779 4773 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 22 11:59:23 crc kubenswrapper[4773]: I0122 11:59:23.554104 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 22 11:59:23 crc kubenswrapper[4773]: I0122 11:59:23.643510 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 22 11:59:23 crc kubenswrapper[4773]: I0122 11:59:23.794268 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 22 11:59:23 crc kubenswrapper[4773]: I0122 11:59:23.867154 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 22 11:59:24 crc kubenswrapper[4773]: I0122 11:59:24.058417 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 22 11:59:24 crc kubenswrapper[4773]: I0122 11:59:24.105562 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 22 11:59:24 crc kubenswrapper[4773]: I0122 11:59:24.117038 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 22 11:59:24 crc kubenswrapper[4773]: I0122 11:59:24.288595 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 22 11:59:24 crc kubenswrapper[4773]: I0122 11:59:24.490586 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 22 11:59:24 crc kubenswrapper[4773]: I0122 11:59:24.578191 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 22 11:59:24 crc kubenswrapper[4773]: I0122 11:59:24.589708 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 22 11:59:24 crc kubenswrapper[4773]: I0122 11:59:24.613948 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 22 11:59:24 crc 
kubenswrapper[4773]: I0122 11:59:24.627962 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 22 11:59:24 crc kubenswrapper[4773]: I0122 11:59:24.798966 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 22 11:59:24 crc kubenswrapper[4773]: I0122 11:59:24.868427 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 22 11:59:24 crc kubenswrapper[4773]: I0122 11:59:24.954007 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.016587 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.087304 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.092375 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.152662 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.203620 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.210148 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.242840 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.252333 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.281873 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.297098 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.365119 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.394961 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.397133 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.440412 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.605785 4773 reflector.go:368] Caches populated for *v1.ConfigMap from 
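Each "Caches populated" record above is one client-go reflector completing its initial List against the recovered API server; the kubelet keeps a reflector per ConfigMap and Secret that pods on the node reference, which is why they arrive as a flood once connectivity returns. A consumer-side sketch using the stock informer machinery; the kubeconfig path is illustrative, while WaitForCacheSync and the factory calls are the real client-go API:

package main

import (
	"fmt"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Illustrative kubeconfig path; any reachable cluster works.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/kubelet/kubeconfig")
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	stop := make(chan struct{})
	defer close(stop)

	// One shared factory; the kubelet keeps comparable per-object reflectors
	// for the ConfigMaps and Secrets its pods mount.
	factory := informers.NewSharedInformerFactory(client, 0)
	cmInformer := factory.Core().V1().ConfigMaps().Informer()
	secretInformer := factory.Core().V1().Secrets().Informer()

	factory.Start(stop)

	// Block until the initial List for each type has landed in the local
	// cache, which is the moment the log reports "Caches populated".
	if !cache.WaitForCacheSync(stop, cmInformer.HasSynced, secretInformer.HasSynced) {
		panic("caches never synced")
	}
	fmt.Println("Caches populated for *v1.ConfigMap and *v1.Secret")
}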
object-"openshift-console"/"service-ca" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.737592 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.750391 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.777917 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.881371 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.894885 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.924803 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 22 11:59:25 crc kubenswrapper[4773]: I0122 11:59:25.957180 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.002055 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.020887 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.064834 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.112694 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.301679 4773 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.301760 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.301833 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.302661 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"49dfd5d1636b97f6d8e6578fef0f0e3409e7191ec32c5849a67938c3c15585e5"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted" Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.302846 4773 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://49dfd5d1636b97f6d8e6578fef0f0e3409e7191ec32c5849a67938c3c15585e5" gracePeriod=30 Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.398532 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.508458 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.618329 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.648315 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.735822 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.794889 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.815063 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.818410 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.839237 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.872615 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.958850 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 22 11:59:26 crc kubenswrapper[4773]: I0122 11:59:26.989170 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.000519 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.096080 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.192975 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.217697 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.283349 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 22 11:59:27 crc 
kubenswrapper[4773]: I0122 11:59:27.289723 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.302577 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.354971 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.405451 4773 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.416869 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.470842 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.510674 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.576704 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.649375 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.676092 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.775424 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.775686 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.794136 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.802137 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.804379 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.850750 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.865396 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.916509 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.948399 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 22 11:59:27 crc kubenswrapper[4773]: I0122 11:59:27.971256 4773 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.104581 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.104590 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.132613 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.236345 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.247111 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.283924 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.319215 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.453056 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.693481 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.706372 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.760159 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.795670 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.811234 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.814140 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.821344 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.882052 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.901141 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.910910 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 22 11:59:28 
crc kubenswrapper[4773]: I0122 11:59:28.919097 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.941373 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.956518 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.973333 4773 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.977604 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.977660 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.977678 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dfvhr","openshift-marketplace/community-operators-cf7rt","openshift-marketplace/redhat-marketplace-bwxfc","openshift-marketplace/certified-operators-s9kn2","openshift-marketplace/marketplace-operator-79b997595-tnzhm"] Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.977893 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm" podUID="4b093dc7-d5ee-409a-9c1c-e003686d44e4" containerName="marketplace-operator" containerID="cri-o://edf531b173a947c692a696caa8882bda7f42a3f57ac24f563d1348713214b2f9" gracePeriod=30 Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.978382 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dfvhr" podUID="fc3f7a11-33be-4f89-abf4-62f1f13ad823" containerName="registry-server" containerID="cri-o://822ab159fdfcb964b2bd0790e913ff65ba684b63eba1a9bff1efe0fbd713d1e2" gracePeriod=30 Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.978593 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cf7rt" podUID="ce66454f-39e9-4aac-9887-987e15252181" containerName="registry-server" containerID="cri-o://5b19a86913d1a6cf02c8b7079ca0552552145d6b68436ef94e7910202daab85a" gracePeriod=30 Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.978728 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bwxfc" podUID="be427c4b-854d-442d-86b1-19ee1d69814e" containerName="registry-server" containerID="cri-o://e69f40cd0af4f5aade28a8494d9fe3931576f6ad9506b6efac93669285ee9e2e" gracePeriod=30 Jan 22 11:59:28 crc kubenswrapper[4773]: I0122 11:59:28.978787 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-s9kn2" podUID="c4d3a6db-50cb-4809-ac76-edb0a9d949a1" containerName="registry-server" containerID="cri-o://a9ce7fd1f6dba8f6bfc286f790e5fd0707b4e25764f738e7176b776bd3154f70" gracePeriod=30 Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.010336 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.022545 4773 reflector.go:368] Caches populated 
for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.025093 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.026565 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=21.026547262 podStartE2EDuration="21.026547262s" podCreationTimestamp="2026-01-22 11:59:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 11:59:29.023956474 +0000 UTC m=+276.602072299" watchObservedRunningTime="2026-01-22 11:59:29.026547262 +0000 UTC m=+276.604663087" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.059800 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.162532 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.199987 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.247770 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.348944 4773 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.460392 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.477034 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s9kn2" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.481016 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.486317 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.500333 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cf7rt" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.505603 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4b093dc7-d5ee-409a-9c1c-e003686d44e4-marketplace-operator-metrics\") pod \"4b093dc7-d5ee-409a-9c1c-e003686d44e4\" (UID: \"4b093dc7-d5ee-409a-9c1c-e003686d44e4\") " Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.505730 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4b093dc7-d5ee-409a-9c1c-e003686d44e4-marketplace-trusted-ca\") pod \"4b093dc7-d5ee-409a-9c1c-e003686d44e4\" (UID: \"4b093dc7-d5ee-409a-9c1c-e003686d44e4\") " Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.505796 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzvsl\" (UniqueName: \"kubernetes.io/projected/4b093dc7-d5ee-409a-9c1c-e003686d44e4-kube-api-access-pzvsl\") pod \"4b093dc7-d5ee-409a-9c1c-e003686d44e4\" (UID: \"4b093dc7-d5ee-409a-9c1c-e003686d44e4\") " Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.505824 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4d3a6db-50cb-4809-ac76-edb0a9d949a1-catalog-content\") pod \"c4d3a6db-50cb-4809-ac76-edb0a9d949a1\" (UID: \"c4d3a6db-50cb-4809-ac76-edb0a9d949a1\") " Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.505861 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce66454f-39e9-4aac-9887-987e15252181-catalog-content\") pod \"ce66454f-39e9-4aac-9887-987e15252181\" (UID: \"ce66454f-39e9-4aac-9887-987e15252181\") " Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.505907 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce66454f-39e9-4aac-9887-987e15252181-utilities\") pod \"ce66454f-39e9-4aac-9887-987e15252181\" (UID: \"ce66454f-39e9-4aac-9887-987e15252181\") " Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.505946 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8p99s\" (UniqueName: \"kubernetes.io/projected/c4d3a6db-50cb-4809-ac76-edb0a9d949a1-kube-api-access-8p99s\") pod \"c4d3a6db-50cb-4809-ac76-edb0a9d949a1\" (UID: \"c4d3a6db-50cb-4809-ac76-edb0a9d949a1\") " Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.505982 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4d3a6db-50cb-4809-ac76-edb0a9d949a1-utilities\") pod \"c4d3a6db-50cb-4809-ac76-edb0a9d949a1\" (UID: \"c4d3a6db-50cb-4809-ac76-edb0a9d949a1\") " Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.506005 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cpttn\" (UniqueName: \"kubernetes.io/projected/ce66454f-39e9-4aac-9887-987e15252181-kube-api-access-cpttn\") pod \"ce66454f-39e9-4aac-9887-987e15252181\" (UID: \"ce66454f-39e9-4aac-9887-987e15252181\") " Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.507570 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/4b093dc7-d5ee-409a-9c1c-e003686d44e4-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "4b093dc7-d5ee-409a-9c1c-e003686d44e4" (UID: "4b093dc7-d5ee-409a-9c1c-e003686d44e4"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.508079 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4d3a6db-50cb-4809-ac76-edb0a9d949a1-utilities" (OuterVolumeSpecName: "utilities") pod "c4d3a6db-50cb-4809-ac76-edb0a9d949a1" (UID: "c4d3a6db-50cb-4809-ac76-edb0a9d949a1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.511575 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce66454f-39e9-4aac-9887-987e15252181-utilities" (OuterVolumeSpecName: "utilities") pod "ce66454f-39e9-4aac-9887-987e15252181" (UID: "ce66454f-39e9-4aac-9887-987e15252181"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.512832 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce66454f-39e9-4aac-9887-987e15252181-kube-api-access-cpttn" (OuterVolumeSpecName: "kube-api-access-cpttn") pod "ce66454f-39e9-4aac-9887-987e15252181" (UID: "ce66454f-39e9-4aac-9887-987e15252181"). InnerVolumeSpecName "kube-api-access-cpttn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.512945 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4d3a6db-50cb-4809-ac76-edb0a9d949a1-kube-api-access-8p99s" (OuterVolumeSpecName: "kube-api-access-8p99s") pod "c4d3a6db-50cb-4809-ac76-edb0a9d949a1" (UID: "c4d3a6db-50cb-4809-ac76-edb0a9d949a1"). InnerVolumeSpecName "kube-api-access-8p99s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.513828 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b093dc7-d5ee-409a-9c1c-e003686d44e4-kube-api-access-pzvsl" (OuterVolumeSpecName: "kube-api-access-pzvsl") pod "4b093dc7-d5ee-409a-9c1c-e003686d44e4" (UID: "4b093dc7-d5ee-409a-9c1c-e003686d44e4"). InnerVolumeSpecName "kube-api-access-pzvsl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.513814 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b093dc7-d5ee-409a-9c1c-e003686d44e4-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "4b093dc7-d5ee-409a-9c1c-e003686d44e4" (UID: "4b093dc7-d5ee-409a-9c1c-e003686d44e4"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.565973 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4d3a6db-50cb-4809-ac76-edb0a9d949a1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c4d3a6db-50cb-4809-ac76-edb0a9d949a1" (UID: "c4d3a6db-50cb-4809-ac76-edb0a9d949a1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.571685 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.571804 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.575135 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bwxfc" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.581893 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dfvhr" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.584706 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce66454f-39e9-4aac-9887-987e15252181-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ce66454f-39e9-4aac-9887-987e15252181" (UID: "ce66454f-39e9-4aac-9887-987e15252181"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.596115 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.606676 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc3f7a11-33be-4f89-abf4-62f1f13ad823-utilities\") pod \"fc3f7a11-33be-4f89-abf4-62f1f13ad823\" (UID: \"fc3f7a11-33be-4f89-abf4-62f1f13ad823\") " Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.606765 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnd5q\" (UniqueName: \"kubernetes.io/projected/be427c4b-854d-442d-86b1-19ee1d69814e-kube-api-access-rnd5q\") pod \"be427c4b-854d-442d-86b1-19ee1d69814e\" (UID: \"be427c4b-854d-442d-86b1-19ee1d69814e\") " Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.606818 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhclv\" (UniqueName: \"kubernetes.io/projected/fc3f7a11-33be-4f89-abf4-62f1f13ad823-kube-api-access-jhclv\") pod \"fc3f7a11-33be-4f89-abf4-62f1f13ad823\" (UID: \"fc3f7a11-33be-4f89-abf4-62f1f13ad823\") " Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.606923 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc3f7a11-33be-4f89-abf4-62f1f13ad823-catalog-content\") pod \"fc3f7a11-33be-4f89-abf4-62f1f13ad823\" (UID: \"fc3f7a11-33be-4f89-abf4-62f1f13ad823\") " Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.606960 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be427c4b-854d-442d-86b1-19ee1d69814e-utilities\") pod \"be427c4b-854d-442d-86b1-19ee1d69814e\" (UID: \"be427c4b-854d-442d-86b1-19ee1d69814e\") " Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.607110 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/be427c4b-854d-442d-86b1-19ee1d69814e-catalog-content\") pod \"be427c4b-854d-442d-86b1-19ee1d69814e\" (UID: \"be427c4b-854d-442d-86b1-19ee1d69814e\") " Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.607556 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce66454f-39e9-4aac-9887-987e15252181-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.607583 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8p99s\" (UniqueName: \"kubernetes.io/projected/c4d3a6db-50cb-4809-ac76-edb0a9d949a1-kube-api-access-8p99s\") on node \"crc\" DevicePath \"\"" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.607589 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc3f7a11-33be-4f89-abf4-62f1f13ad823-utilities" (OuterVolumeSpecName: "utilities") pod "fc3f7a11-33be-4f89-abf4-62f1f13ad823" (UID: "fc3f7a11-33be-4f89-abf4-62f1f13ad823"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.607630 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4d3a6db-50cb-4809-ac76-edb0a9d949a1-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.607655 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cpttn\" (UniqueName: \"kubernetes.io/projected/ce66454f-39e9-4aac-9887-987e15252181-kube-api-access-cpttn\") on node \"crc\" DevicePath \"\"" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.607683 4773 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4b093dc7-d5ee-409a-9c1c-e003686d44e4-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.607693 4773 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4b093dc7-d5ee-409a-9c1c-e003686d44e4-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.607702 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzvsl\" (UniqueName: \"kubernetes.io/projected/4b093dc7-d5ee-409a-9c1c-e003686d44e4-kube-api-access-pzvsl\") on node \"crc\" DevicePath \"\"" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.607710 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4d3a6db-50cb-4809-ac76-edb0a9d949a1-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.607718 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce66454f-39e9-4aac-9887-987e15252181-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.608369 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be427c4b-854d-442d-86b1-19ee1d69814e-utilities" (OuterVolumeSpecName: "utilities") pod "be427c4b-854d-442d-86b1-19ee1d69814e" (UID: "be427c4b-854d-442d-86b1-19ee1d69814e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.609815 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be427c4b-854d-442d-86b1-19ee1d69814e-kube-api-access-rnd5q" (OuterVolumeSpecName: "kube-api-access-rnd5q") pod "be427c4b-854d-442d-86b1-19ee1d69814e" (UID: "be427c4b-854d-442d-86b1-19ee1d69814e"). InnerVolumeSpecName "kube-api-access-rnd5q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.610331 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc3f7a11-33be-4f89-abf4-62f1f13ad823-kube-api-access-jhclv" (OuterVolumeSpecName: "kube-api-access-jhclv") pod "fc3f7a11-33be-4f89-abf4-62f1f13ad823" (UID: "fc3f7a11-33be-4f89-abf4-62f1f13ad823"). InnerVolumeSpecName "kube-api-access-jhclv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.632248 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.639676 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be427c4b-854d-442d-86b1-19ee1d69814e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "be427c4b-854d-442d-86b1-19ee1d69814e" (UID: "be427c4b-854d-442d-86b1-19ee1d69814e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.709136 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be427c4b-854d-442d-86b1-19ee1d69814e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.709213 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc3f7a11-33be-4f89-abf4-62f1f13ad823-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.709227 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnd5q\" (UniqueName: \"kubernetes.io/projected/be427c4b-854d-442d-86b1-19ee1d69814e-kube-api-access-rnd5q\") on node \"crc\" DevicePath \"\"" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.709240 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhclv\" (UniqueName: \"kubernetes.io/projected/fc3f7a11-33be-4f89-abf4-62f1f13ad823-kube-api-access-jhclv\") on node \"crc\" DevicePath \"\"" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.709253 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be427c4b-854d-442d-86b1-19ee1d69814e-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.712322 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.718704 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.739141 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc3f7a11-33be-4f89-abf4-62f1f13ad823-catalog-content" 
(OuterVolumeSpecName: "catalog-content") pod "fc3f7a11-33be-4f89-abf4-62f1f13ad823" (UID: "fc3f7a11-33be-4f89-abf4-62f1f13ad823"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.756980 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.789823 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.810952 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc3f7a11-33be-4f89-abf4-62f1f13ad823-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.821010 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.852567 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.873747 4773 generic.go:334] "Generic (PLEG): container finished" podID="4b093dc7-d5ee-409a-9c1c-e003686d44e4" containerID="edf531b173a947c692a696caa8882bda7f42a3f57ac24f563d1348713214b2f9" exitCode=0 Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.873793 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm" event={"ID":"4b093dc7-d5ee-409a-9c1c-e003686d44e4","Type":"ContainerDied","Data":"edf531b173a947c692a696caa8882bda7f42a3f57ac24f563d1348713214b2f9"} Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.873843 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm" event={"ID":"4b093dc7-d5ee-409a-9c1c-e003686d44e4","Type":"ContainerDied","Data":"c0c80447e29c19ebbec6f07be0636efba9a47132450d26ca0426501175cea1d9"} Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.873863 4773 scope.go:117] "RemoveContainer" containerID="edf531b173a947c692a696caa8882bda7f42a3f57ac24f563d1348713214b2f9" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.873802 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.876840 4773 generic.go:334] "Generic (PLEG): container finished" podID="be427c4b-854d-442d-86b1-19ee1d69814e" containerID="e69f40cd0af4f5aade28a8494d9fe3931576f6ad9506b6efac93669285ee9e2e" exitCode=0 Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.876922 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bwxfc" event={"ID":"be427c4b-854d-442d-86b1-19ee1d69814e","Type":"ContainerDied","Data":"e69f40cd0af4f5aade28a8494d9fe3931576f6ad9506b6efac93669285ee9e2e"} Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.876950 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bwxfc" event={"ID":"be427c4b-854d-442d-86b1-19ee1d69814e","Type":"ContainerDied","Data":"3e6fa17fa287403f9f79ffeb163c0a0ab074a513c6ebb754c6577f700af83f9c"} Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.876993 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bwxfc" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.880178 4773 generic.go:334] "Generic (PLEG): container finished" podID="fc3f7a11-33be-4f89-abf4-62f1f13ad823" containerID="822ab159fdfcb964b2bd0790e913ff65ba684b63eba1a9bff1efe0fbd713d1e2" exitCode=0 Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.880236 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dfvhr" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.880258 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dfvhr" event={"ID":"fc3f7a11-33be-4f89-abf4-62f1f13ad823","Type":"ContainerDied","Data":"822ab159fdfcb964b2bd0790e913ff65ba684b63eba1a9bff1efe0fbd713d1e2"} Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.880303 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dfvhr" event={"ID":"fc3f7a11-33be-4f89-abf4-62f1f13ad823","Type":"ContainerDied","Data":"496031f0617503d12839fed7b7d30c20da3f8ccd415138e771a26452f2cdec12"} Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.884229 4773 generic.go:334] "Generic (PLEG): container finished" podID="ce66454f-39e9-4aac-9887-987e15252181" containerID="5b19a86913d1a6cf02c8b7079ca0552552145d6b68436ef94e7910202daab85a" exitCode=0 Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.884360 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cf7rt" event={"ID":"ce66454f-39e9-4aac-9887-987e15252181","Type":"ContainerDied","Data":"5b19a86913d1a6cf02c8b7079ca0552552145d6b68436ef94e7910202daab85a"} Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.884390 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cf7rt" event={"ID":"ce66454f-39e9-4aac-9887-987e15252181","Type":"ContainerDied","Data":"93bf5e16fd966159eb39bccb38a600e392f611e5c06245597802da79da2ed27b"} Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.884456 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cf7rt" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.889330 4773 generic.go:334] "Generic (PLEG): container finished" podID="c4d3a6db-50cb-4809-ac76-edb0a9d949a1" containerID="a9ce7fd1f6dba8f6bfc286f790e5fd0707b4e25764f738e7176b776bd3154f70" exitCode=0 Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.889361 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9kn2" event={"ID":"c4d3a6db-50cb-4809-ac76-edb0a9d949a1","Type":"ContainerDied","Data":"a9ce7fd1f6dba8f6bfc286f790e5fd0707b4e25764f738e7176b776bd3154f70"} Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.889392 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9kn2" event={"ID":"c4d3a6db-50cb-4809-ac76-edb0a9d949a1","Type":"ContainerDied","Data":"ce52184bb0243b576769de9b67dc16cc453924a2c3dd9785079c2580e8dccf54"} Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.889443 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s9kn2" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.893157 4773 scope.go:117] "RemoveContainer" containerID="edf531b173a947c692a696caa8882bda7f42a3f57ac24f563d1348713214b2f9" Jan 22 11:59:29 crc kubenswrapper[4773]: E0122 11:59:29.893487 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edf531b173a947c692a696caa8882bda7f42a3f57ac24f563d1348713214b2f9\": container with ID starting with edf531b173a947c692a696caa8882bda7f42a3f57ac24f563d1348713214b2f9 not found: ID does not exist" containerID="edf531b173a947c692a696caa8882bda7f42a3f57ac24f563d1348713214b2f9" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.893619 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edf531b173a947c692a696caa8882bda7f42a3f57ac24f563d1348713214b2f9"} err="failed to get container status \"edf531b173a947c692a696caa8882bda7f42a3f57ac24f563d1348713214b2f9\": rpc error: code = NotFound desc = could not find container \"edf531b173a947c692a696caa8882bda7f42a3f57ac24f563d1348713214b2f9\": container with ID starting with edf531b173a947c692a696caa8882bda7f42a3f57ac24f563d1348713214b2f9 not found: ID does not exist" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.893760 4773 scope.go:117] "RemoveContainer" containerID="e69f40cd0af4f5aade28a8494d9fe3931576f6ad9506b6efac93669285ee9e2e" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.903830 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tnzhm"] Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.907353 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tnzhm"] Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.914739 4773 scope.go:117] "RemoveContainer" containerID="b7af6c902a8041dc520299f455cfc07339ee9b0ec1578f1763ebdd2a20ce074b" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.914771 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dfvhr"] Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.924964 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dfvhr"] Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.928659 4773 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s9kn2"] Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.934855 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-s9kn2"] Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.941845 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bwxfc"] Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.946641 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bwxfc"] Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.950694 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cf7rt"] Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.950940 4773 scope.go:117] "RemoveContainer" containerID="c39444875f047470d82435aee5f50f4e05962e81f57fc53101598942da355328" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.963039 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cf7rt"] Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.967210 4773 scope.go:117] "RemoveContainer" containerID="e69f40cd0af4f5aade28a8494d9fe3931576f6ad9506b6efac93669285ee9e2e" Jan 22 11:59:29 crc kubenswrapper[4773]: E0122 11:59:29.967652 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e69f40cd0af4f5aade28a8494d9fe3931576f6ad9506b6efac93669285ee9e2e\": container with ID starting with e69f40cd0af4f5aade28a8494d9fe3931576f6ad9506b6efac93669285ee9e2e not found: ID does not exist" containerID="e69f40cd0af4f5aade28a8494d9fe3931576f6ad9506b6efac93669285ee9e2e" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.967692 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e69f40cd0af4f5aade28a8494d9fe3931576f6ad9506b6efac93669285ee9e2e"} err="failed to get container status \"e69f40cd0af4f5aade28a8494d9fe3931576f6ad9506b6efac93669285ee9e2e\": rpc error: code = NotFound desc = could not find container \"e69f40cd0af4f5aade28a8494d9fe3931576f6ad9506b6efac93669285ee9e2e\": container with ID starting with e69f40cd0af4f5aade28a8494d9fe3931576f6ad9506b6efac93669285ee9e2e not found: ID does not exist" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.967724 4773 scope.go:117] "RemoveContainer" containerID="b7af6c902a8041dc520299f455cfc07339ee9b0ec1578f1763ebdd2a20ce074b" Jan 22 11:59:29 crc kubenswrapper[4773]: E0122 11:59:29.968055 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7af6c902a8041dc520299f455cfc07339ee9b0ec1578f1763ebdd2a20ce074b\": container with ID starting with b7af6c902a8041dc520299f455cfc07339ee9b0ec1578f1763ebdd2a20ce074b not found: ID does not exist" containerID="b7af6c902a8041dc520299f455cfc07339ee9b0ec1578f1763ebdd2a20ce074b" Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.968084 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7af6c902a8041dc520299f455cfc07339ee9b0ec1578f1763ebdd2a20ce074b"} err="failed to get container status \"b7af6c902a8041dc520299f455cfc07339ee9b0ec1578f1763ebdd2a20ce074b\": rpc error: code = NotFound desc = could not find container \"b7af6c902a8041dc520299f455cfc07339ee9b0ec1578f1763ebdd2a20ce074b\": container with ID starting with 
b7af6c902a8041dc520299f455cfc07339ee9b0ec1578f1763ebdd2a20ce074b not found: ID does not exist"
Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.968107 4773 scope.go:117] "RemoveContainer" containerID="c39444875f047470d82435aee5f50f4e05962e81f57fc53101598942da355328"
Jan 22 11:59:29 crc kubenswrapper[4773]: E0122 11:59:29.968354 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c39444875f047470d82435aee5f50f4e05962e81f57fc53101598942da355328\": container with ID starting with c39444875f047470d82435aee5f50f4e05962e81f57fc53101598942da355328 not found: ID does not exist" containerID="c39444875f047470d82435aee5f50f4e05962e81f57fc53101598942da355328"
Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.968387 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c39444875f047470d82435aee5f50f4e05962e81f57fc53101598942da355328"} err="failed to get container status \"c39444875f047470d82435aee5f50f4e05962e81f57fc53101598942da355328\": rpc error: code = NotFound desc = could not find container \"c39444875f047470d82435aee5f50f4e05962e81f57fc53101598942da355328\": container with ID starting with c39444875f047470d82435aee5f50f4e05962e81f57fc53101598942da355328 not found: ID does not exist"
Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.968408 4773 scope.go:117] "RemoveContainer" containerID="822ab159fdfcb964b2bd0790e913ff65ba684b63eba1a9bff1efe0fbd713d1e2"
Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.980374 4773 scope.go:117] "RemoveContainer" containerID="e15c007f331e242c3e5ee9488875d308e67876e9110be201ea553c6dc2022fa4"
Jan 22 11:59:29 crc kubenswrapper[4773]: I0122 11:59:29.999716 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.000258 4773 scope.go:117] "RemoveContainer" containerID="61901523bb1789714c72f838e583da57677cb051a129614d7145b78393c5961f"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.003443 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.015126 4773 scope.go:117] "RemoveContainer" containerID="822ab159fdfcb964b2bd0790e913ff65ba684b63eba1a9bff1efe0fbd713d1e2"
Jan 22 11:59:30 crc kubenswrapper[4773]: E0122 11:59:30.015708 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"822ab159fdfcb964b2bd0790e913ff65ba684b63eba1a9bff1efe0fbd713d1e2\": container with ID starting with 822ab159fdfcb964b2bd0790e913ff65ba684b63eba1a9bff1efe0fbd713d1e2 not found: ID does not exist" containerID="822ab159fdfcb964b2bd0790e913ff65ba684b63eba1a9bff1efe0fbd713d1e2"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.015762 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"822ab159fdfcb964b2bd0790e913ff65ba684b63eba1a9bff1efe0fbd713d1e2"} err="failed to get container status \"822ab159fdfcb964b2bd0790e913ff65ba684b63eba1a9bff1efe0fbd713d1e2\": rpc error: code = NotFound desc = could not find container \"822ab159fdfcb964b2bd0790e913ff65ba684b63eba1a9bff1efe0fbd713d1e2\": container with ID starting with 822ab159fdfcb964b2bd0790e913ff65ba684b63eba1a9bff1efe0fbd713d1e2 not found: ID does not exist"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.015795 4773 scope.go:117] "RemoveContainer" containerID="e15c007f331e242c3e5ee9488875d308e67876e9110be201ea553c6dc2022fa4"
Jan 22 11:59:30 crc kubenswrapper[4773]: E0122 11:59:30.016330 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e15c007f331e242c3e5ee9488875d308e67876e9110be201ea553c6dc2022fa4\": container with ID starting with e15c007f331e242c3e5ee9488875d308e67876e9110be201ea553c6dc2022fa4 not found: ID does not exist" containerID="e15c007f331e242c3e5ee9488875d308e67876e9110be201ea553c6dc2022fa4"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.016360 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e15c007f331e242c3e5ee9488875d308e67876e9110be201ea553c6dc2022fa4"} err="failed to get container status \"e15c007f331e242c3e5ee9488875d308e67876e9110be201ea553c6dc2022fa4\": rpc error: code = NotFound desc = could not find container \"e15c007f331e242c3e5ee9488875d308e67876e9110be201ea553c6dc2022fa4\": container with ID starting with e15c007f331e242c3e5ee9488875d308e67876e9110be201ea553c6dc2022fa4 not found: ID does not exist"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.016387 4773 scope.go:117] "RemoveContainer" containerID="61901523bb1789714c72f838e583da57677cb051a129614d7145b78393c5961f"
Jan 22 11:59:30 crc kubenswrapper[4773]: E0122 11:59:30.016704 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61901523bb1789714c72f838e583da57677cb051a129614d7145b78393c5961f\": container with ID starting with 61901523bb1789714c72f838e583da57677cb051a129614d7145b78393c5961f not found: ID does not exist" containerID="61901523bb1789714c72f838e583da57677cb051a129614d7145b78393c5961f"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.016725 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61901523bb1789714c72f838e583da57677cb051a129614d7145b78393c5961f"} err="failed to get container status \"61901523bb1789714c72f838e583da57677cb051a129614d7145b78393c5961f\": rpc error: code = NotFound desc = could not find container \"61901523bb1789714c72f838e583da57677cb051a129614d7145b78393c5961f\": container with ID starting with 61901523bb1789714c72f838e583da57677cb051a129614d7145b78393c5961f not found: ID does not exist"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.016745 4773 scope.go:117] "RemoveContainer" containerID="5b19a86913d1a6cf02c8b7079ca0552552145d6b68436ef94e7910202daab85a"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.026269 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.028738 4773 scope.go:117] "RemoveContainer" containerID="28d24b07f0edd42766c4a96fa695b2500c47162c0abdb5ef1c2e3fc144a0506c"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.046756 4773 scope.go:117] "RemoveContainer" containerID="b15c3239bfe824503bb47f29123a0b818c64f9fb695c849fc48317ace9cb2f35"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.060938 4773 scope.go:117] "RemoveContainer" containerID="5b19a86913d1a6cf02c8b7079ca0552552145d6b68436ef94e7910202daab85a"
Jan 22 11:59:30 crc kubenswrapper[4773]: E0122 11:59:30.062060 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b19a86913d1a6cf02c8b7079ca0552552145d6b68436ef94e7910202daab85a\": container with ID starting with 5b19a86913d1a6cf02c8b7079ca0552552145d6b68436ef94e7910202daab85a not found: ID does not exist" containerID="5b19a86913d1a6cf02c8b7079ca0552552145d6b68436ef94e7910202daab85a"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.062104 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b19a86913d1a6cf02c8b7079ca0552552145d6b68436ef94e7910202daab85a"} err="failed to get container status \"5b19a86913d1a6cf02c8b7079ca0552552145d6b68436ef94e7910202daab85a\": rpc error: code = NotFound desc = could not find container \"5b19a86913d1a6cf02c8b7079ca0552552145d6b68436ef94e7910202daab85a\": container with ID starting with 5b19a86913d1a6cf02c8b7079ca0552552145d6b68436ef94e7910202daab85a not found: ID does not exist"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.062136 4773 scope.go:117] "RemoveContainer" containerID="28d24b07f0edd42766c4a96fa695b2500c47162c0abdb5ef1c2e3fc144a0506c"
Jan 22 11:59:30 crc kubenswrapper[4773]: E0122 11:59:30.062577 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28d24b07f0edd42766c4a96fa695b2500c47162c0abdb5ef1c2e3fc144a0506c\": container with ID starting with 28d24b07f0edd42766c4a96fa695b2500c47162c0abdb5ef1c2e3fc144a0506c not found: ID does not exist" containerID="28d24b07f0edd42766c4a96fa695b2500c47162c0abdb5ef1c2e3fc144a0506c"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.062624 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28d24b07f0edd42766c4a96fa695b2500c47162c0abdb5ef1c2e3fc144a0506c"} err="failed to get container status \"28d24b07f0edd42766c4a96fa695b2500c47162c0abdb5ef1c2e3fc144a0506c\": rpc error: code = NotFound desc = could not find container \"28d24b07f0edd42766c4a96fa695b2500c47162c0abdb5ef1c2e3fc144a0506c\": container with ID starting with 28d24b07f0edd42766c4a96fa695b2500c47162c0abdb5ef1c2e3fc144a0506c not found: ID does not exist"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.062661 4773 scope.go:117] "RemoveContainer" containerID="b15c3239bfe824503bb47f29123a0b818c64f9fb695c849fc48317ace9cb2f35"
Jan 22 11:59:30 crc kubenswrapper[4773]: E0122 11:59:30.063033 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b15c3239bfe824503bb47f29123a0b818c64f9fb695c849fc48317ace9cb2f35\": container with ID starting with b15c3239bfe824503bb47f29123a0b818c64f9fb695c849fc48317ace9cb2f35 not found: ID does not exist" containerID="b15c3239bfe824503bb47f29123a0b818c64f9fb695c849fc48317ace9cb2f35"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.063077 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b15c3239bfe824503bb47f29123a0b818c64f9fb695c849fc48317ace9cb2f35"} err="failed to get container status \"b15c3239bfe824503bb47f29123a0b818c64f9fb695c849fc48317ace9cb2f35\": rpc error: code = NotFound desc = could not find container \"b15c3239bfe824503bb47f29123a0b818c64f9fb695c849fc48317ace9cb2f35\": container with ID starting with b15c3239bfe824503bb47f29123a0b818c64f9fb695c849fc48317ace9cb2f35 not found: ID does not exist"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.063101 4773 scope.go:117] "RemoveContainer" containerID="a9ce7fd1f6dba8f6bfc286f790e5fd0707b4e25764f738e7176b776bd3154f70"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.064335 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.077709 4773 scope.go:117] "RemoveContainer" containerID="f6ec90961018306ad0b59694bca11d548a1d7cfad475e070054166f0dc94e1e7"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.130980 4773 scope.go:117] "RemoveContainer" containerID="e955f895c05c8bbd32a9cb9fb095e597e242f605feee88aae6835aa5e5b2b338"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.143905 4773 scope.go:117] "RemoveContainer" containerID="a9ce7fd1f6dba8f6bfc286f790e5fd0707b4e25764f738e7176b776bd3154f70"
Jan 22 11:59:30 crc kubenswrapper[4773]: E0122 11:59:30.144228 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9ce7fd1f6dba8f6bfc286f790e5fd0707b4e25764f738e7176b776bd3154f70\": container with ID starting with a9ce7fd1f6dba8f6bfc286f790e5fd0707b4e25764f738e7176b776bd3154f70 not found: ID does not exist" containerID="a9ce7fd1f6dba8f6bfc286f790e5fd0707b4e25764f738e7176b776bd3154f70"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.144260 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9ce7fd1f6dba8f6bfc286f790e5fd0707b4e25764f738e7176b776bd3154f70"} err="failed to get container status \"a9ce7fd1f6dba8f6bfc286f790e5fd0707b4e25764f738e7176b776bd3154f70\": rpc error: code = NotFound desc = could not find container \"a9ce7fd1f6dba8f6bfc286f790e5fd0707b4e25764f738e7176b776bd3154f70\": container with ID starting with a9ce7fd1f6dba8f6bfc286f790e5fd0707b4e25764f738e7176b776bd3154f70 not found: ID does not exist"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.144296 4773 scope.go:117] "RemoveContainer" containerID="f6ec90961018306ad0b59694bca11d548a1d7cfad475e070054166f0dc94e1e7"
Jan 22 11:59:30 crc kubenswrapper[4773]: E0122 11:59:30.144749 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6ec90961018306ad0b59694bca11d548a1d7cfad475e070054166f0dc94e1e7\": container with ID starting with f6ec90961018306ad0b59694bca11d548a1d7cfad475e070054166f0dc94e1e7 not found: ID does not exist" containerID="f6ec90961018306ad0b59694bca11d548a1d7cfad475e070054166f0dc94e1e7"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.144801 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6ec90961018306ad0b59694bca11d548a1d7cfad475e070054166f0dc94e1e7"} err="failed to get container status \"f6ec90961018306ad0b59694bca11d548a1d7cfad475e070054166f0dc94e1e7\": rpc error: code = NotFound desc = could not find container \"f6ec90961018306ad0b59694bca11d548a1d7cfad475e070054166f0dc94e1e7\": container with ID starting with f6ec90961018306ad0b59694bca11d548a1d7cfad475e070054166f0dc94e1e7 not found: ID does not exist"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.144832 4773 scope.go:117] "RemoveContainer" containerID="e955f895c05c8bbd32a9cb9fb095e597e242f605feee88aae6835aa5e5b2b338"
Jan 22 11:59:30 crc kubenswrapper[4773]: E0122 11:59:30.145149 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e955f895c05c8bbd32a9cb9fb095e597e242f605feee88aae6835aa5e5b2b338\": container with ID starting with e955f895c05c8bbd32a9cb9fb095e597e242f605feee88aae6835aa5e5b2b338 not found: ID does not exist" containerID="e955f895c05c8bbd32a9cb9fb095e597e242f605feee88aae6835aa5e5b2b338"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.145173 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e955f895c05c8bbd32a9cb9fb095e597e242f605feee88aae6835aa5e5b2b338"} err="failed to get container status \"e955f895c05c8bbd32a9cb9fb095e597e242f605feee88aae6835aa5e5b2b338\": rpc error: code = NotFound desc = could not find container \"e955f895c05c8bbd32a9cb9fb095e597e242f605feee88aae6835aa5e5b2b338\": container with ID starting with e955f895c05c8bbd32a9cb9fb095e597e242f605feee88aae6835aa5e5b2b338 not found: ID does not exist"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.152807 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.169134 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.169857 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.202789 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.342431 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.348095 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.354328 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.436401 4773 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-tnzhm container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.436484 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-tnzhm" podUID="4b093dc7-d5ee-409a-9c1c-e003686d44e4" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.26:8080/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.514128 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.522664 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.639046 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.666258 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b093dc7-d5ee-409a-9c1c-e003686d44e4" path="/var/lib/kubelet/pods/4b093dc7-d5ee-409a-9c1c-e003686d44e4/volumes"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.667501 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be427c4b-854d-442d-86b1-19ee1d69814e" path="/var/lib/kubelet/pods/be427c4b-854d-442d-86b1-19ee1d69814e/volumes"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.668391 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4d3a6db-50cb-4809-ac76-edb0a9d949a1" path="/var/lib/kubelet/pods/c4d3a6db-50cb-4809-ac76-edb0a9d949a1/volumes"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.669693 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce66454f-39e9-4aac-9887-987e15252181" path="/var/lib/kubelet/pods/ce66454f-39e9-4aac-9887-987e15252181/volumes"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.670263 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc3f7a11-33be-4f89-abf4-62f1f13ad823" path="/var/lib/kubelet/pods/fc3f7a11-33be-4f89-abf4-62f1f13ad823/volumes"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.677426 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.706854 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.750689 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.785157 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.810434 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.923797 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.972027 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.980798 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Jan 22 11:59:30 crc kubenswrapper[4773]: I0122 11:59:30.985716 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.036328 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.119603 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.127739 4773 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.127982 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://9539caa1087a83a7cf3aac1b526b0b8eb9b11ddde95e1352fcb29e340ecb6b9f" gracePeriod=5
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.203855 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.205872 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.231662 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.247206 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.254242 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.279538 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.288474 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.371475 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.439231 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.474347 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.508308 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.551076 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.565863 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.574183 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.594354 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.695147 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.707527 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.713369 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.735228 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.914440 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Jan 22 11:59:31 crc kubenswrapper[4773]: I0122 11:59:31.942036 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Jan 22 11:59:32 crc kubenswrapper[4773]: I0122 11:59:32.366674 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Jan 22 11:59:32 crc kubenswrapper[4773]: I0122 11:59:32.408080 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Jan 22 11:59:32 crc kubenswrapper[4773]: I0122 11:59:32.426058 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Jan 22 11:59:32 crc kubenswrapper[4773]: I0122 11:59:32.445844 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Jan 22 11:59:32 crc kubenswrapper[4773]: I0122 11:59:32.459695 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Jan 22 11:59:32 crc kubenswrapper[4773]: I0122 11:59:32.529869 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Jan 22 11:59:32 crc kubenswrapper[4773]: I0122 11:59:32.574303 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Jan 22 11:59:32 crc kubenswrapper[4773]: I0122 11:59:32.742527 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Jan 22 11:59:32 crc kubenswrapper[4773]: I0122 11:59:32.742917 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Jan 22 11:59:32 crc kubenswrapper[4773]: I0122 11:59:32.762498 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Jan 22 11:59:32 crc kubenswrapper[4773]: I0122 11:59:32.801642 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Jan 22 11:59:32 crc kubenswrapper[4773]: I0122 11:59:32.902597 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Jan 22 11:59:33 crc kubenswrapper[4773]: I0122 11:59:33.020446 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Jan 22 11:59:33 crc kubenswrapper[4773]: I0122 11:59:33.037820 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Jan 22 11:59:33 crc kubenswrapper[4773]: I0122 11:59:33.097444 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Jan 22 11:59:33 crc kubenswrapper[4773]: I0122 11:59:33.184696 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Jan 22 11:59:33 crc kubenswrapper[4773]: I0122 11:59:33.294358 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Jan 22 11:59:33 crc kubenswrapper[4773]: I0122 11:59:33.519735 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Jan 22 11:59:33 crc kubenswrapper[4773]: I0122 11:59:33.524960 4773 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Jan 22 11:59:33 crc kubenswrapper[4773]: I0122 11:59:33.575429 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Jan 22 11:59:33 crc kubenswrapper[4773]: I0122 11:59:33.851524 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Jan 22 11:59:33 crc kubenswrapper[4773]: I0122 11:59:33.927120 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Jan 22 11:59:33 crc kubenswrapper[4773]: I0122 11:59:33.946711 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Jan 22 11:59:33 crc kubenswrapper[4773]: I0122 11:59:33.980254 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 22 11:59:34 crc kubenswrapper[4773]: I0122 11:59:34.014200 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Jan 22 11:59:34 crc kubenswrapper[4773]: I0122 11:59:34.165529 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Jan 22 11:59:34 crc kubenswrapper[4773]: I0122 11:59:34.166227 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Jan 22 11:59:34 crc kubenswrapper[4773]: I0122 11:59:34.269387 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Jan 22 11:59:34 crc kubenswrapper[4773]: I0122 11:59:34.303499 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Jan 22 11:59:34 crc kubenswrapper[4773]: I0122 11:59:34.548966 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Jan 22 11:59:34 crc kubenswrapper[4773]: I0122 11:59:34.849348 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Jan 22 11:59:34 crc kubenswrapper[4773]: I0122 11:59:34.915726 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Jan 22 11:59:35 crc kubenswrapper[4773]: I0122 11:59:35.080217 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Jan 22 11:59:35 crc kubenswrapper[4773]: I0122 11:59:35.188101 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Jan 22 11:59:35 crc kubenswrapper[4773]: I0122 11:59:35.226581 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Jan 22 11:59:35 crc kubenswrapper[4773]: I0122 11:59:35.553509 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Jan 22 11:59:35 crc kubenswrapper[4773]: I0122 11:59:35.947675 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.204228 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.246512 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.384202 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.706874 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.707679 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.905146 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.905236 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.905297 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.905362 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.905402 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.905416 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.905452 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.905506 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.905543 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.906013 4773 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\""
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.906316 4773 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\""
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.906331 4773 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\""
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.906344 4773 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\""
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.913009 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.933929 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.933986 4773 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="9539caa1087a83a7cf3aac1b526b0b8eb9b11ddde95e1352fcb29e340ecb6b9f" exitCode=137
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.934036 4773 scope.go:117] "RemoveContainer" containerID="9539caa1087a83a7cf3aac1b526b0b8eb9b11ddde95e1352fcb29e340ecb6b9f"
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.934237 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.962046 4773 scope.go:117] "RemoveContainer" containerID="9539caa1087a83a7cf3aac1b526b0b8eb9b11ddde95e1352fcb29e340ecb6b9f"
Jan 22 11:59:36 crc kubenswrapper[4773]: E0122 11:59:36.962700 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9539caa1087a83a7cf3aac1b526b0b8eb9b11ddde95e1352fcb29e340ecb6b9f\": container with ID starting with 9539caa1087a83a7cf3aac1b526b0b8eb9b11ddde95e1352fcb29e340ecb6b9f not found: ID does not exist" containerID="9539caa1087a83a7cf3aac1b526b0b8eb9b11ddde95e1352fcb29e340ecb6b9f"
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.962746 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9539caa1087a83a7cf3aac1b526b0b8eb9b11ddde95e1352fcb29e340ecb6b9f"} err="failed to get container status \"9539caa1087a83a7cf3aac1b526b0b8eb9b11ddde95e1352fcb29e340ecb6b9f\": rpc error: code = NotFound desc = could not find container \"9539caa1087a83a7cf3aac1b526b0b8eb9b11ddde95e1352fcb29e340ecb6b9f\": container with ID starting with 9539caa1087a83a7cf3aac1b526b0b8eb9b11ddde95e1352fcb29e340ecb6b9f not found: ID does not exist"
Jan 22 11:59:36 crc kubenswrapper[4773]: I0122 11:59:36.977675 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Jan 22 11:59:37 crc kubenswrapper[4773]: I0122 11:59:37.007390 4773 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\""
Jan 22 11:59:37 crc kubenswrapper[4773]: I0122 11:59:37.192649 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Jan 22 11:59:37 crc kubenswrapper[4773]: I0122 11:59:37.615239 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Jan 22 11:59:38 crc kubenswrapper[4773]: I0122 11:59:38.665053 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes"
Jan 22 11:59:52 crc kubenswrapper[4773]: I0122 11:59:52.549879 4773 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials
Jan 22 11:59:57 crc kubenswrapper[4773]: I0122 11:59:57.029826 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log"
Jan 22 11:59:57 crc kubenswrapper[4773]: I0122 11:59:57.032898 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Jan 22 11:59:57 crc kubenswrapper[4773]: I0122 11:59:57.032939 4773 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="49dfd5d1636b97f6d8e6578fef0f0e3409e7191ec32c5849a67938c3c15585e5" exitCode=137
Jan 22 11:59:57 crc kubenswrapper[4773]: I0122 11:59:57.032964 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"49dfd5d1636b97f6d8e6578fef0f0e3409e7191ec32c5849a67938c3c15585e5"}
Jan 22 11:59:57 crc kubenswrapper[4773]: I0122 11:59:57.032993 4773 scope.go:117] "RemoveContainer" containerID="eadded4fb35731e401dc1a3a128084833dc36790590abb4327cb5303cba757aa"
Jan 22 11:59:58 crc kubenswrapper[4773]: I0122 11:59:58.049694 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log"
Jan 22 11:59:58 crc kubenswrapper[4773]: I0122 11:59:58.050878 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"4cab28f9237f0160a6ebaeede79dd79af0d42c3376bc3f3fbf3afa1550736ed2"}
Jan 22 11:59:58 crc kubenswrapper[4773]: I0122 11:59:58.083166 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.403788 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-x8gvd"]
Jan 22 11:59:59 crc kubenswrapper[4773]: E0122 11:59:59.404307 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be427c4b-854d-442d-86b1-19ee1d69814e" containerName="extract-utilities"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404320 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="be427c4b-854d-442d-86b1-19ee1d69814e" containerName="extract-utilities"
Jan 22 11:59:59 crc kubenswrapper[4773]: E0122 11:59:59.404329 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc3f7a11-33be-4f89-abf4-62f1f13ad823" containerName="registry-server"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404335 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc3f7a11-33be-4f89-abf4-62f1f13ad823" containerName="registry-server"
Jan 22 11:59:59 crc kubenswrapper[4773]: E0122 11:59:59.404347 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59e7aa07-30e5-4e9b-9cd8-f80054e9d41c" containerName="installer"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404353 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="59e7aa07-30e5-4e9b-9cd8-f80054e9d41c" containerName="installer"
Jan 22 11:59:59 crc kubenswrapper[4773]: E0122 11:59:59.404360 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4d3a6db-50cb-4809-ac76-edb0a9d949a1" containerName="extract-content"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404365 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4d3a6db-50cb-4809-ac76-edb0a9d949a1" containerName="extract-content"
Jan 22 11:59:59 crc kubenswrapper[4773]: E0122 11:59:59.404373 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce66454f-39e9-4aac-9887-987e15252181" containerName="extract-utilities"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404379 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce66454f-39e9-4aac-9887-987e15252181" containerName="extract-utilities"
Jan 22 11:59:59 crc kubenswrapper[4773]: E0122 11:59:59.404385 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4d3a6db-50cb-4809-ac76-edb0a9d949a1" containerName="registry-server"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404390 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4d3a6db-50cb-4809-ac76-edb0a9d949a1" containerName="registry-server"
Jan 22 11:59:59 crc kubenswrapper[4773]: E0122 11:59:59.404400 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc3f7a11-33be-4f89-abf4-62f1f13ad823" containerName="extract-content"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404406 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc3f7a11-33be-4f89-abf4-62f1f13ad823" containerName="extract-content"
Jan 22 11:59:59 crc kubenswrapper[4773]: E0122 11:59:59.404413 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be427c4b-854d-442d-86b1-19ee1d69814e" containerName="registry-server"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404418 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="be427c4b-854d-442d-86b1-19ee1d69814e" containerName="registry-server"
Jan 22 11:59:59 crc kubenswrapper[4773]: E0122 11:59:59.404426 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b093dc7-d5ee-409a-9c1c-e003686d44e4" containerName="marketplace-operator"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404432 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b093dc7-d5ee-409a-9c1c-e003686d44e4" containerName="marketplace-operator"
Jan 22 11:59:59 crc kubenswrapper[4773]: E0122 11:59:59.404439 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc3f7a11-33be-4f89-abf4-62f1f13ad823" containerName="extract-utilities"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404444 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc3f7a11-33be-4f89-abf4-62f1f13ad823" containerName="extract-utilities"
Jan 22 11:59:59 crc kubenswrapper[4773]: E0122 11:59:59.404451 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce66454f-39e9-4aac-9887-987e15252181" containerName="registry-server"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404457 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce66454f-39e9-4aac-9887-987e15252181" containerName="registry-server"
Jan 22 11:59:59 crc kubenswrapper[4773]: E0122 11:59:59.404465 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce66454f-39e9-4aac-9887-987e15252181" containerName="extract-content"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404470 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce66454f-39e9-4aac-9887-987e15252181" containerName="extract-content"
Jan 22 11:59:59 crc kubenswrapper[4773]: E0122 11:59:59.404480 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404485 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Jan 22 11:59:59 crc kubenswrapper[4773]: E0122 11:59:59.404496 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be427c4b-854d-442d-86b1-19ee1d69814e" containerName="extract-content"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404501 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="be427c4b-854d-442d-86b1-19ee1d69814e" containerName="extract-content"
Jan 22 11:59:59 crc kubenswrapper[4773]: E0122 11:59:59.404512 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4d3a6db-50cb-4809-ac76-edb0a9d949a1" containerName="extract-utilities"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404520 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4d3a6db-50cb-4809-ac76-edb0a9d949a1" containerName="extract-utilities"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404600 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc3f7a11-33be-4f89-abf4-62f1f13ad823" containerName="registry-server"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404609 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4d3a6db-50cb-4809-ac76-edb0a9d949a1" containerName="registry-server"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404621 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="59e7aa07-30e5-4e9b-9cd8-f80054e9d41c" containerName="installer"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404629 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="be427c4b-854d-442d-86b1-19ee1d69814e" containerName="registry-server"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404635 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce66454f-39e9-4aac-9887-987e15252181" containerName="registry-server"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404642 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b093dc7-d5ee-409a-9c1c-e003686d44e4" containerName="marketplace-operator"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.404649 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.405332 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x8gvd"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.407909 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.408202 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.408950 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.413146 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x8gvd"]
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.588459 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b5e27c7-c979-4e6e-bf18-56d144b4256a-utilities\") pod \"redhat-operators-x8gvd\" (UID: \"9b5e27c7-c979-4e6e-bf18-56d144b4256a\") " pod="openshift-marketplace/redhat-operators-x8gvd"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.588532 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b5e27c7-c979-4e6e-bf18-56d144b4256a-catalog-content\") pod \"redhat-operators-x8gvd\" (UID: \"9b5e27c7-c979-4e6e-bf18-56d144b4256a\") " pod="openshift-marketplace/redhat-operators-x8gvd"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.588699 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4t68\" (UniqueName: \"kubernetes.io/projected/9b5e27c7-c979-4e6e-bf18-56d144b4256a-kube-api-access-q4t68\") pod \"redhat-operators-x8gvd\" (UID: \"9b5e27c7-c979-4e6e-bf18-56d144b4256a\") " pod="openshift-marketplace/redhat-operators-x8gvd"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.602381 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wg84l"]
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.603340 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wg84l"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.605631 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.621246 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wg84l"]
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.689307 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80014b89-9e88-4fd8-a019-619d5e6382aa-catalog-content\") pod \"redhat-marketplace-wg84l\" (UID: \"80014b89-9e88-4fd8-a019-619d5e6382aa\") " pod="openshift-marketplace/redhat-marketplace-wg84l"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.689383 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4t68\" (UniqueName: \"kubernetes.io/projected/9b5e27c7-c979-4e6e-bf18-56d144b4256a-kube-api-access-q4t68\") pod \"redhat-operators-x8gvd\" (UID: \"9b5e27c7-c979-4e6e-bf18-56d144b4256a\") " pod="openshift-marketplace/redhat-operators-x8gvd"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.689618 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80014b89-9e88-4fd8-a019-619d5e6382aa-utilities\") pod \"redhat-marketplace-wg84l\" (UID: \"80014b89-9e88-4fd8-a019-619d5e6382aa\") " pod="openshift-marketplace/redhat-marketplace-wg84l"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.689674 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnqgq\" (UniqueName: \"kubernetes.io/projected/80014b89-9e88-4fd8-a019-619d5e6382aa-kube-api-access-xnqgq\") pod \"redhat-marketplace-wg84l\" (UID: \"80014b89-9e88-4fd8-a019-619d5e6382aa\") " pod="openshift-marketplace/redhat-marketplace-wg84l"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.689737 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b5e27c7-c979-4e6e-bf18-56d144b4256a-utilities\") pod \"redhat-operators-x8gvd\" (UID: \"9b5e27c7-c979-4e6e-bf18-56d144b4256a\") " pod="openshift-marketplace/redhat-operators-x8gvd"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.689785 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b5e27c7-c979-4e6e-bf18-56d144b4256a-catalog-content\") pod \"redhat-operators-x8gvd\" (UID: \"9b5e27c7-c979-4e6e-bf18-56d144b4256a\") " pod="openshift-marketplace/redhat-operators-x8gvd"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.690331 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b5e27c7-c979-4e6e-bf18-56d144b4256a-utilities\") pod \"redhat-operators-x8gvd\" (UID: \"9b5e27c7-c979-4e6e-bf18-56d144b4256a\") " pod="openshift-marketplace/redhat-operators-x8gvd"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.690628 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b5e27c7-c979-4e6e-bf18-56d144b4256a-catalog-content\") pod \"redhat-operators-x8gvd\" (UID: \"9b5e27c7-c979-4e6e-bf18-56d144b4256a\") " pod="openshift-marketplace/redhat-operators-x8gvd"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.712482 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4t68\" (UniqueName: \"kubernetes.io/projected/9b5e27c7-c979-4e6e-bf18-56d144b4256a-kube-api-access-q4t68\") pod \"redhat-operators-x8gvd\" (UID: \"9b5e27c7-c979-4e6e-bf18-56d144b4256a\") " pod="openshift-marketplace/redhat-operators-x8gvd"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.722826 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x8gvd"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.790610 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80014b89-9e88-4fd8-a019-619d5e6382aa-utilities\") pod \"redhat-marketplace-wg84l\" (UID: \"80014b89-9e88-4fd8-a019-619d5e6382aa\") " pod="openshift-marketplace/redhat-marketplace-wg84l"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.790660 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnqgq\" (UniqueName: \"kubernetes.io/projected/80014b89-9e88-4fd8-a019-619d5e6382aa-kube-api-access-xnqgq\") pod \"redhat-marketplace-wg84l\" (UID: \"80014b89-9e88-4fd8-a019-619d5e6382aa\") " pod="openshift-marketplace/redhat-marketplace-wg84l"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.790700 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80014b89-9e88-4fd8-a019-619d5e6382aa-catalog-content\") pod \"redhat-marketplace-wg84l\" (UID: \"80014b89-9e88-4fd8-a019-619d5e6382aa\") " pod="openshift-marketplace/redhat-marketplace-wg84l"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.791554 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80014b89-9e88-4fd8-a019-619d5e6382aa-catalog-content\") pod \"redhat-marketplace-wg84l\" (UID: \"80014b89-9e88-4fd8-a019-619d5e6382aa\") " pod="openshift-marketplace/redhat-marketplace-wg84l"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.803628 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80014b89-9e88-4fd8-a019-619d5e6382aa-utilities\") pod \"redhat-marketplace-wg84l\" (UID: \"80014b89-9e88-4fd8-a019-619d5e6382aa\") " pod="openshift-marketplace/redhat-marketplace-wg84l"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.814691 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnqgq\" (UniqueName: \"kubernetes.io/projected/80014b89-9e88-4fd8-a019-619d5e6382aa-kube-api-access-xnqgq\") pod \"redhat-marketplace-wg84l\" (UID: \"80014b89-9e88-4fd8-a019-619d5e6382aa\") " pod="openshift-marketplace/redhat-marketplace-wg84l"
Jan 22 11:59:59 crc kubenswrapper[4773]: I0122 11:59:59.936768 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wg84l"
Jan 22 12:00:00 crc kubenswrapper[4773]: I0122 12:00:00.151996 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x8gvd"]
Jan 22 12:00:00 crc kubenswrapper[4773]: W0122 12:00:00.161355 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b5e27c7_c979_4e6e_bf18_56d144b4256a.slice/crio-e808c939fd588948c5e8d63e0b1f0ffae6e03d31ec31b01a6b59ab833921b044 WatchSource:0}: Error finding container e808c939fd588948c5e8d63e0b1f0ffae6e03d31ec31b01a6b59ab833921b044: Status 404 returned error can't find the container with id e808c939fd588948c5e8d63e0b1f0ffae6e03d31ec31b01a6b59ab833921b044
Jan 22 12:00:00 crc kubenswrapper[4773]: I0122 12:00:00.379013 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wg84l"]
Jan 22 12:00:00 crc kubenswrapper[4773]: W0122 12:00:00.436040 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod80014b89_9e88_4fd8_a019_619d5e6382aa.slice/crio-5a4f3ee577d5a88325717aba9f9714a2fd67b55047a2db03da942e67163d6be0 WatchSource:0}: Error finding container 5a4f3ee577d5a88325717aba9f9714a2fd67b55047a2db03da942e67163d6be0: Status 404 returned error can't find the container with id 5a4f3ee577d5a88325717aba9f9714a2fd67b55047a2db03da942e67163d6be0
Jan 22 12:00:01 crc kubenswrapper[4773]: I0122 12:00:01.072428 4773 generic.go:334] "Generic (PLEG): container finished" podID="80014b89-9e88-4fd8-a019-619d5e6382aa" containerID="046b078ac46a6857b8102bea7fed75f3f85dd954b70430f2ec889dda8b448c52" exitCode=0
Jan 22 12:00:01 crc kubenswrapper[4773]: I0122 12:00:01.072501 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wg84l" event={"ID":"80014b89-9e88-4fd8-a019-619d5e6382aa","Type":"ContainerDied","Data":"046b078ac46a6857b8102bea7fed75f3f85dd954b70430f2ec889dda8b448c52"}
Jan 22 12:00:01 crc kubenswrapper[4773]: I0122 12:00:01.072532 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wg84l" event={"ID":"80014b89-9e88-4fd8-a019-619d5e6382aa","Type":"ContainerStarted","Data":"5a4f3ee577d5a88325717aba9f9714a2fd67b55047a2db03da942e67163d6be0"}
Jan 22 12:00:01 crc kubenswrapper[4773]: I0122 12:00:01.074097 4773 generic.go:334] "Generic (PLEG): container finished" podID="9b5e27c7-c979-4e6e-bf18-56d144b4256a" containerID="0595addfd12bd41ceee3738be8171a3a855b2eee2e12958c0a0aa840cf266f39" exitCode=0
Jan 22 12:00:01 crc kubenswrapper[4773]: I0122 12:00:01.074138 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x8gvd" event={"ID":"9b5e27c7-c979-4e6e-bf18-56d144b4256a","Type":"ContainerDied","Data":"0595addfd12bd41ceee3738be8171a3a855b2eee2e12958c0a0aa840cf266f39"}
Jan 22 12:00:01 crc kubenswrapper[4773]: I0122 12:00:01.074165 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x8gvd" event={"ID":"9b5e27c7-c979-4e6e-bf18-56d144b4256a","Type":"ContainerStarted","Data":"e808c939fd588948c5e8d63e0b1f0ffae6e03d31ec31b01a6b59ab833921b044"}
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.002630 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-g2cq4"]
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.003974 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g2cq4"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.005739 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.053628 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g2cq4"]
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.120175 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksfbp\" (UniqueName: \"kubernetes.io/projected/d8e60f79-505e-4b43-b8eb-7e04eb7f567d-kube-api-access-ksfbp\") pod \"certified-operators-g2cq4\" (UID: \"d8e60f79-505e-4b43-b8eb-7e04eb7f567d\") " pod="openshift-marketplace/certified-operators-g2cq4"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.120249 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8e60f79-505e-4b43-b8eb-7e04eb7f567d-utilities\") pod \"certified-operators-g2cq4\" (UID: \"d8e60f79-505e-4b43-b8eb-7e04eb7f567d\") " pod="openshift-marketplace/certified-operators-g2cq4"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.120374 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8e60f79-505e-4b43-b8eb-7e04eb7f567d-catalog-content\") pod \"certified-operators-g2cq4\" (UID: \"d8e60f79-505e-4b43-b8eb-7e04eb7f567d\") " pod="openshift-marketplace/certified-operators-g2cq4"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.198949 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9xq9p"]
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.200093 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9xq9p"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.202091 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.207357 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9xq9p"]
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.254245 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/402b8ef2-9974-4dc0-bf8f-1259c87a71b7-utilities\") pod \"community-operators-9xq9p\" (UID: \"402b8ef2-9974-4dc0-bf8f-1259c87a71b7\") " pod="openshift-marketplace/community-operators-9xq9p"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.254355 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8e60f79-505e-4b43-b8eb-7e04eb7f567d-utilities\") pod \"certified-operators-g2cq4\" (UID: \"d8e60f79-505e-4b43-b8eb-7e04eb7f567d\") " pod="openshift-marketplace/certified-operators-g2cq4"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.254392 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8e60f79-505e-4b43-b8eb-7e04eb7f567d-catalog-content\") pod \"certified-operators-g2cq4\" (UID: \"d8e60f79-505e-4b43-b8eb-7e04eb7f567d\") " pod="openshift-marketplace/certified-operators-g2cq4"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.254420 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/402b8ef2-9974-4dc0-bf8f-1259c87a71b7-catalog-content\") pod \"community-operators-9xq9p\" (UID: \"402b8ef2-9974-4dc0-bf8f-1259c87a71b7\") " pod="openshift-marketplace/community-operators-9xq9p"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.254492 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pvc2\" (UniqueName: \"kubernetes.io/projected/402b8ef2-9974-4dc0-bf8f-1259c87a71b7-kube-api-access-7pvc2\") pod \"community-operators-9xq9p\" (UID: \"402b8ef2-9974-4dc0-bf8f-1259c87a71b7\") " pod="openshift-marketplace/community-operators-9xq9p"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.254526 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksfbp\" (UniqueName: \"kubernetes.io/projected/d8e60f79-505e-4b43-b8eb-7e04eb7f567d-kube-api-access-ksfbp\") pod \"certified-operators-g2cq4\" (UID: \"d8e60f79-505e-4b43-b8eb-7e04eb7f567d\") " pod="openshift-marketplace/certified-operators-g2cq4"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.254867 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8e60f79-505e-4b43-b8eb-7e04eb7f567d-catalog-content\") pod \"certified-operators-g2cq4\" (UID: \"d8e60f79-505e-4b43-b8eb-7e04eb7f567d\") " pod="openshift-marketplace/certified-operators-g2cq4"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.254907 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8e60f79-505e-4b43-b8eb-7e04eb7f567d-utilities\") pod \"certified-operators-g2cq4\" (UID: \"d8e60f79-505e-4b43-b8eb-7e04eb7f567d\") " pod="openshift-marketplace/certified-operators-g2cq4"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.288918 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksfbp\" (UniqueName: \"kubernetes.io/projected/d8e60f79-505e-4b43-b8eb-7e04eb7f567d-kube-api-access-ksfbp\") pod \"certified-operators-g2cq4\" (UID: \"d8e60f79-505e-4b43-b8eb-7e04eb7f567d\") " pod="openshift-marketplace/certified-operators-g2cq4"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.320474 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g2cq4"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.356081 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pvc2\" (UniqueName: \"kubernetes.io/projected/402b8ef2-9974-4dc0-bf8f-1259c87a71b7-kube-api-access-7pvc2\") pod \"community-operators-9xq9p\" (UID: \"402b8ef2-9974-4dc0-bf8f-1259c87a71b7\") " pod="openshift-marketplace/community-operators-9xq9p"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.356150 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/402b8ef2-9974-4dc0-bf8f-1259c87a71b7-utilities\") pod \"community-operators-9xq9p\" (UID: \"402b8ef2-9974-4dc0-bf8f-1259c87a71b7\") " pod="openshift-marketplace/community-operators-9xq9p"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.356199 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/402b8ef2-9974-4dc0-bf8f-1259c87a71b7-catalog-content\") pod \"community-operators-9xq9p\" (UID: \"402b8ef2-9974-4dc0-bf8f-1259c87a71b7\") " pod="openshift-marketplace/community-operators-9xq9p"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.356857 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/402b8ef2-9974-4dc0-bf8f-1259c87a71b7-utilities\") pod \"community-operators-9xq9p\" (UID: \"402b8ef2-9974-4dc0-bf8f-1259c87a71b7\") " pod="openshift-marketplace/community-operators-9xq9p"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.357063 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/402b8ef2-9974-4dc0-bf8f-1259c87a71b7-catalog-content\") pod \"community-operators-9xq9p\" (UID: \"402b8ef2-9974-4dc0-bf8f-1259c87a71b7\") " pod="openshift-marketplace/community-operators-9xq9p"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.373123 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pvc2\" (UniqueName: \"kubernetes.io/projected/402b8ef2-9974-4dc0-bf8f-1259c87a71b7-kube-api-access-7pvc2\") pod \"community-operators-9xq9p\" (UID: \"402b8ef2-9974-4dc0-bf8f-1259c87a71b7\") " pod="openshift-marketplace/community-operators-9xq9p"
Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.545041 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g2cq4"]
Jan 22 12:00:02 crc kubenswrapper[4773]: W0122 12:00:02.558269 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd8e60f79_505e_4b43_b8eb_7e04eb7f567d.slice/crio-3ea6a4f177caea3a13242764af81d53c1e4f91dfdef73b5f357bf1ec8cddd361 WatchSource:0}: Error
finding container 3ea6a4f177caea3a13242764af81d53c1e4f91dfdef73b5f357bf1ec8cddd361: Status 404 returned error can't find the container with id 3ea6a4f177caea3a13242764af81d53c1e4f91dfdef73b5f357bf1ec8cddd361 Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.560322 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9xq9p" Jan 22 12:00:02 crc kubenswrapper[4773]: I0122 12:00:02.956006 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9xq9p"] Jan 22 12:00:03 crc kubenswrapper[4773]: I0122 12:00:03.085854 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xq9p" event={"ID":"402b8ef2-9974-4dc0-bf8f-1259c87a71b7","Type":"ContainerStarted","Data":"def1af64d26e01a0d076fdb752e0a0eb8621dd59379b515c2a439556e7b05ecf"} Jan 22 12:00:03 crc kubenswrapper[4773]: I0122 12:00:03.085904 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xq9p" event={"ID":"402b8ef2-9974-4dc0-bf8f-1259c87a71b7","Type":"ContainerStarted","Data":"c2e46b7874389f2c20fefea83aeb443783485bd235c00c1b39ce385294d99a58"} Jan 22 12:00:03 crc kubenswrapper[4773]: I0122 12:00:03.087078 4773 generic.go:334] "Generic (PLEG): container finished" podID="d8e60f79-505e-4b43-b8eb-7e04eb7f567d" containerID="2b36d6e057916636b40fa148cda423bb05bd640c5006f59a972130aa4ba99be6" exitCode=0 Jan 22 12:00:03 crc kubenswrapper[4773]: I0122 12:00:03.087119 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g2cq4" event={"ID":"d8e60f79-505e-4b43-b8eb-7e04eb7f567d","Type":"ContainerDied","Data":"2b36d6e057916636b40fa148cda423bb05bd640c5006f59a972130aa4ba99be6"} Jan 22 12:00:03 crc kubenswrapper[4773]: I0122 12:00:03.087143 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g2cq4" event={"ID":"d8e60f79-505e-4b43-b8eb-7e04eb7f567d","Type":"ContainerStarted","Data":"3ea6a4f177caea3a13242764af81d53c1e4f91dfdef73b5f357bf1ec8cddd361"} Jan 22 12:00:04 crc kubenswrapper[4773]: I0122 12:00:04.093699 4773 generic.go:334] "Generic (PLEG): container finished" podID="402b8ef2-9974-4dc0-bf8f-1259c87a71b7" containerID="def1af64d26e01a0d076fdb752e0a0eb8621dd59379b515c2a439556e7b05ecf" exitCode=0 Jan 22 12:00:04 crc kubenswrapper[4773]: I0122 12:00:04.093743 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xq9p" event={"ID":"402b8ef2-9974-4dc0-bf8f-1259c87a71b7","Type":"ContainerDied","Data":"def1af64d26e01a0d076fdb752e0a0eb8621dd59379b515c2a439556e7b05ecf"} Jan 22 12:00:06 crc kubenswrapper[4773]: I0122 12:00:06.108632 4773 generic.go:334] "Generic (PLEG): container finished" podID="402b8ef2-9974-4dc0-bf8f-1259c87a71b7" containerID="628521748d44d109de91946ccb11047a9c4b19e76ace821bb307d18dcca4a38f" exitCode=0 Jan 22 12:00:06 crc kubenswrapper[4773]: I0122 12:00:06.108771 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xq9p" event={"ID":"402b8ef2-9974-4dc0-bf8f-1259c87a71b7","Type":"ContainerDied","Data":"628521748d44d109de91946ccb11047a9c4b19e76ace821bb307d18dcca4a38f"} Jan 22 12:00:06 crc kubenswrapper[4773]: I0122 12:00:06.111693 4773 generic.go:334] "Generic (PLEG): container finished" podID="d8e60f79-505e-4b43-b8eb-7e04eb7f567d" containerID="b42fc8f13b35831318ce1a553c375e0e38031845a9bfd1f0fc9c1922df39ed63" exitCode=0 Jan 22 12:00:06 crc 
kubenswrapper[4773]: I0122 12:00:06.111756 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g2cq4" event={"ID":"d8e60f79-505e-4b43-b8eb-7e04eb7f567d","Type":"ContainerDied","Data":"b42fc8f13b35831318ce1a553c375e0e38031845a9bfd1f0fc9c1922df39ed63"} Jan 22 12:00:06 crc kubenswrapper[4773]: I0122 12:00:06.302113 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 12:00:06 crc kubenswrapper[4773]: I0122 12:00:06.306121 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 12:00:07 crc kubenswrapper[4773]: I0122 12:00:07.121163 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xq9p" event={"ID":"402b8ef2-9974-4dc0-bf8f-1259c87a71b7","Type":"ContainerStarted","Data":"3f8ec7470f2ffb9c8a419348cf6ab96bddb043188b133f980db12a42e884b33b"} Jan 22 12:00:07 crc kubenswrapper[4773]: I0122 12:00:07.125204 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g2cq4" event={"ID":"d8e60f79-505e-4b43-b8eb-7e04eb7f567d","Type":"ContainerStarted","Data":"dfa5fcc5cb807aa0e04e14e3ddd265f3ef4ec1925283c7e9208ead7e1d3f71f4"} Jan 22 12:00:07 crc kubenswrapper[4773]: I0122 12:00:07.131298 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 12:00:07 crc kubenswrapper[4773]: I0122 12:00:07.149199 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9xq9p" podStartSLOduration=2.4844846 podStartE2EDuration="5.149178916s" podCreationTimestamp="2026-01-22 12:00:02 +0000 UTC" firstStartedPulling="2026-01-22 12:00:04.095448309 +0000 UTC m=+311.673564134" lastFinishedPulling="2026-01-22 12:00:06.760142615 +0000 UTC m=+314.338258450" observedRunningTime="2026-01-22 12:00:07.145889158 +0000 UTC m=+314.724004983" watchObservedRunningTime="2026-01-22 12:00:07.149178916 +0000 UTC m=+314.727294741" Jan 22 12:00:07 crc kubenswrapper[4773]: I0122 12:00:07.170561 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-g2cq4" podStartSLOduration=2.471154121 podStartE2EDuration="6.170539686s" podCreationTimestamp="2026-01-22 12:00:01 +0000 UTC" firstStartedPulling="2026-01-22 12:00:03.08913415 +0000 UTC m=+310.667249975" lastFinishedPulling="2026-01-22 12:00:06.788519715 +0000 UTC m=+314.366635540" observedRunningTime="2026-01-22 12:00:07.168938158 +0000 UTC m=+314.747053983" watchObservedRunningTime="2026-01-22 12:00:07.170539686 +0000 UTC m=+314.748655511" Jan 22 12:00:08 crc kubenswrapper[4773]: I0122 12:00:08.130536 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x8gvd" event={"ID":"9b5e27c7-c979-4e6e-bf18-56d144b4256a","Type":"ContainerStarted","Data":"43b83b3081fdb1f50002598eebd2e5709637368dd99a2459b32b956103982668"} Jan 22 12:00:09 crc kubenswrapper[4773]: I0122 12:00:09.137327 4773 generic.go:334] "Generic (PLEG): container finished" podID="9b5e27c7-c979-4e6e-bf18-56d144b4256a" containerID="43b83b3081fdb1f50002598eebd2e5709637368dd99a2459b32b956103982668" exitCode=0 Jan 22 12:00:09 crc kubenswrapper[4773]: I0122 12:00:09.137374 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-x8gvd" event={"ID":"9b5e27c7-c979-4e6e-bf18-56d144b4256a","Type":"ContainerDied","Data":"43b83b3081fdb1f50002598eebd2e5709637368dd99a2459b32b956103982668"} Jan 22 12:00:10 crc kubenswrapper[4773]: I0122 12:00:10.145113 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x8gvd" event={"ID":"9b5e27c7-c979-4e6e-bf18-56d144b4256a","Type":"ContainerStarted","Data":"a10df1f5caf5bfaa04d1c596f84b6203d1b563d1820afb3c94832e68794d665b"} Jan 22 12:00:10 crc kubenswrapper[4773]: I0122 12:00:10.164032 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-x8gvd" podStartSLOduration=2.6403190690000002 podStartE2EDuration="11.16401504s" podCreationTimestamp="2026-01-22 11:59:59 +0000 UTC" firstStartedPulling="2026-01-22 12:00:01.076000628 +0000 UTC m=+308.654116453" lastFinishedPulling="2026-01-22 12:00:09.599696599 +0000 UTC m=+317.177812424" observedRunningTime="2026-01-22 12:00:10.162482134 +0000 UTC m=+317.740597979" watchObservedRunningTime="2026-01-22 12:00:10.16401504 +0000 UTC m=+317.742130865" Jan 22 12:00:12 crc kubenswrapper[4773]: I0122 12:00:12.320848 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-g2cq4" Jan 22 12:00:12 crc kubenswrapper[4773]: I0122 12:00:12.320919 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-g2cq4" Jan 22 12:00:12 crc kubenswrapper[4773]: I0122 12:00:12.372336 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-g2cq4" Jan 22 12:00:12 crc kubenswrapper[4773]: I0122 12:00:12.560696 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9xq9p" Jan 22 12:00:12 crc kubenswrapper[4773]: I0122 12:00:12.560796 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9xq9p" Jan 22 12:00:12 crc kubenswrapper[4773]: I0122 12:00:12.595526 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9xq9p" Jan 22 12:00:13 crc kubenswrapper[4773]: I0122 12:00:13.196215 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-g2cq4" Jan 22 12:00:13 crc kubenswrapper[4773]: I0122 12:00:13.205693 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9xq9p" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.205498 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mw22t"] Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.206703 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mw22t" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.208892 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.209120 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.210437 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk"] Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.211246 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.214706 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.214942 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.218197 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk"] Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.219168 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.221180 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mw22t"] Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.239477 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-nwwct"] Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.256277 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8"] Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.256769 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8" podUID="8a82d4a4-46ea-49de-89bb-9e8057ca5487" containerName="controller-manager" containerID="cri-o://2cba6f70f621c434bbad5b08b9aeb795d5e07ce6d67fc4de753640f0206a7c7f" gracePeriod=30 Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.383446 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6ab754af-86ec-4c82-bccc-4f773da62193-secret-volume\") pod \"collect-profiles-29484720-tt5zk\" (UID: \"6ab754af-86ec-4c82-bccc-4f773da62193\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.383754 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6ab754af-86ec-4c82-bccc-4f773da62193-config-volume\") pod \"collect-profiles-29484720-tt5zk\" (UID: \"6ab754af-86ec-4c82-bccc-4f773da62193\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.383797 4773 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6jvk\" (UniqueName: \"kubernetes.io/projected/6ab754af-86ec-4c82-bccc-4f773da62193-kube-api-access-m6jvk\") pod \"collect-profiles-29484720-tt5zk\" (UID: \"6ab754af-86ec-4c82-bccc-4f773da62193\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.383826 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7xlb\" (UniqueName: \"kubernetes.io/projected/6e54030d-7ce7-4a13-b4a3-e67889e7c22d-kube-api-access-l7xlb\") pod \"marketplace-operator-79b997595-mw22t\" (UID: \"6e54030d-7ce7-4a13-b4a3-e67889e7c22d\") " pod="openshift-marketplace/marketplace-operator-79b997595-mw22t" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.383857 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6e54030d-7ce7-4a13-b4a3-e67889e7c22d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mw22t\" (UID: \"6e54030d-7ce7-4a13-b4a3-e67889e7c22d\") " pod="openshift-marketplace/marketplace-operator-79b997595-mw22t" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.383882 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6e54030d-7ce7-4a13-b4a3-e67889e7c22d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mw22t\" (UID: \"6e54030d-7ce7-4a13-b4a3-e67889e7c22d\") " pod="openshift-marketplace/marketplace-operator-79b997595-mw22t" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.387691 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw"] Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.387915 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" podUID="7e5c856b-d1a2-49d3-ba56-58711763552c" containerName="route-controller-manager" containerID="cri-o://617f903d507fc7bc8f13ea1767d5ceebb1eb69d7a4ab496ab1f147f4ce164dd1" gracePeriod=30 Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.485236 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6jvk\" (UniqueName: \"kubernetes.io/projected/6ab754af-86ec-4c82-bccc-4f773da62193-kube-api-access-m6jvk\") pod \"collect-profiles-29484720-tt5zk\" (UID: \"6ab754af-86ec-4c82-bccc-4f773da62193\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.485309 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7xlb\" (UniqueName: \"kubernetes.io/projected/6e54030d-7ce7-4a13-b4a3-e67889e7c22d-kube-api-access-l7xlb\") pod \"marketplace-operator-79b997595-mw22t\" (UID: \"6e54030d-7ce7-4a13-b4a3-e67889e7c22d\") " pod="openshift-marketplace/marketplace-operator-79b997595-mw22t" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.485338 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6e54030d-7ce7-4a13-b4a3-e67889e7c22d-marketplace-operator-metrics\") pod 
\"marketplace-operator-79b997595-mw22t\" (UID: \"6e54030d-7ce7-4a13-b4a3-e67889e7c22d\") " pod="openshift-marketplace/marketplace-operator-79b997595-mw22t" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.485364 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6e54030d-7ce7-4a13-b4a3-e67889e7c22d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mw22t\" (UID: \"6e54030d-7ce7-4a13-b4a3-e67889e7c22d\") " pod="openshift-marketplace/marketplace-operator-79b997595-mw22t" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.485445 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6ab754af-86ec-4c82-bccc-4f773da62193-secret-volume\") pod \"collect-profiles-29484720-tt5zk\" (UID: \"6ab754af-86ec-4c82-bccc-4f773da62193\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.485486 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6ab754af-86ec-4c82-bccc-4f773da62193-config-volume\") pod \"collect-profiles-29484720-tt5zk\" (UID: \"6ab754af-86ec-4c82-bccc-4f773da62193\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.486915 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6ab754af-86ec-4c82-bccc-4f773da62193-config-volume\") pod \"collect-profiles-29484720-tt5zk\" (UID: \"6ab754af-86ec-4c82-bccc-4f773da62193\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.487504 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6e54030d-7ce7-4a13-b4a3-e67889e7c22d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mw22t\" (UID: \"6e54030d-7ce7-4a13-b4a3-e67889e7c22d\") " pod="openshift-marketplace/marketplace-operator-79b997595-mw22t" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.492986 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6ab754af-86ec-4c82-bccc-4f773da62193-secret-volume\") pod \"collect-profiles-29484720-tt5zk\" (UID: \"6ab754af-86ec-4c82-bccc-4f773da62193\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.493395 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6e54030d-7ce7-4a13-b4a3-e67889e7c22d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mw22t\" (UID: \"6e54030d-7ce7-4a13-b4a3-e67889e7c22d\") " pod="openshift-marketplace/marketplace-operator-79b997595-mw22t" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.503656 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7xlb\" (UniqueName: \"kubernetes.io/projected/6e54030d-7ce7-4a13-b4a3-e67889e7c22d-kube-api-access-l7xlb\") pod \"marketplace-operator-79b997595-mw22t\" (UID: \"6e54030d-7ce7-4a13-b4a3-e67889e7c22d\") " pod="openshift-marketplace/marketplace-operator-79b997595-mw22t" Jan 22 12:00:18 crc 
kubenswrapper[4773]: I0122 12:00:18.516346 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6jvk\" (UniqueName: \"kubernetes.io/projected/6ab754af-86ec-4c82-bccc-4f773da62193-kube-api-access-m6jvk\") pod \"collect-profiles-29484720-tt5zk\" (UID: \"6ab754af-86ec-4c82-bccc-4f773da62193\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.530190 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mw22t" Jan 22 12:00:18 crc kubenswrapper[4773]: I0122 12:00:18.540451 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk" Jan 22 12:00:19 crc kubenswrapper[4773]: I0122 12:00:19.070451 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk"] Jan 22 12:00:19 crc kubenswrapper[4773]: I0122 12:00:19.197659 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mw22t"] Jan 22 12:00:19 crc kubenswrapper[4773]: I0122 12:00:19.201988 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk" event={"ID":"6ab754af-86ec-4c82-bccc-4f773da62193","Type":"ContainerStarted","Data":"36916759bcb7724eb9b99d959153b4f2d5f861b1eeaf9039bc1501700f99fcc4"} Jan 22 12:00:19 crc kubenswrapper[4773]: W0122 12:00:19.207192 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e54030d_7ce7_4a13_b4a3_e67889e7c22d.slice/crio-34cc135b248c3172a0153841d06b0c418702c5eeeb5e97bfc5ada4548cfafa48 WatchSource:0}: Error finding container 34cc135b248c3172a0153841d06b0c418702c5eeeb5e97bfc5ada4548cfafa48: Status 404 returned error can't find the container with id 34cc135b248c3172a0153841d06b0c418702c5eeeb5e97bfc5ada4548cfafa48 Jan 22 12:00:19 crc kubenswrapper[4773]: I0122 12:00:19.429641 4773 patch_prober.go:28] interesting pod/route-controller-manager-7967db8687-l68jw container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused" start-of-body= Jan 22 12:00:19 crc kubenswrapper[4773]: I0122 12:00:19.429900 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" podUID="7e5c856b-d1a2-49d3-ba56-58711763552c" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused" Jan 22 12:00:19 crc kubenswrapper[4773]: I0122 12:00:19.730699 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-x8gvd" Jan 22 12:00:19 crc kubenswrapper[4773]: I0122 12:00:19.730742 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-x8gvd" Jan 22 12:00:19 crc kubenswrapper[4773]: I0122 12:00:19.790695 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-x8gvd" Jan 22 12:00:19 crc kubenswrapper[4773]: I0122 12:00:19.917984 4773 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8" Jan 22 12:00:19 crc kubenswrapper[4773]: I0122 12:00:19.922312 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" Jan 22 12:00:19 crc kubenswrapper[4773]: I0122 12:00:19.947176 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-779c5db457-5mwb9"] Jan 22 12:00:19 crc kubenswrapper[4773]: E0122 12:00:19.947440 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e5c856b-d1a2-49d3-ba56-58711763552c" containerName="route-controller-manager" Jan 22 12:00:19 crc kubenswrapper[4773]: I0122 12:00:19.947461 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e5c856b-d1a2-49d3-ba56-58711763552c" containerName="route-controller-manager" Jan 22 12:00:19 crc kubenswrapper[4773]: E0122 12:00:19.947477 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a82d4a4-46ea-49de-89bb-9e8057ca5487" containerName="controller-manager" Jan 22 12:00:19 crc kubenswrapper[4773]: I0122 12:00:19.947484 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a82d4a4-46ea-49de-89bb-9e8057ca5487" containerName="controller-manager" Jan 22 12:00:19 crc kubenswrapper[4773]: I0122 12:00:19.947578 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a82d4a4-46ea-49de-89bb-9e8057ca5487" containerName="controller-manager" Jan 22 12:00:19 crc kubenswrapper[4773]: I0122 12:00:19.947591 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e5c856b-d1a2-49d3-ba56-58711763552c" containerName="route-controller-manager" Jan 22 12:00:19 crc kubenswrapper[4773]: I0122 12:00:19.947910 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" Jan 22 12:00:19 crc kubenswrapper[4773]: I0122 12:00:19.977179 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-779c5db457-5mwb9"] Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.010891 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/61e309ff-fbce-4bbd-a1f4-5a9abd387a2f-proxy-ca-bundles\") pod \"controller-manager-779c5db457-5mwb9\" (UID: \"61e309ff-fbce-4bbd-a1f4-5a9abd387a2f\") " pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.010945 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/61e309ff-fbce-4bbd-a1f4-5a9abd387a2f-serving-cert\") pod \"controller-manager-779c5db457-5mwb9\" (UID: \"61e309ff-fbce-4bbd-a1f4-5a9abd387a2f\") " pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.010993 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61e309ff-fbce-4bbd-a1f4-5a9abd387a2f-config\") pod \"controller-manager-779c5db457-5mwb9\" (UID: \"61e309ff-fbce-4bbd-a1f4-5a9abd387a2f\") " pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.011016 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/61e309ff-fbce-4bbd-a1f4-5a9abd387a2f-client-ca\") pod \"controller-manager-779c5db457-5mwb9\" (UID: \"61e309ff-fbce-4bbd-a1f4-5a9abd387a2f\") " pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.011035 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7m87\" (UniqueName: \"kubernetes.io/projected/61e309ff-fbce-4bbd-a1f4-5a9abd387a2f-kube-api-access-g7m87\") pod \"controller-manager-779c5db457-5mwb9\" (UID: \"61e309ff-fbce-4bbd-a1f4-5a9abd387a2f\") " pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.111578 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e5c856b-d1a2-49d3-ba56-58711763552c-config\") pod \"7e5c856b-d1a2-49d3-ba56-58711763552c\" (UID: \"7e5c856b-d1a2-49d3-ba56-58711763552c\") " Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.111906 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a82d4a4-46ea-49de-89bb-9e8057ca5487-serving-cert\") pod \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.111935 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pslg7\" (UniqueName: \"kubernetes.io/projected/8a82d4a4-46ea-49de-89bb-9e8057ca5487-kube-api-access-pslg7\") pod \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " Jan 22 12:00:20 crc 
kubenswrapper[4773]: I0122 12:00:20.111957 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8a82d4a4-46ea-49de-89bb-9e8057ca5487-client-ca\") pod \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.111985 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hwcm\" (UniqueName: \"kubernetes.io/projected/7e5c856b-d1a2-49d3-ba56-58711763552c-kube-api-access-5hwcm\") pod \"7e5c856b-d1a2-49d3-ba56-58711763552c\" (UID: \"7e5c856b-d1a2-49d3-ba56-58711763552c\") " Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.112006 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7e5c856b-d1a2-49d3-ba56-58711763552c-client-ca\") pod \"7e5c856b-d1a2-49d3-ba56-58711763552c\" (UID: \"7e5c856b-d1a2-49d3-ba56-58711763552c\") " Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.112049 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8a82d4a4-46ea-49de-89bb-9e8057ca5487-proxy-ca-bundles\") pod \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.112077 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a82d4a4-46ea-49de-89bb-9e8057ca5487-config\") pod \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\" (UID: \"8a82d4a4-46ea-49de-89bb-9e8057ca5487\") " Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.112128 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e5c856b-d1a2-49d3-ba56-58711763552c-serving-cert\") pod \"7e5c856b-d1a2-49d3-ba56-58711763552c\" (UID: \"7e5c856b-d1a2-49d3-ba56-58711763552c\") " Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.112248 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/61e309ff-fbce-4bbd-a1f4-5a9abd387a2f-proxy-ca-bundles\") pod \"controller-manager-779c5db457-5mwb9\" (UID: \"61e309ff-fbce-4bbd-a1f4-5a9abd387a2f\") " pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.112304 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/61e309ff-fbce-4bbd-a1f4-5a9abd387a2f-serving-cert\") pod \"controller-manager-779c5db457-5mwb9\" (UID: \"61e309ff-fbce-4bbd-a1f4-5a9abd387a2f\") " pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.112333 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61e309ff-fbce-4bbd-a1f4-5a9abd387a2f-config\") pod \"controller-manager-779c5db457-5mwb9\" (UID: \"61e309ff-fbce-4bbd-a1f4-5a9abd387a2f\") " pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.112356 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/61e309ff-fbce-4bbd-a1f4-5a9abd387a2f-client-ca\") pod \"controller-manager-779c5db457-5mwb9\" (UID: \"61e309ff-fbce-4bbd-a1f4-5a9abd387a2f\") " pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.112380 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7m87\" (UniqueName: \"kubernetes.io/projected/61e309ff-fbce-4bbd-a1f4-5a9abd387a2f-kube-api-access-g7m87\") pod \"controller-manager-779c5db457-5mwb9\" (UID: \"61e309ff-fbce-4bbd-a1f4-5a9abd387a2f\") " pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.112501 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a82d4a4-46ea-49de-89bb-9e8057ca5487-client-ca" (OuterVolumeSpecName: "client-ca") pod "8a82d4a4-46ea-49de-89bb-9e8057ca5487" (UID: "8a82d4a4-46ea-49de-89bb-9e8057ca5487"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.112526 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e5c856b-d1a2-49d3-ba56-58711763552c-config" (OuterVolumeSpecName: "config") pod "7e5c856b-d1a2-49d3-ba56-58711763552c" (UID: "7e5c856b-d1a2-49d3-ba56-58711763552c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.113727 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/61e309ff-fbce-4bbd-a1f4-5a9abd387a2f-client-ca\") pod \"controller-manager-779c5db457-5mwb9\" (UID: \"61e309ff-fbce-4bbd-a1f4-5a9abd387a2f\") " pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.114057 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61e309ff-fbce-4bbd-a1f4-5a9abd387a2f-config\") pod \"controller-manager-779c5db457-5mwb9\" (UID: \"61e309ff-fbce-4bbd-a1f4-5a9abd387a2f\") " pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.114720 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/61e309ff-fbce-4bbd-a1f4-5a9abd387a2f-proxy-ca-bundles\") pod \"controller-manager-779c5db457-5mwb9\" (UID: \"61e309ff-fbce-4bbd-a1f4-5a9abd387a2f\") " pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.114882 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a82d4a4-46ea-49de-89bb-9e8057ca5487-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "8a82d4a4-46ea-49de-89bb-9e8057ca5487" (UID: "8a82d4a4-46ea-49de-89bb-9e8057ca5487"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.115173 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a82d4a4-46ea-49de-89bb-9e8057ca5487-config" (OuterVolumeSpecName: "config") pod "8a82d4a4-46ea-49de-89bb-9e8057ca5487" (UID: "8a82d4a4-46ea-49de-89bb-9e8057ca5487"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.117588 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e5c856b-d1a2-49d3-ba56-58711763552c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7e5c856b-d1a2-49d3-ba56-58711763552c" (UID: "7e5c856b-d1a2-49d3-ba56-58711763552c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.118600 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a82d4a4-46ea-49de-89bb-9e8057ca5487-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8a82d4a4-46ea-49de-89bb-9e8057ca5487" (UID: "8a82d4a4-46ea-49de-89bb-9e8057ca5487"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.120727 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e5c856b-d1a2-49d3-ba56-58711763552c-client-ca" (OuterVolumeSpecName: "client-ca") pod "7e5c856b-d1a2-49d3-ba56-58711763552c" (UID: "7e5c856b-d1a2-49d3-ba56-58711763552c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.121429 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e5c856b-d1a2-49d3-ba56-58711763552c-kube-api-access-5hwcm" (OuterVolumeSpecName: "kube-api-access-5hwcm") pod "7e5c856b-d1a2-49d3-ba56-58711763552c" (UID: "7e5c856b-d1a2-49d3-ba56-58711763552c"). InnerVolumeSpecName "kube-api-access-5hwcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.123254 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/61e309ff-fbce-4bbd-a1f4-5a9abd387a2f-serving-cert\") pod \"controller-manager-779c5db457-5mwb9\" (UID: \"61e309ff-fbce-4bbd-a1f4-5a9abd387a2f\") " pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.123424 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a82d4a4-46ea-49de-89bb-9e8057ca5487-kube-api-access-pslg7" (OuterVolumeSpecName: "kube-api-access-pslg7") pod "8a82d4a4-46ea-49de-89bb-9e8057ca5487" (UID: "8a82d4a4-46ea-49de-89bb-9e8057ca5487"). InnerVolumeSpecName "kube-api-access-pslg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.152195 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7m87\" (UniqueName: \"kubernetes.io/projected/61e309ff-fbce-4bbd-a1f4-5a9abd387a2f-kube-api-access-g7m87\") pod \"controller-manager-779c5db457-5mwb9\" (UID: \"61e309ff-fbce-4bbd-a1f4-5a9abd387a2f\") " pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.209572 4773 generic.go:334] "Generic (PLEG): container finished" podID="6ab754af-86ec-4c82-bccc-4f773da62193" containerID="023c6615a88d66acca8896c45bcf2882eb178d099b8b0fdf5b43e9a4bddf4647" exitCode=0 Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.209647 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk" event={"ID":"6ab754af-86ec-4c82-bccc-4f773da62193","Type":"ContainerDied","Data":"023c6615a88d66acca8896c45bcf2882eb178d099b8b0fdf5b43e9a4bddf4647"} Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.211410 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mw22t" event={"ID":"6e54030d-7ce7-4a13-b4a3-e67889e7c22d","Type":"ContainerStarted","Data":"d788c3adf1b7e9120cc6dcd9a5fcdc350a0fed19190b5668f1ccdb2d18a3c4d2"} Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.211444 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mw22t" event={"ID":"6e54030d-7ce7-4a13-b4a3-e67889e7c22d","Type":"ContainerStarted","Data":"34cc135b248c3172a0153841d06b0c418702c5eeeb5e97bfc5ada4548cfafa48"} Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.211597 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-mw22t" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.213068 4773 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8a82d4a4-46ea-49de-89bb-9e8057ca5487-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.213090 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a82d4a4-46ea-49de-89bb-9e8057ca5487-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.213100 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e5c856b-d1a2-49d3-ba56-58711763552c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.213109 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e5c856b-d1a2-49d3-ba56-58711763552c-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.213117 4773 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a82d4a4-46ea-49de-89bb-9e8057ca5487-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.213127 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pslg7\" (UniqueName: \"kubernetes.io/projected/8a82d4a4-46ea-49de-89bb-9e8057ca5487-kube-api-access-pslg7\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:20 crc 
kubenswrapper[4773]: I0122 12:00:20.213136 4773 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8a82d4a4-46ea-49de-89bb-9e8057ca5487-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.213145 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hwcm\" (UniqueName: \"kubernetes.io/projected/7e5c856b-d1a2-49d3-ba56-58711763552c-kube-api-access-5hwcm\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.213152 4773 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7e5c856b-d1a2-49d3-ba56-58711763552c-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.214278 4773 generic.go:334] "Generic (PLEG): container finished" podID="7e5c856b-d1a2-49d3-ba56-58711763552c" containerID="617f903d507fc7bc8f13ea1767d5ceebb1eb69d7a4ab496ab1f147f4ce164dd1" exitCode=0 Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.214332 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.214313 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" event={"ID":"7e5c856b-d1a2-49d3-ba56-58711763552c","Type":"ContainerDied","Data":"617f903d507fc7bc8f13ea1767d5ceebb1eb69d7a4ab496ab1f147f4ce164dd1"} Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.214396 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw" event={"ID":"7e5c856b-d1a2-49d3-ba56-58711763552c","Type":"ContainerDied","Data":"1a4efa7665d808fde935b3d94570243127a6a439057a1a49dc9b921aad5359cd"} Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.214416 4773 scope.go:117] "RemoveContainer" containerID="617f903d507fc7bc8f13ea1767d5ceebb1eb69d7a4ab496ab1f147f4ce164dd1" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.217645 4773 generic.go:334] "Generic (PLEG): container finished" podID="8a82d4a4-46ea-49de-89bb-9e8057ca5487" containerID="2cba6f70f621c434bbad5b08b9aeb795d5e07ce6d67fc4de753640f0206a7c7f" exitCode=0 Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.217896 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.218029 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8" event={"ID":"8a82d4a4-46ea-49de-89bb-9e8057ca5487","Type":"ContainerDied","Data":"2cba6f70f621c434bbad5b08b9aeb795d5e07ce6d67fc4de753640f0206a7c7f"} Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.218076 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8" event={"ID":"8a82d4a4-46ea-49de-89bb-9e8057ca5487","Type":"ContainerDied","Data":"ddf08adc0536e8cf4731e36aa668602b6892cb2d40bd833bdc0eabb2d876733b"} Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.219642 4773 generic.go:334] "Generic (PLEG): container finished" podID="80014b89-9e88-4fd8-a019-619d5e6382aa" containerID="02732aed1974374d62a10881fd7ae643db0a605e328986a4a81725d4e6c28e47" exitCode=0 Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.220340 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wg84l" event={"ID":"80014b89-9e88-4fd8-a019-619d5e6382aa","Type":"ContainerDied","Data":"02732aed1974374d62a10881fd7ae643db0a605e328986a4a81725d4e6c28e47"} Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.235310 4773 scope.go:117] "RemoveContainer" containerID="617f903d507fc7bc8f13ea1767d5ceebb1eb69d7a4ab496ab1f147f4ce164dd1" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.237669 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-mw22t" Jan 22 12:00:20 crc kubenswrapper[4773]: E0122 12:00:20.241860 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"617f903d507fc7bc8f13ea1767d5ceebb1eb69d7a4ab496ab1f147f4ce164dd1\": container with ID starting with 617f903d507fc7bc8f13ea1767d5ceebb1eb69d7a4ab496ab1f147f4ce164dd1 not found: ID does not exist" containerID="617f903d507fc7bc8f13ea1767d5ceebb1eb69d7a4ab496ab1f147f4ce164dd1" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.241909 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"617f903d507fc7bc8f13ea1767d5ceebb1eb69d7a4ab496ab1f147f4ce164dd1"} err="failed to get container status \"617f903d507fc7bc8f13ea1767d5ceebb1eb69d7a4ab496ab1f147f4ce164dd1\": rpc error: code = NotFound desc = could not find container \"617f903d507fc7bc8f13ea1767d5ceebb1eb69d7a4ab496ab1f147f4ce164dd1\": container with ID starting with 617f903d507fc7bc8f13ea1767d5ceebb1eb69d7a4ab496ab1f147f4ce164dd1 not found: ID does not exist" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.241941 4773 scope.go:117] "RemoveContainer" containerID="2cba6f70f621c434bbad5b08b9aeb795d5e07ce6d67fc4de753640f0206a7c7f" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.266565 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw"] Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.267554 4773 scope.go:117] "RemoveContainer" containerID="2cba6f70f621c434bbad5b08b9aeb795d5e07ce6d67fc4de753640f0206a7c7f" Jan 22 12:00:20 crc kubenswrapper[4773]: E0122 12:00:20.267955 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"2cba6f70f621c434bbad5b08b9aeb795d5e07ce6d67fc4de753640f0206a7c7f\": container with ID starting with 2cba6f70f621c434bbad5b08b9aeb795d5e07ce6d67fc4de753640f0206a7c7f not found: ID does not exist" containerID="2cba6f70f621c434bbad5b08b9aeb795d5e07ce6d67fc4de753640f0206a7c7f" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.267992 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2cba6f70f621c434bbad5b08b9aeb795d5e07ce6d67fc4de753640f0206a7c7f"} err="failed to get container status \"2cba6f70f621c434bbad5b08b9aeb795d5e07ce6d67fc4de753640f0206a7c7f\": rpc error: code = NotFound desc = could not find container \"2cba6f70f621c434bbad5b08b9aeb795d5e07ce6d67fc4de753640f0206a7c7f\": container with ID starting with 2cba6f70f621c434bbad5b08b9aeb795d5e07ce6d67fc4de753640f0206a7c7f not found: ID does not exist" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.269384 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7967db8687-l68jw"] Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.274512 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.278067 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-x8gvd" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.284133 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-mw22t" podStartSLOduration=2.284113637 podStartE2EDuration="2.284113637s" podCreationTimestamp="2026-01-22 12:00:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:00:20.28175957 +0000 UTC m=+327.859875395" watchObservedRunningTime="2026-01-22 12:00:20.284113637 +0000 UTC m=+327.862229452" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.349320 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8"] Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.353386 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7c5bc7b89d-hgvp8"] Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.673037 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e5c856b-d1a2-49d3-ba56-58711763552c" path="/var/lib/kubelet/pods/7e5c856b-d1a2-49d3-ba56-58711763552c/volumes" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.674708 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a82d4a4-46ea-49de-89bb-9e8057ca5487" path="/var/lib/kubelet/pods/8a82d4a4-46ea-49de-89bb-9e8057ca5487/volumes" Jan 22 12:00:20 crc kubenswrapper[4773]: I0122 12:00:20.688784 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-779c5db457-5mwb9"] Jan 22 12:00:20 crc kubenswrapper[4773]: W0122 12:00:20.704590 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61e309ff_fbce_4bbd_a1f4_5a9abd387a2f.slice/crio-7e16835de4d27a184be111a9f2cc2a007fc6abadf69d3aa686eb844979281603 WatchSource:0}: Error finding container 7e16835de4d27a184be111a9f2cc2a007fc6abadf69d3aa686eb844979281603: Status 404 
returned error can't find the container with id 7e16835de4d27a184be111a9f2cc2a007fc6abadf69d3aa686eb844979281603 Jan 22 12:00:21 crc kubenswrapper[4773]: I0122 12:00:21.224728 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" event={"ID":"61e309ff-fbce-4bbd-a1f4-5a9abd387a2f","Type":"ContainerStarted","Data":"5278c09ffa6922511308fd3842ac757d77e7a1627e6c71a8d77290be9f232949"} Jan 22 12:00:21 crc kubenswrapper[4773]: I0122 12:00:21.224774 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" event={"ID":"61e309ff-fbce-4bbd-a1f4-5a9abd387a2f","Type":"ContainerStarted","Data":"7e16835de4d27a184be111a9f2cc2a007fc6abadf69d3aa686eb844979281603"} Jan 22 12:00:21 crc kubenswrapper[4773]: I0122 12:00:21.224916 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" Jan 22 12:00:21 crc kubenswrapper[4773]: I0122 12:00:21.228327 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wg84l" event={"ID":"80014b89-9e88-4fd8-a019-619d5e6382aa","Type":"ContainerStarted","Data":"044a1beec4d0e159221bfa951944afab07254b9c40a68f0e5b8243cda19de23a"} Jan 22 12:00:21 crc kubenswrapper[4773]: I0122 12:00:21.230607 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" Jan 22 12:00:21 crc kubenswrapper[4773]: I0122 12:00:21.248832 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-779c5db457-5mwb9" podStartSLOduration=3.248812375 podStartE2EDuration="3.248812375s" podCreationTimestamp="2026-01-22 12:00:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:00:21.244557165 +0000 UTC m=+328.822672990" watchObservedRunningTime="2026-01-22 12:00:21.248812375 +0000 UTC m=+328.826928200" Jan 22 12:00:21 crc kubenswrapper[4773]: I0122 12:00:21.280597 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wg84l" podStartSLOduration=2.634402279 podStartE2EDuration="22.280578701s" podCreationTimestamp="2026-01-22 11:59:59 +0000 UTC" firstStartedPulling="2026-01-22 12:00:01.074750111 +0000 UTC m=+308.652865936" lastFinishedPulling="2026-01-22 12:00:20.720926533 +0000 UTC m=+328.299042358" observedRunningTime="2026-01-22 12:00:21.279457689 +0000 UTC m=+328.857573514" watchObservedRunningTime="2026-01-22 12:00:21.280578701 +0000 UTC m=+328.858694526" Jan 22 12:00:21 crc kubenswrapper[4773]: I0122 12:00:21.884951 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.035594 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6jvk\" (UniqueName: \"kubernetes.io/projected/6ab754af-86ec-4c82-bccc-4f773da62193-kube-api-access-m6jvk\") pod \"6ab754af-86ec-4c82-bccc-4f773da62193\" (UID: \"6ab754af-86ec-4c82-bccc-4f773da62193\") " Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.035750 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6ab754af-86ec-4c82-bccc-4f773da62193-secret-volume\") pod \"6ab754af-86ec-4c82-bccc-4f773da62193\" (UID: \"6ab754af-86ec-4c82-bccc-4f773da62193\") " Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.035810 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6ab754af-86ec-4c82-bccc-4f773da62193-config-volume\") pod \"6ab754af-86ec-4c82-bccc-4f773da62193\" (UID: \"6ab754af-86ec-4c82-bccc-4f773da62193\") " Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.036594 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ab754af-86ec-4c82-bccc-4f773da62193-config-volume" (OuterVolumeSpecName: "config-volume") pod "6ab754af-86ec-4c82-bccc-4f773da62193" (UID: "6ab754af-86ec-4c82-bccc-4f773da62193"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.041132 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ab754af-86ec-4c82-bccc-4f773da62193-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6ab754af-86ec-4c82-bccc-4f773da62193" (UID: "6ab754af-86ec-4c82-bccc-4f773da62193"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.042554 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ab754af-86ec-4c82-bccc-4f773da62193-kube-api-access-m6jvk" (OuterVolumeSpecName: "kube-api-access-m6jvk") pod "6ab754af-86ec-4c82-bccc-4f773da62193" (UID: "6ab754af-86ec-4c82-bccc-4f773da62193"). InnerVolumeSpecName "kube-api-access-m6jvk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.137695 4773 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6ab754af-86ec-4c82-bccc-4f773da62193-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.137756 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6jvk\" (UniqueName: \"kubernetes.io/projected/6ab754af-86ec-4c82-bccc-4f773da62193-kube-api-access-m6jvk\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.137770 4773 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6ab754af-86ec-4c82-bccc-4f773da62193-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.234533 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.234570 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk" event={"ID":"6ab754af-86ec-4c82-bccc-4f773da62193","Type":"ContainerDied","Data":"36916759bcb7724eb9b99d959153b4f2d5f861b1eeaf9039bc1501700f99fcc4"} Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.234680 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="36916759bcb7724eb9b99d959153b4f2d5f861b1eeaf9039bc1501700f99fcc4" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.627177 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv"] Jan 22 12:00:22 crc kubenswrapper[4773]: E0122 12:00:22.627624 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ab754af-86ec-4c82-bccc-4f773da62193" containerName="collect-profiles" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.627636 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ab754af-86ec-4c82-bccc-4f773da62193" containerName="collect-profiles" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.627719 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ab754af-86ec-4c82-bccc-4f773da62193" containerName="collect-profiles" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.628134 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.630679 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.630885 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.631024 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.631104 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.631328 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.633939 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.637751 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv"] Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.746757 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/554cbd98-9568-4477-8375-94f71bfce64d-client-ca\") pod \"route-controller-manager-5f9f899c86-8vqcv\" (UID: \"554cbd98-9568-4477-8375-94f71bfce64d\") " pod="openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.746852 4773 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcvj2\" (UniqueName: \"kubernetes.io/projected/554cbd98-9568-4477-8375-94f71bfce64d-kube-api-access-zcvj2\") pod \"route-controller-manager-5f9f899c86-8vqcv\" (UID: \"554cbd98-9568-4477-8375-94f71bfce64d\") " pod="openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.746885 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/554cbd98-9568-4477-8375-94f71bfce64d-config\") pod \"route-controller-manager-5f9f899c86-8vqcv\" (UID: \"554cbd98-9568-4477-8375-94f71bfce64d\") " pod="openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.746929 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/554cbd98-9568-4477-8375-94f71bfce64d-serving-cert\") pod \"route-controller-manager-5f9f899c86-8vqcv\" (UID: \"554cbd98-9568-4477-8375-94f71bfce64d\") " pod="openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.848261 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcvj2\" (UniqueName: \"kubernetes.io/projected/554cbd98-9568-4477-8375-94f71bfce64d-kube-api-access-zcvj2\") pod \"route-controller-manager-5f9f899c86-8vqcv\" (UID: \"554cbd98-9568-4477-8375-94f71bfce64d\") " pod="openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.848353 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/554cbd98-9568-4477-8375-94f71bfce64d-config\") pod \"route-controller-manager-5f9f899c86-8vqcv\" (UID: \"554cbd98-9568-4477-8375-94f71bfce64d\") " pod="openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.848604 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/554cbd98-9568-4477-8375-94f71bfce64d-serving-cert\") pod \"route-controller-manager-5f9f899c86-8vqcv\" (UID: \"554cbd98-9568-4477-8375-94f71bfce64d\") " pod="openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.848691 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/554cbd98-9568-4477-8375-94f71bfce64d-client-ca\") pod \"route-controller-manager-5f9f899c86-8vqcv\" (UID: \"554cbd98-9568-4477-8375-94f71bfce64d\") " pod="openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.849477 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/554cbd98-9568-4477-8375-94f71bfce64d-config\") pod \"route-controller-manager-5f9f899c86-8vqcv\" (UID: \"554cbd98-9568-4477-8375-94f71bfce64d\") " pod="openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.849797 4773 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/554cbd98-9568-4477-8375-94f71bfce64d-client-ca\") pod \"route-controller-manager-5f9f899c86-8vqcv\" (UID: \"554cbd98-9568-4477-8375-94f71bfce64d\") " pod="openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.859987 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/554cbd98-9568-4477-8375-94f71bfce64d-serving-cert\") pod \"route-controller-manager-5f9f899c86-8vqcv\" (UID: \"554cbd98-9568-4477-8375-94f71bfce64d\") " pod="openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.863889 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcvj2\" (UniqueName: \"kubernetes.io/projected/554cbd98-9568-4477-8375-94f71bfce64d-kube-api-access-zcvj2\") pod \"route-controller-manager-5f9f899c86-8vqcv\" (UID: \"554cbd98-9568-4477-8375-94f71bfce64d\") " pod="openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv" Jan 22 12:00:22 crc kubenswrapper[4773]: I0122 12:00:22.964820 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv" Jan 22 12:00:23 crc kubenswrapper[4773]: I0122 12:00:23.366383 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv"] Jan 22 12:00:23 crc kubenswrapper[4773]: W0122 12:00:23.374325 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod554cbd98_9568_4477_8375_94f71bfce64d.slice/crio-884a4ba45cf5daf88b061b5ca675855baab2bb17d6ff3aa5b9dfb74a37eafb40 WatchSource:0}: Error finding container 884a4ba45cf5daf88b061b5ca675855baab2bb17d6ff3aa5b9dfb74a37eafb40: Status 404 returned error can't find the container with id 884a4ba45cf5daf88b061b5ca675855baab2bb17d6ff3aa5b9dfb74a37eafb40 Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.200203 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-pftfw"] Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.201443 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.227945 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-pftfw"] Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.245695 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv" event={"ID":"554cbd98-9568-4477-8375-94f71bfce64d","Type":"ContainerStarted","Data":"7e7b0cf7b9811f8eb07f7f63e62c66290aff7a2365f4b978eba41608a360c9c9"} Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.245999 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv" event={"ID":"554cbd98-9568-4477-8375-94f71bfce64d","Type":"ContainerStarted","Data":"884a4ba45cf5daf88b061b5ca675855baab2bb17d6ff3aa5b9dfb74a37eafb40"} Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.246776 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.251460 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.324926 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5f9f899c86-8vqcv" podStartSLOduration=6.324906215 podStartE2EDuration="6.324906215s" podCreationTimestamp="2026-01-22 12:00:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:00:24.32081819 +0000 UTC m=+331.898934015" watchObservedRunningTime="2026-01-22 12:00:24.324906215 +0000 UTC m=+331.903022040" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.368225 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/43a1b7c6-c222-4bff-b25d-573f50702bd2-installation-pull-secrets\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.368349 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/43a1b7c6-c222-4bff-b25d-573f50702bd2-trusted-ca\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.368390 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/43a1b7c6-c222-4bff-b25d-573f50702bd2-ca-trust-extracted\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.368423 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.368456 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/43a1b7c6-c222-4bff-b25d-573f50702bd2-registry-tls\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.368506 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/43a1b7c6-c222-4bff-b25d-573f50702bd2-bound-sa-token\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.368584 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4mlr\" (UniqueName: \"kubernetes.io/projected/43a1b7c6-c222-4bff-b25d-573f50702bd2-kube-api-access-p4mlr\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.368659 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/43a1b7c6-c222-4bff-b25d-573f50702bd2-registry-certificates\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.404524 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.469464 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/43a1b7c6-c222-4bff-b25d-573f50702bd2-registry-certificates\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.469543 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/43a1b7c6-c222-4bff-b25d-573f50702bd2-installation-pull-secrets\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.469578 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/43a1b7c6-c222-4bff-b25d-573f50702bd2-trusted-ca\") pod 
\"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.469609 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/43a1b7c6-c222-4bff-b25d-573f50702bd2-ca-trust-extracted\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.469637 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/43a1b7c6-c222-4bff-b25d-573f50702bd2-registry-tls\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.469671 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/43a1b7c6-c222-4bff-b25d-573f50702bd2-bound-sa-token\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.469718 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4mlr\" (UniqueName: \"kubernetes.io/projected/43a1b7c6-c222-4bff-b25d-573f50702bd2-kube-api-access-p4mlr\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.470617 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/43a1b7c6-c222-4bff-b25d-573f50702bd2-ca-trust-extracted\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.471858 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/43a1b7c6-c222-4bff-b25d-573f50702bd2-registry-certificates\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.471866 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/43a1b7c6-c222-4bff-b25d-573f50702bd2-trusted-ca\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.475228 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/43a1b7c6-c222-4bff-b25d-573f50702bd2-registry-tls\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.475334 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/43a1b7c6-c222-4bff-b25d-573f50702bd2-installation-pull-secrets\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.490156 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4mlr\" (UniqueName: \"kubernetes.io/projected/43a1b7c6-c222-4bff-b25d-573f50702bd2-kube-api-access-p4mlr\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.491012 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/43a1b7c6-c222-4bff-b25d-573f50702bd2-bound-sa-token\") pod \"image-registry-66df7c8f76-pftfw\" (UID: \"43a1b7c6-c222-4bff-b25d-573f50702bd2\") " pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:24 crc kubenswrapper[4773]: I0122 12:00:24.518097 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:25 crc kubenswrapper[4773]: I0122 12:00:25.250721 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-pftfw"] Jan 22 12:00:26 crc kubenswrapper[4773]: I0122 12:00:26.260461 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" event={"ID":"43a1b7c6-c222-4bff-b25d-573f50702bd2","Type":"ContainerStarted","Data":"1f1f717a3cbd0169e0a2a48ce531d7e6fe17bc522aa1372183389de805bb1e17"} Jan 22 12:00:26 crc kubenswrapper[4773]: I0122 12:00:26.260789 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" event={"ID":"43a1b7c6-c222-4bff-b25d-573f50702bd2","Type":"ContainerStarted","Data":"e2797f36a05787dded8240a6699aa11f56d2ea470fc7607e3b13a4fb710123d7"} Jan 22 12:00:26 crc kubenswrapper[4773]: I0122 12:00:26.283638 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" podStartSLOduration=2.283600869 podStartE2EDuration="2.283600869s" podCreationTimestamp="2026-01-22 12:00:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:00:26.277043164 +0000 UTC m=+333.855159009" watchObservedRunningTime="2026-01-22 12:00:26.283600869 +0000 UTC m=+333.861716704" Jan 22 12:00:27 crc kubenswrapper[4773]: I0122 12:00:27.268019 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:29 crc kubenswrapper[4773]: I0122 12:00:29.937071 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wg84l" Jan 22 12:00:29 crc kubenswrapper[4773]: I0122 12:00:29.937427 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wg84l" Jan 22 12:00:29 crc kubenswrapper[4773]: I0122 12:00:29.975221 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wg84l" Jan 22 12:00:30 crc kubenswrapper[4773]: I0122 
12:00:30.320127 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wg84l" Jan 22 12:00:43 crc kubenswrapper[4773]: I0122 12:00:43.299700 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" podUID="4db3dc80-10df-4d72-925f-ab5c927bc6ef" containerName="oauth-openshift" containerID="cri-o://eac1dcfef43371100e9304497bfebdb81e98c2e4d7d99489061b48eddcb8f0ae" gracePeriod=15 Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.215905 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.250102 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-bfb76855c-8rnfr"] Jan 22 12:00:44 crc kubenswrapper[4773]: E0122 12:00:44.250362 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4db3dc80-10df-4d72-925f-ab5c927bc6ef" containerName="oauth-openshift" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.250377 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="4db3dc80-10df-4d72-925f-ab5c927bc6ef" containerName="oauth-openshift" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.250492 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="4db3dc80-10df-4d72-925f-ab5c927bc6ef" containerName="oauth-openshift" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.251053 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.251698 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-serving-cert\") pod \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.251793 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-login\") pod \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.251822 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xv2v\" (UniqueName: \"kubernetes.io/projected/4db3dc80-10df-4d72-925f-ab5c927bc6ef-kube-api-access-5xv2v\") pod \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.251863 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4db3dc80-10df-4d72-925f-ab5c927bc6ef-audit-dir\") pod \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.251918 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-service-ca\") pod \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\" (UID: 
\"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.251946 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-error\") pod \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.252069 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4db3dc80-10df-4d72-925f-ab5c927bc6ef-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "4db3dc80-10df-4d72-925f-ab5c927bc6ef" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.252634 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "4db3dc80-10df-4d72-925f-ab5c927bc6ef" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.251978 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-trusted-ca-bundle\") pod \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.252818 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "4db3dc80-10df-4d72-925f-ab5c927bc6ef" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.252832 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-router-certs\") pod \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.252876 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-provider-selection\") pod \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.252905 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-idp-0-file-data\") pod \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.252938 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-cliconfig\") pod \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.253085 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-ocp-branding-template\") pod \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.253919 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "4db3dc80-10df-4d72-925f-ab5c927bc6ef" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.253124 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-session\") pod \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.254018 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-audit-policies\") pod \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\" (UID: \"4db3dc80-10df-4d72-925f-ab5c927bc6ef\") " Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.254649 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "4db3dc80-10df-4d72-925f-ab5c927bc6ef" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.255459 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.255482 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.255498 4773 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.255512 4773 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4db3dc80-10df-4d72-925f-ab5c927bc6ef-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.255524 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.258774 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "4db3dc80-10df-4d72-925f-ab5c927bc6ef" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.258791 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4db3dc80-10df-4d72-925f-ab5c927bc6ef-kube-api-access-5xv2v" (OuterVolumeSpecName: "kube-api-access-5xv2v") pod "4db3dc80-10df-4d72-925f-ab5c927bc6ef" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef"). 
InnerVolumeSpecName "kube-api-access-5xv2v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.260643 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "4db3dc80-10df-4d72-925f-ab5c927bc6ef" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.262598 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "4db3dc80-10df-4d72-925f-ab5c927bc6ef" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.263145 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "4db3dc80-10df-4d72-925f-ab5c927bc6ef" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.266373 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "4db3dc80-10df-4d72-925f-ab5c927bc6ef" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.266667 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "4db3dc80-10df-4d72-925f-ab5c927bc6ef" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.267025 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-bfb76855c-8rnfr"] Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.271235 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "4db3dc80-10df-4d72-925f-ab5c927bc6ef" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.272862 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "4db3dc80-10df-4d72-925f-ab5c927bc6ef" (UID: "4db3dc80-10df-4d72-925f-ab5c927bc6ef"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.356936 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.356979 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-user-template-login\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357001 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/83e29e56-f54a-4296-856b-20bcd9153614-audit-dir\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357019 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-cliconfig\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357039 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/83e29e56-f54a-4296-856b-20bcd9153614-audit-policies\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357055 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-session\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357119 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-serving-cert\") pod 
\"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357159 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357187 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tq2wx\" (UniqueName: \"kubernetes.io/projected/83e29e56-f54a-4296-856b-20bcd9153614-kube-api-access-tq2wx\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357219 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357244 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-user-template-error\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357266 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-service-ca\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357331 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357358 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-router-certs\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357426 4773 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357443 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357457 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xv2v\" (UniqueName: \"kubernetes.io/projected/4db3dc80-10df-4d72-925f-ab5c927bc6ef-kube-api-access-5xv2v\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357470 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357484 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357496 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357509 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357521 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.357533 4773 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4db3dc80-10df-4d72-925f-ab5c927bc6ef-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.371035 4773 generic.go:334] "Generic (PLEG): container finished" podID="4db3dc80-10df-4d72-925f-ab5c927bc6ef" containerID="eac1dcfef43371100e9304497bfebdb81e98c2e4d7d99489061b48eddcb8f0ae" exitCode=0 Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.371093 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.371091 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" event={"ID":"4db3dc80-10df-4d72-925f-ab5c927bc6ef","Type":"ContainerDied","Data":"eac1dcfef43371100e9304497bfebdb81e98c2e4d7d99489061b48eddcb8f0ae"} Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.371206 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-nwwct" event={"ID":"4db3dc80-10df-4d72-925f-ab5c927bc6ef","Type":"ContainerDied","Data":"a565c7c274ccc65d6ae3457d80e53b527ab861b93d1caa877bcb03c851b1cf84"} Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.371232 4773 scope.go:117] "RemoveContainer" containerID="eac1dcfef43371100e9304497bfebdb81e98c2e4d7d99489061b48eddcb8f0ae" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.395967 4773 scope.go:117] "RemoveContainer" containerID="eac1dcfef43371100e9304497bfebdb81e98c2e4d7d99489061b48eddcb8f0ae" Jan 22 12:00:44 crc kubenswrapper[4773]: E0122 12:00:44.398816 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eac1dcfef43371100e9304497bfebdb81e98c2e4d7d99489061b48eddcb8f0ae\": container with ID starting with eac1dcfef43371100e9304497bfebdb81e98c2e4d7d99489061b48eddcb8f0ae not found: ID does not exist" containerID="eac1dcfef43371100e9304497bfebdb81e98c2e4d7d99489061b48eddcb8f0ae" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.398887 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eac1dcfef43371100e9304497bfebdb81e98c2e4d7d99489061b48eddcb8f0ae"} err="failed to get container status \"eac1dcfef43371100e9304497bfebdb81e98c2e4d7d99489061b48eddcb8f0ae\": rpc error: code = NotFound desc = could not find container \"eac1dcfef43371100e9304497bfebdb81e98c2e4d7d99489061b48eddcb8f0ae\": container with ID starting with eac1dcfef43371100e9304497bfebdb81e98c2e4d7d99489061b48eddcb8f0ae not found: ID does not exist" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.402366 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-nwwct"] Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.405999 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-nwwct"] Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.458853 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.459228 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-router-certs\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.459398 4773 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.459514 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-user-template-login\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.459645 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/83e29e56-f54a-4296-856b-20bcd9153614-audit-dir\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.459758 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-cliconfig\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.459874 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/83e29e56-f54a-4296-856b-20bcd9153614-audit-policies\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.459984 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-session\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.460097 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-serving-cert\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.460194 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.459805 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/83e29e56-f54a-4296-856b-20bcd9153614-audit-dir\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.460276 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tq2wx\" (UniqueName: \"kubernetes.io/projected/83e29e56-f54a-4296-856b-20bcd9153614-kube-api-access-tq2wx\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.460386 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.460423 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-user-template-error\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.460453 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-service-ca\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.460943 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-cliconfig\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.461151 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-service-ca\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.461369 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.462488 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/83e29e56-f54a-4296-856b-20bcd9153614-audit-policies\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.463824 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.463880 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-session\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.464391 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-serving-cert\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.464712 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-user-template-error\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.465649 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.465783 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-router-certs\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.466149 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.477703 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/83e29e56-f54a-4296-856b-20bcd9153614-v4-0-config-user-template-login\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.487979 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tq2wx\" (UniqueName: \"kubernetes.io/projected/83e29e56-f54a-4296-856b-20bcd9153614-kube-api-access-tq2wx\") pod \"oauth-openshift-bfb76855c-8rnfr\" (UID: \"83e29e56-f54a-4296-856b-20bcd9153614\") " pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.524648 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-pftfw" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.581412 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-dzt2p"] Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.617452 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:44 crc kubenswrapper[4773]: I0122 12:00:44.664675 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4db3dc80-10df-4d72-925f-ab5c927bc6ef" path="/var/lib/kubelet/pods/4db3dc80-10df-4d72-925f-ab5c927bc6ef/volumes" Jan 22 12:00:45 crc kubenswrapper[4773]: I0122 12:00:45.000728 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-bfb76855c-8rnfr"] Jan 22 12:00:45 crc kubenswrapper[4773]: W0122 12:00:45.011544 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod83e29e56_f54a_4296_856b_20bcd9153614.slice/crio-a801a07ba87aa300373040f401a8f3b4bb9335c4a5c65cabade3d704ef3cefe5 WatchSource:0}: Error finding container a801a07ba87aa300373040f401a8f3b4bb9335c4a5c65cabade3d704ef3cefe5: Status 404 returned error can't find the container with id a801a07ba87aa300373040f401a8f3b4bb9335c4a5c65cabade3d704ef3cefe5 Jan 22 12:00:45 crc kubenswrapper[4773]: I0122 12:00:45.377238 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" event={"ID":"83e29e56-f54a-4296-856b-20bcd9153614","Type":"ContainerStarted","Data":"1c71314db136ae0797754e76dcf9495cc4ef6dad8f299e10e74af5342cd5cf87"} Jan 22 12:00:45 crc kubenswrapper[4773]: I0122 12:00:45.377555 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" event={"ID":"83e29e56-f54a-4296-856b-20bcd9153614","Type":"ContainerStarted","Data":"a801a07ba87aa300373040f401a8f3b4bb9335c4a5c65cabade3d704ef3cefe5"} Jan 22 12:00:45 crc kubenswrapper[4773]: I0122 12:00:45.377570 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:00:45 crc kubenswrapper[4773]: I0122 12:00:45.410379 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" podStartSLOduration=27.41036044 podStartE2EDuration="27.41036044s" podCreationTimestamp="2026-01-22 12:00:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:00:45.40895048 
+0000 UTC m=+352.987066315" watchObservedRunningTime="2026-01-22 12:00:45.41036044 +0000 UTC m=+352.988476275" Jan 22 12:00:45 crc kubenswrapper[4773]: I0122 12:00:45.982988 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-bfb76855c-8rnfr" Jan 22 12:01:04 crc kubenswrapper[4773]: I0122 12:01:04.073834 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:01:04 crc kubenswrapper[4773]: I0122 12:01:04.074398 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:01:09 crc kubenswrapper[4773]: I0122 12:01:09.622584 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" podUID="74d68a97-0d9e-4eb0-886f-a61ae22935ab" containerName="registry" containerID="cri-o://16d390b079acd88834765f02fc828860824b5fab4222befe2ede0a1f1be86d15" gracePeriod=30 Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.010166 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.146521 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/74d68a97-0d9e-4eb0-886f-a61ae22935ab-ca-trust-extracted\") pod \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.146786 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.147205 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/74d68a97-0d9e-4eb0-886f-a61ae22935ab-trusted-ca\") pod \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.147324 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/74d68a97-0d9e-4eb0-886f-a61ae22935ab-installation-pull-secrets\") pod \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.148179 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74d68a97-0d9e-4eb0-886f-a61ae22935ab-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "74d68a97-0d9e-4eb0-886f-a61ae22935ab" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.147401 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfwl9\" (UniqueName: \"kubernetes.io/projected/74d68a97-0d9e-4eb0-886f-a61ae22935ab-kube-api-access-nfwl9\") pod \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.148539 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/74d68a97-0d9e-4eb0-886f-a61ae22935ab-registry-certificates\") pod \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.148578 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74d68a97-0d9e-4eb0-886f-a61ae22935ab-bound-sa-token\") pod \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.148606 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/74d68a97-0d9e-4eb0-886f-a61ae22935ab-registry-tls\") pod \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\" (UID: \"74d68a97-0d9e-4eb0-886f-a61ae22935ab\") " Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.148926 4773 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/74d68a97-0d9e-4eb0-886f-a61ae22935ab-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.149236 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74d68a97-0d9e-4eb0-886f-a61ae22935ab-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "74d68a97-0d9e-4eb0-886f-a61ae22935ab" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.154734 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74d68a97-0d9e-4eb0-886f-a61ae22935ab-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "74d68a97-0d9e-4eb0-886f-a61ae22935ab" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.155048 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74d68a97-0d9e-4eb0-886f-a61ae22935ab-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "74d68a97-0d9e-4eb0-886f-a61ae22935ab" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.155680 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74d68a97-0d9e-4eb0-886f-a61ae22935ab-kube-api-access-nfwl9" (OuterVolumeSpecName: "kube-api-access-nfwl9") pod "74d68a97-0d9e-4eb0-886f-a61ae22935ab" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab"). InnerVolumeSpecName "kube-api-access-nfwl9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.156075 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74d68a97-0d9e-4eb0-886f-a61ae22935ab-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "74d68a97-0d9e-4eb0-886f-a61ae22935ab" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.167165 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "74d68a97-0d9e-4eb0-886f-a61ae22935ab" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.174074 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74d68a97-0d9e-4eb0-886f-a61ae22935ab-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "74d68a97-0d9e-4eb0-886f-a61ae22935ab" (UID: "74d68a97-0d9e-4eb0-886f-a61ae22935ab"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.250915 4773 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/74d68a97-0d9e-4eb0-886f-a61ae22935ab-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.251339 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfwl9\" (UniqueName: \"kubernetes.io/projected/74d68a97-0d9e-4eb0-886f-a61ae22935ab-kube-api-access-nfwl9\") on node \"crc\" DevicePath \"\"" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.251481 4773 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/74d68a97-0d9e-4eb0-886f-a61ae22935ab-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.251630 4773 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74d68a97-0d9e-4eb0-886f-a61ae22935ab-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.251799 4773 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/74d68a97-0d9e-4eb0-886f-a61ae22935ab-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.251968 4773 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/74d68a97-0d9e-4eb0-886f-a61ae22935ab-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.527789 4773 generic.go:334] "Generic (PLEG): container finished" podID="74d68a97-0d9e-4eb0-886f-a61ae22935ab" containerID="16d390b079acd88834765f02fc828860824b5fab4222befe2ede0a1f1be86d15" exitCode=0 Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.527842 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" 
event={"ID":"74d68a97-0d9e-4eb0-886f-a61ae22935ab","Type":"ContainerDied","Data":"16d390b079acd88834765f02fc828860824b5fab4222befe2ede0a1f1be86d15"} Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.527892 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.528444 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-dzt2p" event={"ID":"74d68a97-0d9e-4eb0-886f-a61ae22935ab","Type":"ContainerDied","Data":"8944987e21a184f5989bdef1f5ffd54a1d443ccb95dffa82ebe4f007f21d6ab3"} Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.528472 4773 scope.go:117] "RemoveContainer" containerID="16d390b079acd88834765f02fc828860824b5fab4222befe2ede0a1f1be86d15" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.558956 4773 scope.go:117] "RemoveContainer" containerID="16d390b079acd88834765f02fc828860824b5fab4222befe2ede0a1f1be86d15" Jan 22 12:01:10 crc kubenswrapper[4773]: E0122 12:01:10.560401 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16d390b079acd88834765f02fc828860824b5fab4222befe2ede0a1f1be86d15\": container with ID starting with 16d390b079acd88834765f02fc828860824b5fab4222befe2ede0a1f1be86d15 not found: ID does not exist" containerID="16d390b079acd88834765f02fc828860824b5fab4222befe2ede0a1f1be86d15" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.560536 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16d390b079acd88834765f02fc828860824b5fab4222befe2ede0a1f1be86d15"} err="failed to get container status \"16d390b079acd88834765f02fc828860824b5fab4222befe2ede0a1f1be86d15\": rpc error: code = NotFound desc = could not find container \"16d390b079acd88834765f02fc828860824b5fab4222befe2ede0a1f1be86d15\": container with ID starting with 16d390b079acd88834765f02fc828860824b5fab4222befe2ede0a1f1be86d15 not found: ID does not exist" Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.562554 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-dzt2p"] Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.568243 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-dzt2p"] Jan 22 12:01:10 crc kubenswrapper[4773]: I0122 12:01:10.669738 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74d68a97-0d9e-4eb0-886f-a61ae22935ab" path="/var/lib/kubelet/pods/74d68a97-0d9e-4eb0-886f-a61ae22935ab/volumes" Jan 22 12:01:34 crc kubenswrapper[4773]: I0122 12:01:34.074888 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:01:34 crc kubenswrapper[4773]: I0122 12:01:34.075963 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:02:04 crc kubenswrapper[4773]: I0122 12:02:04.073999 4773 patch_prober.go:28] 
interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:02:04 crc kubenswrapper[4773]: I0122 12:02:04.074560 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:02:04 crc kubenswrapper[4773]: I0122 12:02:04.074602 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 12:02:04 crc kubenswrapper[4773]: I0122 12:02:04.075101 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"267acbc2d90752d5471c6ac9652aabd9b488a23103e0c15252485cd6a27ff8d2"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 12:02:04 crc kubenswrapper[4773]: I0122 12:02:04.075147 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://267acbc2d90752d5471c6ac9652aabd9b488a23103e0c15252485cd6a27ff8d2" gracePeriod=600 Jan 22 12:02:04 crc kubenswrapper[4773]: I0122 12:02:04.835019 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="267acbc2d90752d5471c6ac9652aabd9b488a23103e0c15252485cd6a27ff8d2" exitCode=0 Jan 22 12:02:04 crc kubenswrapper[4773]: I0122 12:02:04.835108 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"267acbc2d90752d5471c6ac9652aabd9b488a23103e0c15252485cd6a27ff8d2"} Jan 22 12:02:04 crc kubenswrapper[4773]: I0122 12:02:04.835602 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"5b191b48671b70945f5bd04770a902a07ca9065798b07b504fd658fdbc68a485"} Jan 22 12:02:04 crc kubenswrapper[4773]: I0122 12:02:04.835630 4773 scope.go:117] "RemoveContainer" containerID="c28d1b06f0f72402917f677384d42321b60562717490838300e41a555cdb40d6" Jan 22 12:04:04 crc kubenswrapper[4773]: I0122 12:04:04.074805 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:04:04 crc kubenswrapper[4773]: I0122 12:04:04.075340 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 
12:04:34 crc kubenswrapper[4773]: I0122 12:04:34.074394 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:04:34 crc kubenswrapper[4773]: I0122 12:04:34.074888 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:05:04 crc kubenswrapper[4773]: I0122 12:05:04.074994 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:05:04 crc kubenswrapper[4773]: I0122 12:05:04.075825 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:05:04 crc kubenswrapper[4773]: I0122 12:05:04.075869 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 12:05:04 crc kubenswrapper[4773]: I0122 12:05:04.076473 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5b191b48671b70945f5bd04770a902a07ca9065798b07b504fd658fdbc68a485"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 12:05:04 crc kubenswrapper[4773]: I0122 12:05:04.076530 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://5b191b48671b70945f5bd04770a902a07ca9065798b07b504fd658fdbc68a485" gracePeriod=600 Jan 22 12:05:04 crc kubenswrapper[4773]: I0122 12:05:04.965837 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="5b191b48671b70945f5bd04770a902a07ca9065798b07b504fd658fdbc68a485" exitCode=0 Jan 22 12:05:04 crc kubenswrapper[4773]: I0122 12:05:04.965892 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"5b191b48671b70945f5bd04770a902a07ca9065798b07b504fd658fdbc68a485"} Jan 22 12:05:04 crc kubenswrapper[4773]: I0122 12:05:04.966160 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"7882b6d2596720092fbfa9cbf9f782df49bd4b06ac71c0c20c36688e60fbb228"} Jan 22 12:05:04 crc kubenswrapper[4773]: I0122 12:05:04.966181 4773 scope.go:117] "RemoveContainer" 
containerID="267acbc2d90752d5471c6ac9652aabd9b488a23103e0c15252485cd6a27ff8d2" Jan 22 12:07:04 crc kubenswrapper[4773]: I0122 12:07:04.074229 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:07:04 crc kubenswrapper[4773]: I0122 12:07:04.075248 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.141083 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-pplsq"] Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.142139 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="northd" containerID="cri-o://604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d" gracePeriod=30 Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.142219 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="sbdb" containerID="cri-o://2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa" gracePeriod=30 Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.142261 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4" gracePeriod=30 Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.142248 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="nbdb" containerID="cri-o://c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d" gracePeriod=30 Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.142334 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovn-acl-logging" containerID="cri-o://f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359" gracePeriod=30 Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.142362 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovn-controller" containerID="cri-o://fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1" gracePeriod=30 Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.142429 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="kube-rbac-proxy-node" containerID="cri-o://c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d" 
gracePeriod=30 Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.180562 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovnkube-controller" containerID="cri-o://a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5" gracePeriod=30 Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.425016 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovnkube-controller/3.log" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.427316 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovn-acl-logging/0.log" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.427794 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovn-controller/0.log" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.428248 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.493376 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-run-systemd\") pod \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.493430 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-run-ovn\") pod \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.493465 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-ovnkube-config\") pod \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.493497 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-systemd-units\") pod \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.493614 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.493979 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.494421 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-ovnkube-script-lib\") pod \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.494473 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-run-ovn-kubernetes\") pod \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.494505 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4szgh\" (UniqueName: \"kubernetes.io/projected/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-kube-api-access-4szgh\") pod \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.494524 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-etc-openvswitch\") pod \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.494546 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-log-socket\") pod \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.494567 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-kubelet\") pod \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.494590 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-cni-bin\") pod \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.494611 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-ovn-node-metrics-cert\") pod \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.494633 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-slash\") pod \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.494660 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-run-openvswitch\") pod 
\"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.494685 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-cni-netd\") pod \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.494726 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-var-lib-cni-networks-ovn-kubernetes\") pod \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.494767 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-run-netns\") pod \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.494799 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-env-overrides\") pod \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.494870 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-node-log\") pod \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.494887 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.494899 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-var-lib-openvswitch\") pod \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\" (UID: \"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5\") " Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.494919 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.495187 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.495217 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.495570 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.495608 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-log-socket" (OuterVolumeSpecName: "log-socket") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.495633 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.495653 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.496577 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-slash" (OuterVolumeSpecName: "host-slash") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.496881 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.496950 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.496982 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.497040 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-node-log" (OuterVolumeSpecName: "node-log") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.497583 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.498427 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.499820 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-kube-api-access-4szgh" (OuterVolumeSpecName: "kube-api-access-4szgh") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "kube-api-access-4szgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.499819 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.500061 4773 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.500079 4773 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.500090 4773 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.500103 4773 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.506451 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" (UID: "7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.521414 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-hgfcn"] Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.521690 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="kube-rbac-proxy-ovn-metrics" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.521712 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="kube-rbac-proxy-ovn-metrics" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.521728 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovnkube-controller" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.521740 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovnkube-controller" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.521751 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovnkube-controller" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.521760 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovnkube-controller" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.521769 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovnkube-controller" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.521777 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovnkube-controller" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.521791 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74d68a97-0d9e-4eb0-886f-a61ae22935ab" containerName="registry" Jan 22 12:07:10 crc 
kubenswrapper[4773]: I0122 12:07:10.521799 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="74d68a97-0d9e-4eb0-886f-a61ae22935ab" containerName="registry" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.521812 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="northd" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.521821 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="northd" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.521832 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="kube-rbac-proxy-node" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.521841 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="kube-rbac-proxy-node" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.521852 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovn-acl-logging" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.521860 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovn-acl-logging" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.521869 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="nbdb" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.521877 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="nbdb" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.521892 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="kubecfg-setup" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.521900 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="kubecfg-setup" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.521914 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="sbdb" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.521922 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="sbdb" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.521936 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovn-controller" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.521945 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovn-controller" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.522057 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovn-controller" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.522071 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="nbdb" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.522082 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="74d68a97-0d9e-4eb0-886f-a61ae22935ab" containerName="registry" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.522091 4773 
memory_manager.go:354] "RemoveStaleState removing state" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovnkube-controller" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.522102 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="northd" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.522112 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="sbdb" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.522124 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovnkube-controller" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.522134 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovnkube-controller" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.522143 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="kube-rbac-proxy-node" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.522154 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="kube-rbac-proxy-ovn-metrics" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.522168 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovn-acl-logging" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.522335 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovnkube-controller" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.522346 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovnkube-controller" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.522359 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovnkube-controller" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.522368 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovnkube-controller" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.522490 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovnkube-controller" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.522718 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerName="ovnkube-controller" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.524483 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601516 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-systemd-units\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601587 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-run-ovn\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601621 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-kubelet\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601661 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-cni-netd\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601706 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-run-systemd\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601724 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601741 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-var-lib-openvswitch\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601758 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-run-openvswitch\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601772 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-log-socket\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601787 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/43f06601-9530-49b1-9058-cb17125256f0-ovn-node-metrics-cert\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601801 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-run-ovn-kubernetes\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601830 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/43f06601-9530-49b1-9058-cb17125256f0-env-overrides\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601846 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgkrq\" (UniqueName: \"kubernetes.io/projected/43f06601-9530-49b1-9058-cb17125256f0-kube-api-access-lgkrq\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601859 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/43f06601-9530-49b1-9058-cb17125256f0-ovnkube-config\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601904 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-slash\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601923 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/43f06601-9530-49b1-9058-cb17125256f0-ovnkube-script-lib\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601946 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-node-log\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601965 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-etc-openvswitch\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601981 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-cni-bin\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.601994 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-run-netns\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.602040 4773 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.602049 4773 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.602060 4773 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.602069 4773 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.602078 4773 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.602086 4773 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-node-log\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.602094 4773 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.602102 4773 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.602112 4773 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-ovnkube-script-lib\") on node 
\"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.602121 4773 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.602130 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4szgh\" (UniqueName: \"kubernetes.io/projected/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-kube-api-access-4szgh\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.602138 4773 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.602148 4773 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-log-socket\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.602161 4773 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.602169 4773 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.602177 4773 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5-host-slash\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.678826 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tsndt_73fd13f5-159b-444d-9d03-1e5fdd943673/kube-multus/2.log" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.679308 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tsndt_73fd13f5-159b-444d-9d03-1e5fdd943673/kube-multus/1.log" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.679347 4773 generic.go:334] "Generic (PLEG): container finished" podID="73fd13f5-159b-444d-9d03-1e5fdd943673" containerID="1e265729999b7277b3616772c91c49a0387aee1dc557febdfe1ffcbd6f8e4f5a" exitCode=2 Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.679411 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tsndt" event={"ID":"73fd13f5-159b-444d-9d03-1e5fdd943673","Type":"ContainerDied","Data":"1e265729999b7277b3616772c91c49a0387aee1dc557febdfe1ffcbd6f8e4f5a"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.679460 4773 scope.go:117] "RemoveContainer" containerID="64176368ef89ead7cca0621b251ccf3db24dbb357359840e82113f80f8d4b7cd" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.680304 4773 scope.go:117] "RemoveContainer" containerID="1e265729999b7277b3616772c91c49a0387aee1dc557febdfe1ffcbd6f8e4f5a" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.683196 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovnkube-controller/3.log" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 
12:07:10.686706 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovn-acl-logging/0.log" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687231 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-pplsq_7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/ovn-controller/0.log" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687716 4773 generic.go:334] "Generic (PLEG): container finished" podID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerID="a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5" exitCode=0 Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687744 4773 generic.go:334] "Generic (PLEG): container finished" podID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerID="2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa" exitCode=0 Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687756 4773 generic.go:334] "Generic (PLEG): container finished" podID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerID="c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d" exitCode=0 Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687767 4773 generic.go:334] "Generic (PLEG): container finished" podID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerID="604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d" exitCode=0 Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687777 4773 generic.go:334] "Generic (PLEG): container finished" podID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerID="5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4" exitCode=0 Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687785 4773 generic.go:334] "Generic (PLEG): container finished" podID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerID="c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d" exitCode=0 Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687793 4773 generic.go:334] "Generic (PLEG): container finished" podID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerID="f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359" exitCode=143 Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687802 4773 generic.go:334] "Generic (PLEG): container finished" podID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" containerID="fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1" exitCode=143 Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687825 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerDied","Data":"a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687853 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerDied","Data":"2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687869 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerDied","Data":"c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687883 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerDied","Data":"604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687895 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerDied","Data":"5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687909 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerDied","Data":"c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687924 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687936 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687943 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687951 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687958 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687966 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687975 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687982 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687989 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.687996 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688007 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" 
event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerDied","Data":"f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688018 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688027 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688035 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688043 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688050 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688057 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688064 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688071 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688203 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688221 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688263 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerDied","Data":"fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688427 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688447 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688455 4773 pod_container_deletor.go:114] 
"Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688479 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688687 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688696 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688703 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688713 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688720 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688727 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688740 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" event={"ID":"7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5","Type":"ContainerDied","Data":"a98c03095c6184e8a30264e8a96c3f4785f57162b0323cc2ae78b9842ffb6b0a"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688925 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688944 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688953 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688960 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688968 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688976 4773 pod_container_deletor.go:114] 
"Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688983 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688990 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.688997 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.689004 4773 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d"} Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.689477 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pplsq" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.702633 4773 scope.go:117] "RemoveContainer" containerID="a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703541 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703586 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-var-lib-openvswitch\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703603 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-run-openvswitch\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703618 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-log-socket\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703634 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/43f06601-9530-49b1-9058-cb17125256f0-ovn-node-metrics-cert\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 
12:07:10.703648 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-run-ovn-kubernetes\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703683 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/43f06601-9530-49b1-9058-cb17125256f0-env-overrides\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703698 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgkrq\" (UniqueName: \"kubernetes.io/projected/43f06601-9530-49b1-9058-cb17125256f0-kube-api-access-lgkrq\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703712 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/43f06601-9530-49b1-9058-cb17125256f0-ovnkube-config\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703728 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-slash\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703745 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/43f06601-9530-49b1-9058-cb17125256f0-ovnkube-script-lib\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703767 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-node-log\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703784 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-etc-openvswitch\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703798 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-cni-bin\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703845 4773 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-run-netns\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703865 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-systemd-units\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703891 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-run-ovn\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703909 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-kubelet\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703925 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-cni-netd\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703940 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-run-systemd\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.703998 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-run-systemd\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.704033 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.704055 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-var-lib-openvswitch\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.704072 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-run-openvswitch\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.704093 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-log-socket\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.704252 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-node-log\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.704325 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-run-ovn-kubernetes\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.704893 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/43f06601-9530-49b1-9058-cb17125256f0-env-overrides\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.704973 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-etc-openvswitch\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.705049 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-cni-bin\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.705117 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-run-netns\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.705209 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-systemd-units\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.705238 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-run-ovn\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.705261 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-kubelet\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.705307 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-cni-netd\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.705505 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/43f06601-9530-49b1-9058-cb17125256f0-host-slash\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.705636 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/43f06601-9530-49b1-9058-cb17125256f0-ovnkube-config\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.706082 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/43f06601-9530-49b1-9058-cb17125256f0-ovnkube-script-lib\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.708990 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/43f06601-9530-49b1-9058-cb17125256f0-ovn-node-metrics-cert\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.723993 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgkrq\" (UniqueName: \"kubernetes.io/projected/43f06601-9530-49b1-9058-cb17125256f0-kube-api-access-lgkrq\") pod \"ovnkube-node-hgfcn\" (UID: \"43f06601-9530-49b1-9058-cb17125256f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.726073 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-pplsq"] Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.729324 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-pplsq"] Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.730648 4773 scope.go:117] "RemoveContainer" containerID="9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.753626 4773 scope.go:117] "RemoveContainer" containerID="2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.770158 4773 scope.go:117] "RemoveContainer" 
containerID="c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.785236 4773 scope.go:117] "RemoveContainer" containerID="604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.798565 4773 scope.go:117] "RemoveContainer" containerID="5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.816359 4773 scope.go:117] "RemoveContainer" containerID="c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.832892 4773 scope.go:117] "RemoveContainer" containerID="f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.840418 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.848230 4773 scope.go:117] "RemoveContainer" containerID="fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.868188 4773 scope.go:117] "RemoveContainer" containerID="262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d" Jan 22 12:07:10 crc kubenswrapper[4773]: W0122 12:07:10.871073 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod43f06601_9530_49b1_9058_cb17125256f0.slice/crio-cb669aad2aa52923c023c3db1ed078c9113a9af90335bb7aaf57389222ed4edb WatchSource:0}: Error finding container cb669aad2aa52923c023c3db1ed078c9113a9af90335bb7aaf57389222ed4edb: Status 404 returned error can't find the container with id cb669aad2aa52923c023c3db1ed078c9113a9af90335bb7aaf57389222ed4edb Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.883708 4773 scope.go:117] "RemoveContainer" containerID="a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.884934 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5\": container with ID starting with a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5 not found: ID does not exist" containerID="a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.885003 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5"} err="failed to get container status \"a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5\": rpc error: code = NotFound desc = could not find container \"a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5\": container with ID starting with a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5 not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.885037 4773 scope.go:117] "RemoveContainer" containerID="9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.885338 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6\": container with ID starting with 9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6 not found: ID does not exist" containerID="9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.885373 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6"} err="failed to get container status \"9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6\": rpc error: code = NotFound desc = could not find container \"9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6\": container with ID starting with 9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6 not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.885397 4773 scope.go:117] "RemoveContainer" containerID="2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.885698 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\": container with ID starting with 2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa not found: ID does not exist" containerID="2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.885728 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa"} err="failed to get container status \"2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\": rpc error: code = NotFound desc = could not find container \"2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\": container with ID starting with 2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.885751 4773 scope.go:117] "RemoveContainer" containerID="c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.886025 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\": container with ID starting with c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d not found: ID does not exist" containerID="c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.886048 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d"} err="failed to get container status \"c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\": rpc error: code = NotFound desc = could not find container \"c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\": container with ID starting with c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.886064 4773 scope.go:117] "RemoveContainer" containerID="604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d" Jan 22 12:07:10 crc 
kubenswrapper[4773]: E0122 12:07:10.886450 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\": container with ID starting with 604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d not found: ID does not exist" containerID="604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.886482 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d"} err="failed to get container status \"604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\": rpc error: code = NotFound desc = could not find container \"604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\": container with ID starting with 604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.886507 4773 scope.go:117] "RemoveContainer" containerID="5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.886750 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\": container with ID starting with 5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4 not found: ID does not exist" containerID="5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.886775 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4"} err="failed to get container status \"5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\": rpc error: code = NotFound desc = could not find container \"5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\": container with ID starting with 5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4 not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.886794 4773 scope.go:117] "RemoveContainer" containerID="c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.887218 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\": container with ID starting with c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d not found: ID does not exist" containerID="c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.887241 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d"} err="failed to get container status \"c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\": rpc error: code = NotFound desc = could not find container \"c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\": container with ID starting with c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: 
I0122 12:07:10.887256 4773 scope.go:117] "RemoveContainer" containerID="f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.887539 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\": container with ID starting with f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359 not found: ID does not exist" containerID="f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.887570 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359"} err="failed to get container status \"f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\": rpc error: code = NotFound desc = could not find container \"f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\": container with ID starting with f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359 not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.887591 4773 scope.go:117] "RemoveContainer" containerID="fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.887922 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\": container with ID starting with fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1 not found: ID does not exist" containerID="fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.888073 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1"} err="failed to get container status \"fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\": rpc error: code = NotFound desc = could not find container \"fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\": container with ID starting with fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1 not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.888168 4773 scope.go:117] "RemoveContainer" containerID="262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d" Jan 22 12:07:10 crc kubenswrapper[4773]: E0122 12:07:10.888586 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\": container with ID starting with 262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d not found: ID does not exist" containerID="262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.888611 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d"} err="failed to get container status \"262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\": rpc error: code = NotFound desc = could not find container \"262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\": container 
with ID starting with 262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.888626 4773 scope.go:117] "RemoveContainer" containerID="a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.888911 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5"} err="failed to get container status \"a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5\": rpc error: code = NotFound desc = could not find container \"a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5\": container with ID starting with a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5 not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.889016 4773 scope.go:117] "RemoveContainer" containerID="9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.889519 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6"} err="failed to get container status \"9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6\": rpc error: code = NotFound desc = could not find container \"9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6\": container with ID starting with 9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6 not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.889708 4773 scope.go:117] "RemoveContainer" containerID="2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.890037 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa"} err="failed to get container status \"2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\": rpc error: code = NotFound desc = could not find container \"2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\": container with ID starting with 2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.890138 4773 scope.go:117] "RemoveContainer" containerID="c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.890602 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d"} err="failed to get container status \"c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\": rpc error: code = NotFound desc = could not find container \"c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\": container with ID starting with c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.890655 4773 scope.go:117] "RemoveContainer" containerID="604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.890935 4773 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d"} err="failed to get container status \"604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\": rpc error: code = NotFound desc = could not find container \"604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\": container with ID starting with 604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.891105 4773 scope.go:117] "RemoveContainer" containerID="5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.891467 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4"} err="failed to get container status \"5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\": rpc error: code = NotFound desc = could not find container \"5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\": container with ID starting with 5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4 not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.891492 4773 scope.go:117] "RemoveContainer" containerID="c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.891744 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d"} err="failed to get container status \"c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\": rpc error: code = NotFound desc = could not find container \"c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\": container with ID starting with c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.891774 4773 scope.go:117] "RemoveContainer" containerID="f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.892002 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359"} err="failed to get container status \"f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\": rpc error: code = NotFound desc = could not find container \"f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\": container with ID starting with f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359 not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.892022 4773 scope.go:117] "RemoveContainer" containerID="fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.892266 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1"} err="failed to get container status \"fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\": rpc error: code = NotFound desc = could not find container \"fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\": container with ID starting with fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1 not found: ID does not exist" Jan 
22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.892315 4773 scope.go:117] "RemoveContainer" containerID="262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.892666 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d"} err="failed to get container status \"262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\": rpc error: code = NotFound desc = could not find container \"262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\": container with ID starting with 262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.892691 4773 scope.go:117] "RemoveContainer" containerID="a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.892888 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5"} err="failed to get container status \"a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5\": rpc error: code = NotFound desc = could not find container \"a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5\": container with ID starting with a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5 not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.892908 4773 scope.go:117] "RemoveContainer" containerID="9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.893670 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6"} err="failed to get container status \"9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6\": rpc error: code = NotFound desc = could not find container \"9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6\": container with ID starting with 9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6 not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.893697 4773 scope.go:117] "RemoveContainer" containerID="2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.894009 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa"} err="failed to get container status \"2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\": rpc error: code = NotFound desc = could not find container \"2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\": container with ID starting with 2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.894230 4773 scope.go:117] "RemoveContainer" containerID="c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.894609 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d"} err="failed to get container status 
\"c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\": rpc error: code = NotFound desc = could not find container \"c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\": container with ID starting with c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.894729 4773 scope.go:117] "RemoveContainer" containerID="604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.895143 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d"} err="failed to get container status \"604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\": rpc error: code = NotFound desc = could not find container \"604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\": container with ID starting with 604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.895227 4773 scope.go:117] "RemoveContainer" containerID="5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.895585 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4"} err="failed to get container status \"5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\": rpc error: code = NotFound desc = could not find container \"5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\": container with ID starting with 5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4 not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.895605 4773 scope.go:117] "RemoveContainer" containerID="c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.895831 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d"} err="failed to get container status \"c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\": rpc error: code = NotFound desc = could not find container \"c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\": container with ID starting with c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.895848 4773 scope.go:117] "RemoveContainer" containerID="f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.896035 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359"} err="failed to get container status \"f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\": rpc error: code = NotFound desc = could not find container \"f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\": container with ID starting with f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359 not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.896119 4773 scope.go:117] "RemoveContainer" 
containerID="fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.896487 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1"} err="failed to get container status \"fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\": rpc error: code = NotFound desc = could not find container \"fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\": container with ID starting with fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1 not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.896625 4773 scope.go:117] "RemoveContainer" containerID="262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.897089 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d"} err="failed to get container status \"262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\": rpc error: code = NotFound desc = could not find container \"262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\": container with ID starting with 262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.897124 4773 scope.go:117] "RemoveContainer" containerID="a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.897383 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5"} err="failed to get container status \"a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5\": rpc error: code = NotFound desc = could not find container \"a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5\": container with ID starting with a6c3054781a40864a0293521702d4dda3d3d974a22f0ed6c1d32f128d0ee17d5 not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.897494 4773 scope.go:117] "RemoveContainer" containerID="9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.897847 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6"} err="failed to get container status \"9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6\": rpc error: code = NotFound desc = could not find container \"9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6\": container with ID starting with 9a7c39e6e8d60fb8b52b7f81ab907ea63bc50f2fde47e1696de86609f1d6eea6 not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.897868 4773 scope.go:117] "RemoveContainer" containerID="2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.898164 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa"} err="failed to get container status \"2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\": rpc error: code = NotFound desc = could not find 
container \"2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa\": container with ID starting with 2fedc8956f8c200cff48de837534307e8f5b14493e3a01c46b99b58775a074aa not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.898197 4773 scope.go:117] "RemoveContainer" containerID="c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.898545 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d"} err="failed to get container status \"c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\": rpc error: code = NotFound desc = could not find container \"c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d\": container with ID starting with c39ab48710e74d2dd71cdb6a73a20aa53fddbd359a6ad116823695ab2d0c713d not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.898567 4773 scope.go:117] "RemoveContainer" containerID="604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.898881 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d"} err="failed to get container status \"604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\": rpc error: code = NotFound desc = could not find container \"604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d\": container with ID starting with 604fa8da99555482d14fc31f81aee0bdf40b3bff51fdab0940bd807975468e0d not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.898915 4773 scope.go:117] "RemoveContainer" containerID="5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.899417 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4"} err="failed to get container status \"5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\": rpc error: code = NotFound desc = could not find container \"5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4\": container with ID starting with 5c146810a70f39657c22b3e7e9a7515d31d866946ac5803289fcbf73336ffec4 not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.899513 4773 scope.go:117] "RemoveContainer" containerID="c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.899834 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d"} err="failed to get container status \"c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\": rpc error: code = NotFound desc = could not find container \"c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d\": container with ID starting with c5c41c315d22b542b5df795fff40353710b984f1cec0e91ee8d3d9e4e209790d not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.899857 4773 scope.go:117] "RemoveContainer" containerID="f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.900249 4773 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359"} err="failed to get container status \"f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\": rpc error: code = NotFound desc = could not find container \"f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359\": container with ID starting with f4f0f6fc3b1852242d69e22e3a987e67ce8f3bd1a6be0cb38a092c1e55ffa359 not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.900353 4773 scope.go:117] "RemoveContainer" containerID="fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.900726 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1"} err="failed to get container status \"fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\": rpc error: code = NotFound desc = could not find container \"fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1\": container with ID starting with fe68aa35e8bb17aad4aaafdda7f889c4b9abae58847065d7b6687195a9bef4d1 not found: ID does not exist" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.900815 4773 scope.go:117] "RemoveContainer" containerID="262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d" Jan 22 12:07:10 crc kubenswrapper[4773]: I0122 12:07:10.901114 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d"} err="failed to get container status \"262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\": rpc error: code = NotFound desc = could not find container \"262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d\": container with ID starting with 262796a22455c0f4bebdf289404392cd0a53cefe26bb76bfaea6d2a879be232d not found: ID does not exist" Jan 22 12:07:11 crc kubenswrapper[4773]: I0122 12:07:11.695556 4773 generic.go:334] "Generic (PLEG): container finished" podID="43f06601-9530-49b1-9058-cb17125256f0" containerID="f0a385badcc87b09fa6f7e0c02000f6d667c54d08af044b285abd29711c8e50e" exitCode=0 Jan 22 12:07:11 crc kubenswrapper[4773]: I0122 12:07:11.695629 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" event={"ID":"43f06601-9530-49b1-9058-cb17125256f0","Type":"ContainerDied","Data":"f0a385badcc87b09fa6f7e0c02000f6d667c54d08af044b285abd29711c8e50e"} Jan 22 12:07:11 crc kubenswrapper[4773]: I0122 12:07:11.695928 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" event={"ID":"43f06601-9530-49b1-9058-cb17125256f0","Type":"ContainerStarted","Data":"cb669aad2aa52923c023c3db1ed078c9113a9af90335bb7aaf57389222ed4edb"} Jan 22 12:07:11 crc kubenswrapper[4773]: I0122 12:07:11.698882 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tsndt_73fd13f5-159b-444d-9d03-1e5fdd943673/kube-multus/2.log" Jan 22 12:07:11 crc kubenswrapper[4773]: I0122 12:07:11.698935 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tsndt" event={"ID":"73fd13f5-159b-444d-9d03-1e5fdd943673","Type":"ContainerStarted","Data":"462640a560e7e10953b5988bc3994c4864badc86271ce37e9601270c328635d2"} Jan 22 12:07:12 crc kubenswrapper[4773]: I0122 12:07:12.665239 4773 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5" path="/var/lib/kubelet/pods/7f8370d6-6cd8-4e4c-8bad-aaaa3ac332d5/volumes" Jan 22 12:07:12 crc kubenswrapper[4773]: I0122 12:07:12.707139 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" event={"ID":"43f06601-9530-49b1-9058-cb17125256f0","Type":"ContainerStarted","Data":"b68c7bae41a0c7086631130400c9f98f484a536111f9cadd36a7470e4b5e61bc"} Jan 22 12:07:12 crc kubenswrapper[4773]: I0122 12:07:12.707184 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" event={"ID":"43f06601-9530-49b1-9058-cb17125256f0","Type":"ContainerStarted","Data":"9f20f953d3fe4492b6f2513f3ec62e73558dab6062dc99ffec088cd58755e35d"} Jan 22 12:07:12 crc kubenswrapper[4773]: I0122 12:07:12.707198 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" event={"ID":"43f06601-9530-49b1-9058-cb17125256f0","Type":"ContainerStarted","Data":"48b75888c2e2d1a2b120c0fd61dadb7aafa649642b4bc3fa82d3959ec70b3d24"} Jan 22 12:07:12 crc kubenswrapper[4773]: I0122 12:07:12.707210 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" event={"ID":"43f06601-9530-49b1-9058-cb17125256f0","Type":"ContainerStarted","Data":"be6c0a9a735a19d73848d1f3462c2c36aa183923a38820631045a7a24f551037"} Jan 22 12:07:12 crc kubenswrapper[4773]: I0122 12:07:12.707222 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" event={"ID":"43f06601-9530-49b1-9058-cb17125256f0","Type":"ContainerStarted","Data":"41f76d4f1279ff2626140b28c39b6877064c52827084c814b8a6bd4a9aa4783f"} Jan 22 12:07:12 crc kubenswrapper[4773]: I0122 12:07:12.707234 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" event={"ID":"43f06601-9530-49b1-9058-cb17125256f0","Type":"ContainerStarted","Data":"86ee8dc599366b3227319efe7584a5603b579ec4ff4298c53f8e508c42a2486d"} Jan 22 12:07:14 crc kubenswrapper[4773]: I0122 12:07:14.722774 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" event={"ID":"43f06601-9530-49b1-9058-cb17125256f0","Type":"ContainerStarted","Data":"a37ea8410cf8536e66670ea48b5f01144c28145b299818954c033ab84383f1fc"} Jan 22 12:07:16 crc kubenswrapper[4773]: I0122 12:07:16.300847 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-xtlsk"] Jan 22 12:07:16 crc kubenswrapper[4773]: I0122 12:07:16.301988 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:16 crc kubenswrapper[4773]: I0122 12:07:16.303480 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 22 12:07:16 crc kubenswrapper[4773]: I0122 12:07:16.303894 4773 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-cb2wx" Jan 22 12:07:16 crc kubenswrapper[4773]: I0122 12:07:16.304390 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 22 12:07:16 crc kubenswrapper[4773]: I0122 12:07:16.304547 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 22 12:07:16 crc kubenswrapper[4773]: I0122 12:07:16.388800 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/03cd6a75-2bfa-4fcd-b6fe-658685d87f4b-crc-storage\") pod \"crc-storage-crc-xtlsk\" (UID: \"03cd6a75-2bfa-4fcd-b6fe-658685d87f4b\") " pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:16 crc kubenswrapper[4773]: I0122 12:07:16.388866 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/03cd6a75-2bfa-4fcd-b6fe-658685d87f4b-node-mnt\") pod \"crc-storage-crc-xtlsk\" (UID: \"03cd6a75-2bfa-4fcd-b6fe-658685d87f4b\") " pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:16 crc kubenswrapper[4773]: I0122 12:07:16.388919 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpd4k\" (UniqueName: \"kubernetes.io/projected/03cd6a75-2bfa-4fcd-b6fe-658685d87f4b-kube-api-access-zpd4k\") pod \"crc-storage-crc-xtlsk\" (UID: \"03cd6a75-2bfa-4fcd-b6fe-658685d87f4b\") " pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:16 crc kubenswrapper[4773]: I0122 12:07:16.490385 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/03cd6a75-2bfa-4fcd-b6fe-658685d87f4b-crc-storage\") pod \"crc-storage-crc-xtlsk\" (UID: \"03cd6a75-2bfa-4fcd-b6fe-658685d87f4b\") " pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:16 crc kubenswrapper[4773]: I0122 12:07:16.490449 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/03cd6a75-2bfa-4fcd-b6fe-658685d87f4b-node-mnt\") pod \"crc-storage-crc-xtlsk\" (UID: \"03cd6a75-2bfa-4fcd-b6fe-658685d87f4b\") " pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:16 crc kubenswrapper[4773]: I0122 12:07:16.490498 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpd4k\" (UniqueName: \"kubernetes.io/projected/03cd6a75-2bfa-4fcd-b6fe-658685d87f4b-kube-api-access-zpd4k\") pod \"crc-storage-crc-xtlsk\" (UID: \"03cd6a75-2bfa-4fcd-b6fe-658685d87f4b\") " pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:16 crc kubenswrapper[4773]: I0122 12:07:16.490797 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/03cd6a75-2bfa-4fcd-b6fe-658685d87f4b-node-mnt\") pod \"crc-storage-crc-xtlsk\" (UID: \"03cd6a75-2bfa-4fcd-b6fe-658685d87f4b\") " pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:16 crc kubenswrapper[4773]: I0122 12:07:16.491371 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"crc-storage\" (UniqueName: \"kubernetes.io/configmap/03cd6a75-2bfa-4fcd-b6fe-658685d87f4b-crc-storage\") pod \"crc-storage-crc-xtlsk\" (UID: \"03cd6a75-2bfa-4fcd-b6fe-658685d87f4b\") " pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:16 crc kubenswrapper[4773]: I0122 12:07:16.522937 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpd4k\" (UniqueName: \"kubernetes.io/projected/03cd6a75-2bfa-4fcd-b6fe-658685d87f4b-kube-api-access-zpd4k\") pod \"crc-storage-crc-xtlsk\" (UID: \"03cd6a75-2bfa-4fcd-b6fe-658685d87f4b\") " pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:16 crc kubenswrapper[4773]: I0122 12:07:16.617382 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:16 crc kubenswrapper[4773]: E0122 12:07:16.647610 4773 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-xtlsk_crc-storage_03cd6a75-2bfa-4fcd-b6fe-658685d87f4b_0(2263e8baf1efb0b991639b4aee08ae53d82c506dd12e9e9edbfd0d387b2e8741): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 22 12:07:16 crc kubenswrapper[4773]: E0122 12:07:16.647697 4773 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-xtlsk_crc-storage_03cd6a75-2bfa-4fcd-b6fe-658685d87f4b_0(2263e8baf1efb0b991639b4aee08ae53d82c506dd12e9e9edbfd0d387b2e8741): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:16 crc kubenswrapper[4773]: E0122 12:07:16.647720 4773 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-xtlsk_crc-storage_03cd6a75-2bfa-4fcd-b6fe-658685d87f4b_0(2263e8baf1efb0b991639b4aee08ae53d82c506dd12e9e9edbfd0d387b2e8741): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:16 crc kubenswrapper[4773]: E0122 12:07:16.647772 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-xtlsk_crc-storage(03cd6a75-2bfa-4fcd-b6fe-658685d87f4b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-xtlsk_crc-storage(03cd6a75-2bfa-4fcd-b6fe-658685d87f4b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-xtlsk_crc-storage_03cd6a75-2bfa-4fcd-b6fe-658685d87f4b_0(2263e8baf1efb0b991639b4aee08ae53d82c506dd12e9e9edbfd0d387b2e8741): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-xtlsk" podUID="03cd6a75-2bfa-4fcd-b6fe-658685d87f4b" Jan 22 12:07:17 crc kubenswrapper[4773]: I0122 12:07:17.696217 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-xtlsk"] Jan 22 12:07:17 crc kubenswrapper[4773]: I0122 12:07:17.696752 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:17 crc kubenswrapper[4773]: I0122 12:07:17.697251 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:17 crc kubenswrapper[4773]: E0122 12:07:17.719054 4773 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-xtlsk_crc-storage_03cd6a75-2bfa-4fcd-b6fe-658685d87f4b_0(6f8c054e120ab791479c6922d89a6b647866a1cc74a0cdfd6c1f286a08c6b9a7): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 22 12:07:17 crc kubenswrapper[4773]: E0122 12:07:17.719125 4773 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-xtlsk_crc-storage_03cd6a75-2bfa-4fcd-b6fe-658685d87f4b_0(6f8c054e120ab791479c6922d89a6b647866a1cc74a0cdfd6c1f286a08c6b9a7): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:17 crc kubenswrapper[4773]: E0122 12:07:17.719151 4773 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-xtlsk_crc-storage_03cd6a75-2bfa-4fcd-b6fe-658685d87f4b_0(6f8c054e120ab791479c6922d89a6b647866a1cc74a0cdfd6c1f286a08c6b9a7): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:17 crc kubenswrapper[4773]: E0122 12:07:17.719200 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-xtlsk_crc-storage(03cd6a75-2bfa-4fcd-b6fe-658685d87f4b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-xtlsk_crc-storage(03cd6a75-2bfa-4fcd-b6fe-658685d87f4b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-xtlsk_crc-storage_03cd6a75-2bfa-4fcd-b6fe-658685d87f4b_0(6f8c054e120ab791479c6922d89a6b647866a1cc74a0cdfd6c1f286a08c6b9a7): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="crc-storage/crc-storage-crc-xtlsk" podUID="03cd6a75-2bfa-4fcd-b6fe-658685d87f4b" Jan 22 12:07:17 crc kubenswrapper[4773]: I0122 12:07:17.742182 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" event={"ID":"43f06601-9530-49b1-9058-cb17125256f0","Type":"ContainerStarted","Data":"6ddc6725ad1acb4f2ed75c5abfd887166ea84349f5814494b4eca4f41b11a3b1"} Jan 22 12:07:17 crc kubenswrapper[4773]: I0122 12:07:17.742587 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:17 crc kubenswrapper[4773]: I0122 12:07:17.742625 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:17 crc kubenswrapper[4773]: I0122 12:07:17.780905 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" podStartSLOduration=7.78088669 podStartE2EDuration="7.78088669s" podCreationTimestamp="2026-01-22 12:07:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:07:17.776100709 +0000 UTC m=+745.354216574" watchObservedRunningTime="2026-01-22 12:07:17.78088669 +0000 UTC m=+745.359002515" Jan 22 12:07:17 crc kubenswrapper[4773]: I0122 12:07:17.783697 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:18 crc kubenswrapper[4773]: I0122 12:07:18.747834 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:18 crc kubenswrapper[4773]: I0122 12:07:18.785529 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:22 crc kubenswrapper[4773]: I0122 12:07:22.584041 4773 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 22 12:07:29 crc kubenswrapper[4773]: I0122 12:07:29.657453 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:29 crc kubenswrapper[4773]: I0122 12:07:29.658365 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:30 crc kubenswrapper[4773]: I0122 12:07:30.070438 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-xtlsk"] Jan 22 12:07:30 crc kubenswrapper[4773]: I0122 12:07:30.076673 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 12:07:30 crc kubenswrapper[4773]: I0122 12:07:30.812381 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-xtlsk" event={"ID":"03cd6a75-2bfa-4fcd-b6fe-658685d87f4b","Type":"ContainerStarted","Data":"5a9998fecb156b423e0e24c44e0d5c99ef5d953b09b98dfc0d4b2b21aaf4e453"} Jan 22 12:07:31 crc kubenswrapper[4773]: I0122 12:07:31.819777 4773 generic.go:334] "Generic (PLEG): container finished" podID="03cd6a75-2bfa-4fcd-b6fe-658685d87f4b" containerID="7e506d06cf8f20eb5b3b83dfa4bc8179f298ee6aeee03f6e83ffacf95c015a0d" exitCode=0 Jan 22 12:07:31 crc kubenswrapper[4773]: I0122 12:07:31.819942 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-xtlsk" event={"ID":"03cd6a75-2bfa-4fcd-b6fe-658685d87f4b","Type":"ContainerDied","Data":"7e506d06cf8f20eb5b3b83dfa4bc8179f298ee6aeee03f6e83ffacf95c015a0d"} Jan 22 12:07:33 crc kubenswrapper[4773]: I0122 12:07:33.136116 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:33 crc kubenswrapper[4773]: I0122 12:07:33.230718 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/03cd6a75-2bfa-4fcd-b6fe-658685d87f4b-crc-storage\") pod \"03cd6a75-2bfa-4fcd-b6fe-658685d87f4b\" (UID: \"03cd6a75-2bfa-4fcd-b6fe-658685d87f4b\") " Jan 22 12:07:33 crc kubenswrapper[4773]: I0122 12:07:33.230793 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/03cd6a75-2bfa-4fcd-b6fe-658685d87f4b-node-mnt\") pod \"03cd6a75-2bfa-4fcd-b6fe-658685d87f4b\" (UID: \"03cd6a75-2bfa-4fcd-b6fe-658685d87f4b\") " Jan 22 12:07:33 crc kubenswrapper[4773]: I0122 12:07:33.230870 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpd4k\" (UniqueName: \"kubernetes.io/projected/03cd6a75-2bfa-4fcd-b6fe-658685d87f4b-kube-api-access-zpd4k\") pod \"03cd6a75-2bfa-4fcd-b6fe-658685d87f4b\" (UID: \"03cd6a75-2bfa-4fcd-b6fe-658685d87f4b\") " Jan 22 12:07:33 crc kubenswrapper[4773]: I0122 12:07:33.231360 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03cd6a75-2bfa-4fcd-b6fe-658685d87f4b-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "03cd6a75-2bfa-4fcd-b6fe-658685d87f4b" (UID: "03cd6a75-2bfa-4fcd-b6fe-658685d87f4b"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:07:33 crc kubenswrapper[4773]: I0122 12:07:33.234828 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03cd6a75-2bfa-4fcd-b6fe-658685d87f4b-kube-api-access-zpd4k" (OuterVolumeSpecName: "kube-api-access-zpd4k") pod "03cd6a75-2bfa-4fcd-b6fe-658685d87f4b" (UID: "03cd6a75-2bfa-4fcd-b6fe-658685d87f4b"). InnerVolumeSpecName "kube-api-access-zpd4k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:07:33 crc kubenswrapper[4773]: I0122 12:07:33.242154 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03cd6a75-2bfa-4fcd-b6fe-658685d87f4b-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "03cd6a75-2bfa-4fcd-b6fe-658685d87f4b" (UID: "03cd6a75-2bfa-4fcd-b6fe-658685d87f4b"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:07:33 crc kubenswrapper[4773]: I0122 12:07:33.332261 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpd4k\" (UniqueName: \"kubernetes.io/projected/03cd6a75-2bfa-4fcd-b6fe-658685d87f4b-kube-api-access-zpd4k\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:33 crc kubenswrapper[4773]: I0122 12:07:33.332321 4773 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/03cd6a75-2bfa-4fcd-b6fe-658685d87f4b-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:33 crc kubenswrapper[4773]: I0122 12:07:33.332338 4773 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/03cd6a75-2bfa-4fcd-b6fe-658685d87f4b-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:33 crc kubenswrapper[4773]: I0122 12:07:33.834074 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-xtlsk" event={"ID":"03cd6a75-2bfa-4fcd-b6fe-658685d87f4b","Type":"ContainerDied","Data":"5a9998fecb156b423e0e24c44e0d5c99ef5d953b09b98dfc0d4b2b21aaf4e453"} Jan 22 12:07:33 crc kubenswrapper[4773]: I0122 12:07:33.834135 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a9998fecb156b423e0e24c44e0d5c99ef5d953b09b98dfc0d4b2b21aaf4e453" Jan 22 12:07:33 crc kubenswrapper[4773]: I0122 12:07:33.834131 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-xtlsk" Jan 22 12:07:34 crc kubenswrapper[4773]: I0122 12:07:34.073924 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:07:34 crc kubenswrapper[4773]: I0122 12:07:34.074262 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:07:40 crc kubenswrapper[4773]: I0122 12:07:40.366739 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2"] Jan 22 12:07:40 crc kubenswrapper[4773]: E0122 12:07:40.367505 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03cd6a75-2bfa-4fcd-b6fe-658685d87f4b" containerName="storage" Jan 22 12:07:40 crc kubenswrapper[4773]: I0122 12:07:40.367522 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="03cd6a75-2bfa-4fcd-b6fe-658685d87f4b" containerName="storage" Jan 22 12:07:40 crc kubenswrapper[4773]: I0122 12:07:40.367642 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="03cd6a75-2bfa-4fcd-b6fe-658685d87f4b" containerName="storage" Jan 22 12:07:40 crc kubenswrapper[4773]: I0122 12:07:40.368418 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2" Jan 22 12:07:40 crc kubenswrapper[4773]: I0122 12:07:40.370334 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 22 12:07:40 crc kubenswrapper[4773]: I0122 12:07:40.375554 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2"] Jan 22 12:07:40 crc kubenswrapper[4773]: I0122 12:07:40.521910 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fe9f12c2-7f50-4f50-8df1-c21391452cbd-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2\" (UID: \"fe9f12c2-7f50-4f50-8df1-c21391452cbd\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2" Jan 22 12:07:40 crc kubenswrapper[4773]: I0122 12:07:40.521970 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5psp\" (UniqueName: \"kubernetes.io/projected/fe9f12c2-7f50-4f50-8df1-c21391452cbd-kube-api-access-r5psp\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2\" (UID: \"fe9f12c2-7f50-4f50-8df1-c21391452cbd\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2" Jan 22 12:07:40 crc kubenswrapper[4773]: I0122 12:07:40.522020 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fe9f12c2-7f50-4f50-8df1-c21391452cbd-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2\" (UID: 
\"fe9f12c2-7f50-4f50-8df1-c21391452cbd\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2" Jan 22 12:07:40 crc kubenswrapper[4773]: I0122 12:07:40.623062 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fe9f12c2-7f50-4f50-8df1-c21391452cbd-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2\" (UID: \"fe9f12c2-7f50-4f50-8df1-c21391452cbd\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2" Jan 22 12:07:40 crc kubenswrapper[4773]: I0122 12:07:40.623111 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5psp\" (UniqueName: \"kubernetes.io/projected/fe9f12c2-7f50-4f50-8df1-c21391452cbd-kube-api-access-r5psp\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2\" (UID: \"fe9f12c2-7f50-4f50-8df1-c21391452cbd\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2" Jan 22 12:07:40 crc kubenswrapper[4773]: I0122 12:07:40.623147 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fe9f12c2-7f50-4f50-8df1-c21391452cbd-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2\" (UID: \"fe9f12c2-7f50-4f50-8df1-c21391452cbd\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2" Jan 22 12:07:40 crc kubenswrapper[4773]: I0122 12:07:40.623744 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fe9f12c2-7f50-4f50-8df1-c21391452cbd-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2\" (UID: \"fe9f12c2-7f50-4f50-8df1-c21391452cbd\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2" Jan 22 12:07:40 crc kubenswrapper[4773]: I0122 12:07:40.623821 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fe9f12c2-7f50-4f50-8df1-c21391452cbd-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2\" (UID: \"fe9f12c2-7f50-4f50-8df1-c21391452cbd\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2" Jan 22 12:07:40 crc kubenswrapper[4773]: I0122 12:07:40.641540 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5psp\" (UniqueName: \"kubernetes.io/projected/fe9f12c2-7f50-4f50-8df1-c21391452cbd-kube-api-access-r5psp\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2\" (UID: \"fe9f12c2-7f50-4f50-8df1-c21391452cbd\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2" Jan 22 12:07:40 crc kubenswrapper[4773]: I0122 12:07:40.688003 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2" Jan 22 12:07:40 crc kubenswrapper[4773]: I0122 12:07:40.863832 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hgfcn" Jan 22 12:07:41 crc kubenswrapper[4773]: I0122 12:07:41.073886 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2"] Jan 22 12:07:41 crc kubenswrapper[4773]: I0122 12:07:41.880346 4773 generic.go:334] "Generic (PLEG): container finished" podID="fe9f12c2-7f50-4f50-8df1-c21391452cbd" containerID="edc6bde8e0d2dbd30b55e59f75219f249c52d225604df4059dafcbbbf3c8d84b" exitCode=0 Jan 22 12:07:41 crc kubenswrapper[4773]: I0122 12:07:41.880474 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2" event={"ID":"fe9f12c2-7f50-4f50-8df1-c21391452cbd","Type":"ContainerDied","Data":"edc6bde8e0d2dbd30b55e59f75219f249c52d225604df4059dafcbbbf3c8d84b"} Jan 22 12:07:41 crc kubenswrapper[4773]: I0122 12:07:41.880713 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2" event={"ID":"fe9f12c2-7f50-4f50-8df1-c21391452cbd","Type":"ContainerStarted","Data":"932d76002d71ef4fe025244a0833190c89e68e7fe342a535368fc1976c66077c"} Jan 22 12:07:42 crc kubenswrapper[4773]: I0122 12:07:42.731069 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qfmft"] Jan 22 12:07:42 crc kubenswrapper[4773]: I0122 12:07:42.732032 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qfmft" Jan 22 12:07:42 crc kubenswrapper[4773]: I0122 12:07:42.747539 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qfmft"] Jan 22 12:07:42 crc kubenswrapper[4773]: I0122 12:07:42.852414 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb601f1b-4470-4b22-99e0-56f4e2590282-utilities\") pod \"redhat-operators-qfmft\" (UID: \"bb601f1b-4470-4b22-99e0-56f4e2590282\") " pod="openshift-marketplace/redhat-operators-qfmft" Jan 22 12:07:42 crc kubenswrapper[4773]: I0122 12:07:42.852461 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkqfg\" (UniqueName: \"kubernetes.io/projected/bb601f1b-4470-4b22-99e0-56f4e2590282-kube-api-access-mkqfg\") pod \"redhat-operators-qfmft\" (UID: \"bb601f1b-4470-4b22-99e0-56f4e2590282\") " pod="openshift-marketplace/redhat-operators-qfmft" Jan 22 12:07:42 crc kubenswrapper[4773]: I0122 12:07:42.852655 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb601f1b-4470-4b22-99e0-56f4e2590282-catalog-content\") pod \"redhat-operators-qfmft\" (UID: \"bb601f1b-4470-4b22-99e0-56f4e2590282\") " pod="openshift-marketplace/redhat-operators-qfmft" Jan 22 12:07:42 crc kubenswrapper[4773]: I0122 12:07:42.953686 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkqfg\" (UniqueName: \"kubernetes.io/projected/bb601f1b-4470-4b22-99e0-56f4e2590282-kube-api-access-mkqfg\") pod \"redhat-operators-qfmft\" 
(UID: \"bb601f1b-4470-4b22-99e0-56f4e2590282\") " pod="openshift-marketplace/redhat-operators-qfmft" Jan 22 12:07:42 crc kubenswrapper[4773]: I0122 12:07:42.953801 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb601f1b-4470-4b22-99e0-56f4e2590282-catalog-content\") pod \"redhat-operators-qfmft\" (UID: \"bb601f1b-4470-4b22-99e0-56f4e2590282\") " pod="openshift-marketplace/redhat-operators-qfmft" Jan 22 12:07:42 crc kubenswrapper[4773]: I0122 12:07:42.953833 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb601f1b-4470-4b22-99e0-56f4e2590282-utilities\") pod \"redhat-operators-qfmft\" (UID: \"bb601f1b-4470-4b22-99e0-56f4e2590282\") " pod="openshift-marketplace/redhat-operators-qfmft" Jan 22 12:07:42 crc kubenswrapper[4773]: I0122 12:07:42.954358 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb601f1b-4470-4b22-99e0-56f4e2590282-utilities\") pod \"redhat-operators-qfmft\" (UID: \"bb601f1b-4470-4b22-99e0-56f4e2590282\") " pod="openshift-marketplace/redhat-operators-qfmft" Jan 22 12:07:42 crc kubenswrapper[4773]: I0122 12:07:42.954369 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb601f1b-4470-4b22-99e0-56f4e2590282-catalog-content\") pod \"redhat-operators-qfmft\" (UID: \"bb601f1b-4470-4b22-99e0-56f4e2590282\") " pod="openshift-marketplace/redhat-operators-qfmft" Jan 22 12:07:42 crc kubenswrapper[4773]: I0122 12:07:42.979170 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkqfg\" (UniqueName: \"kubernetes.io/projected/bb601f1b-4470-4b22-99e0-56f4e2590282-kube-api-access-mkqfg\") pod \"redhat-operators-qfmft\" (UID: \"bb601f1b-4470-4b22-99e0-56f4e2590282\") " pod="openshift-marketplace/redhat-operators-qfmft" Jan 22 12:07:43 crc kubenswrapper[4773]: I0122 12:07:43.046167 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qfmft" Jan 22 12:07:43 crc kubenswrapper[4773]: I0122 12:07:43.280651 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qfmft"] Jan 22 12:07:43 crc kubenswrapper[4773]: I0122 12:07:43.902015 4773 generic.go:334] "Generic (PLEG): container finished" podID="fe9f12c2-7f50-4f50-8df1-c21391452cbd" containerID="c0a8dd35c70687d2ccef7a502ffbd2136d9a7a24a7404f6f6909954764093ee8" exitCode=0 Jan 22 12:07:43 crc kubenswrapper[4773]: I0122 12:07:43.902097 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2" event={"ID":"fe9f12c2-7f50-4f50-8df1-c21391452cbd","Type":"ContainerDied","Data":"c0a8dd35c70687d2ccef7a502ffbd2136d9a7a24a7404f6f6909954764093ee8"} Jan 22 12:07:43 crc kubenswrapper[4773]: I0122 12:07:43.903642 4773 generic.go:334] "Generic (PLEG): container finished" podID="bb601f1b-4470-4b22-99e0-56f4e2590282" containerID="25396cf9ebd19f01b0c5f78b566eed6a2401c87139a18f640eb053a7e08c0165" exitCode=0 Jan 22 12:07:43 crc kubenswrapper[4773]: I0122 12:07:43.903697 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qfmft" event={"ID":"bb601f1b-4470-4b22-99e0-56f4e2590282","Type":"ContainerDied","Data":"25396cf9ebd19f01b0c5f78b566eed6a2401c87139a18f640eb053a7e08c0165"} Jan 22 12:07:43 crc kubenswrapper[4773]: I0122 12:07:43.903729 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qfmft" event={"ID":"bb601f1b-4470-4b22-99e0-56f4e2590282","Type":"ContainerStarted","Data":"e86146413bd87ff7efce66992207fc1fd033a962d009fa1acc68250fa0497618"} Jan 22 12:07:44 crc kubenswrapper[4773]: I0122 12:07:44.914279 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qfmft" event={"ID":"bb601f1b-4470-4b22-99e0-56f4e2590282","Type":"ContainerStarted","Data":"ef3e520ca48ce8334b65eb44c07498d42a3d58cf0f42755904799669a7a865ee"} Jan 22 12:07:44 crc kubenswrapper[4773]: I0122 12:07:44.919325 4773 generic.go:334] "Generic (PLEG): container finished" podID="fe9f12c2-7f50-4f50-8df1-c21391452cbd" containerID="a424ef96c7facb1db2e48fc6f1a7f254ac9d8290a9159574d60f7d48e9d7c87d" exitCode=0 Jan 22 12:07:44 crc kubenswrapper[4773]: I0122 12:07:44.919406 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2" event={"ID":"fe9f12c2-7f50-4f50-8df1-c21391452cbd","Type":"ContainerDied","Data":"a424ef96c7facb1db2e48fc6f1a7f254ac9d8290a9159574d60f7d48e9d7c87d"} Jan 22 12:07:46 crc kubenswrapper[4773]: I0122 12:07:46.160783 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2" Jan 22 12:07:46 crc kubenswrapper[4773]: I0122 12:07:46.293933 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fe9f12c2-7f50-4f50-8df1-c21391452cbd-bundle\") pod \"fe9f12c2-7f50-4f50-8df1-c21391452cbd\" (UID: \"fe9f12c2-7f50-4f50-8df1-c21391452cbd\") " Jan 22 12:07:46 crc kubenswrapper[4773]: I0122 12:07:46.294659 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fe9f12c2-7f50-4f50-8df1-c21391452cbd-util\") pod \"fe9f12c2-7f50-4f50-8df1-c21391452cbd\" (UID: \"fe9f12c2-7f50-4f50-8df1-c21391452cbd\") " Jan 22 12:07:46 crc kubenswrapper[4773]: I0122 12:07:46.294659 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe9f12c2-7f50-4f50-8df1-c21391452cbd-bundle" (OuterVolumeSpecName: "bundle") pod "fe9f12c2-7f50-4f50-8df1-c21391452cbd" (UID: "fe9f12c2-7f50-4f50-8df1-c21391452cbd"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:07:46 crc kubenswrapper[4773]: I0122 12:07:46.294717 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5psp\" (UniqueName: \"kubernetes.io/projected/fe9f12c2-7f50-4f50-8df1-c21391452cbd-kube-api-access-r5psp\") pod \"fe9f12c2-7f50-4f50-8df1-c21391452cbd\" (UID: \"fe9f12c2-7f50-4f50-8df1-c21391452cbd\") " Jan 22 12:07:46 crc kubenswrapper[4773]: I0122 12:07:46.294980 4773 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/fe9f12c2-7f50-4f50-8df1-c21391452cbd-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:46 crc kubenswrapper[4773]: I0122 12:07:46.302615 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe9f12c2-7f50-4f50-8df1-c21391452cbd-kube-api-access-r5psp" (OuterVolumeSpecName: "kube-api-access-r5psp") pod "fe9f12c2-7f50-4f50-8df1-c21391452cbd" (UID: "fe9f12c2-7f50-4f50-8df1-c21391452cbd"). InnerVolumeSpecName "kube-api-access-r5psp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:07:46 crc kubenswrapper[4773]: I0122 12:07:46.318749 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe9f12c2-7f50-4f50-8df1-c21391452cbd-util" (OuterVolumeSpecName: "util") pod "fe9f12c2-7f50-4f50-8df1-c21391452cbd" (UID: "fe9f12c2-7f50-4f50-8df1-c21391452cbd"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:07:46 crc kubenswrapper[4773]: I0122 12:07:46.396856 4773 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/fe9f12c2-7f50-4f50-8df1-c21391452cbd-util\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:46 crc kubenswrapper[4773]: I0122 12:07:46.396917 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5psp\" (UniqueName: \"kubernetes.io/projected/fe9f12c2-7f50-4f50-8df1-c21391452cbd-kube-api-access-r5psp\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:46 crc kubenswrapper[4773]: I0122 12:07:46.934929 4773 generic.go:334] "Generic (PLEG): container finished" podID="bb601f1b-4470-4b22-99e0-56f4e2590282" containerID="ef3e520ca48ce8334b65eb44c07498d42a3d58cf0f42755904799669a7a865ee" exitCode=0 Jan 22 12:07:46 crc kubenswrapper[4773]: I0122 12:07:46.935088 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qfmft" event={"ID":"bb601f1b-4470-4b22-99e0-56f4e2590282","Type":"ContainerDied","Data":"ef3e520ca48ce8334b65eb44c07498d42a3d58cf0f42755904799669a7a865ee"} Jan 22 12:07:46 crc kubenswrapper[4773]: I0122 12:07:46.939754 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2" event={"ID":"fe9f12c2-7f50-4f50-8df1-c21391452cbd","Type":"ContainerDied","Data":"932d76002d71ef4fe025244a0833190c89e68e7fe342a535368fc1976c66077c"} Jan 22 12:07:46 crc kubenswrapper[4773]: I0122 12:07:46.939855 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="932d76002d71ef4fe025244a0833190c89e68e7fe342a535368fc1976c66077c" Jan 22 12:07:46 crc kubenswrapper[4773]: I0122 12:07:46.939781 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2" Jan 22 12:07:47 crc kubenswrapper[4773]: I0122 12:07:47.948495 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qfmft" event={"ID":"bb601f1b-4470-4b22-99e0-56f4e2590282","Type":"ContainerStarted","Data":"030b72897ac01efff8c26a40f50d0b8b8f08be2875db3a45000631ffc6742532"} Jan 22 12:07:47 crc kubenswrapper[4773]: I0122 12:07:47.968236 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qfmft" podStartSLOduration=2.525056543 podStartE2EDuration="5.968211211s" podCreationTimestamp="2026-01-22 12:07:42 +0000 UTC" firstStartedPulling="2026-01-22 12:07:43.905483638 +0000 UTC m=+771.483599463" lastFinishedPulling="2026-01-22 12:07:47.348638296 +0000 UTC m=+774.926754131" observedRunningTime="2026-01-22 12:07:47.966535616 +0000 UTC m=+775.544651461" watchObservedRunningTime="2026-01-22 12:07:47.968211211 +0000 UTC m=+775.546327036" Jan 22 12:07:50 crc kubenswrapper[4773]: I0122 12:07:50.899053 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-sdxt4"] Jan 22 12:07:50 crc kubenswrapper[4773]: E0122 12:07:50.899244 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe9f12c2-7f50-4f50-8df1-c21391452cbd" containerName="pull" Jan 22 12:07:50 crc kubenswrapper[4773]: I0122 12:07:50.899257 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe9f12c2-7f50-4f50-8df1-c21391452cbd" containerName="pull" Jan 22 12:07:50 crc kubenswrapper[4773]: E0122 12:07:50.899267 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe9f12c2-7f50-4f50-8df1-c21391452cbd" containerName="extract" Jan 22 12:07:50 crc kubenswrapper[4773]: I0122 12:07:50.899276 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe9f12c2-7f50-4f50-8df1-c21391452cbd" containerName="extract" Jan 22 12:07:50 crc kubenswrapper[4773]: E0122 12:07:50.899311 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe9f12c2-7f50-4f50-8df1-c21391452cbd" containerName="util" Jan 22 12:07:50 crc kubenswrapper[4773]: I0122 12:07:50.899317 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe9f12c2-7f50-4f50-8df1-c21391452cbd" containerName="util" Jan 22 12:07:50 crc kubenswrapper[4773]: I0122 12:07:50.899424 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe9f12c2-7f50-4f50-8df1-c21391452cbd" containerName="extract" Jan 22 12:07:50 crc kubenswrapper[4773]: I0122 12:07:50.899818 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-sdxt4" Jan 22 12:07:50 crc kubenswrapper[4773]: I0122 12:07:50.902462 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Jan 22 12:07:50 crc kubenswrapper[4773]: I0122 12:07:50.902685 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-cxdfw" Jan 22 12:07:50 crc kubenswrapper[4773]: I0122 12:07:50.903507 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Jan 22 12:07:50 crc kubenswrapper[4773]: I0122 12:07:50.920904 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-sdxt4"] Jan 22 12:07:50 crc kubenswrapper[4773]: I0122 12:07:50.955359 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftw8z\" (UniqueName: \"kubernetes.io/projected/ce53468d-145a-4861-bd2c-7ece42309269-kube-api-access-ftw8z\") pod \"nmstate-operator-646758c888-sdxt4\" (UID: \"ce53468d-145a-4861-bd2c-7ece42309269\") " pod="openshift-nmstate/nmstate-operator-646758c888-sdxt4" Jan 22 12:07:51 crc kubenswrapper[4773]: I0122 12:07:51.056961 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftw8z\" (UniqueName: \"kubernetes.io/projected/ce53468d-145a-4861-bd2c-7ece42309269-kube-api-access-ftw8z\") pod \"nmstate-operator-646758c888-sdxt4\" (UID: \"ce53468d-145a-4861-bd2c-7ece42309269\") " pod="openshift-nmstate/nmstate-operator-646758c888-sdxt4" Jan 22 12:07:51 crc kubenswrapper[4773]: I0122 12:07:51.075748 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftw8z\" (UniqueName: \"kubernetes.io/projected/ce53468d-145a-4861-bd2c-7ece42309269-kube-api-access-ftw8z\") pod \"nmstate-operator-646758c888-sdxt4\" (UID: \"ce53468d-145a-4861-bd2c-7ece42309269\") " pod="openshift-nmstate/nmstate-operator-646758c888-sdxt4" Jan 22 12:07:51 crc kubenswrapper[4773]: I0122 12:07:51.213485 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-sdxt4" Jan 22 12:07:51 crc kubenswrapper[4773]: I0122 12:07:51.441574 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-sdxt4"] Jan 22 12:07:51 crc kubenswrapper[4773]: I0122 12:07:51.973033 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-sdxt4" event={"ID":"ce53468d-145a-4861-bd2c-7ece42309269","Type":"ContainerStarted","Data":"b1c2daca8384d0ae7d68a99f30e48e7032f4ce2703c8b6d2ea8d8765ef737c43"} Jan 22 12:07:53 crc kubenswrapper[4773]: I0122 12:07:53.046779 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qfmft" Jan 22 12:07:53 crc kubenswrapper[4773]: I0122 12:07:53.047975 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qfmft" Jan 22 12:07:53 crc kubenswrapper[4773]: I0122 12:07:53.087811 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qfmft" Jan 22 12:07:54 crc kubenswrapper[4773]: I0122 12:07:54.051827 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qfmft" Jan 22 12:07:55 crc kubenswrapper[4773]: I0122 12:07:55.011660 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-sdxt4" event={"ID":"ce53468d-145a-4861-bd2c-7ece42309269","Type":"ContainerStarted","Data":"1d0316c014bc9426ce940d0324526dd873185614040ce1114da8f6a37b67857b"} Jan 22 12:07:55 crc kubenswrapper[4773]: I0122 12:07:55.033801 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-646758c888-sdxt4" podStartSLOduration=2.384894011 podStartE2EDuration="5.033771393s" podCreationTimestamp="2026-01-22 12:07:50 +0000 UTC" firstStartedPulling="2026-01-22 12:07:51.460327637 +0000 UTC m=+779.038443462" lastFinishedPulling="2026-01-22 12:07:54.109205019 +0000 UTC m=+781.687320844" observedRunningTime="2026-01-22 12:07:55.030202202 +0000 UTC m=+782.608318027" watchObservedRunningTime="2026-01-22 12:07:55.033771393 +0000 UTC m=+782.611887208" Jan 22 12:07:55 crc kubenswrapper[4773]: I0122 12:07:55.522850 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qfmft"] Jan 22 12:07:57 crc kubenswrapper[4773]: I0122 12:07:57.024030 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qfmft" podUID="bb601f1b-4470-4b22-99e0-56f4e2590282" containerName="registry-server" containerID="cri-o://030b72897ac01efff8c26a40f50d0b8b8f08be2875db3a45000631ffc6742532" gracePeriod=2 Jan 22 12:07:58 crc kubenswrapper[4773]: I0122 12:07:58.030267 4773 generic.go:334] "Generic (PLEG): container finished" podID="bb601f1b-4470-4b22-99e0-56f4e2590282" containerID="030b72897ac01efff8c26a40f50d0b8b8f08be2875db3a45000631ffc6742532" exitCode=0 Jan 22 12:07:58 crc kubenswrapper[4773]: I0122 12:07:58.030319 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qfmft" event={"ID":"bb601f1b-4470-4b22-99e0-56f4e2590282","Type":"ContainerDied","Data":"030b72897ac01efff8c26a40f50d0b8b8f08be2875db3a45000631ffc6742532"} Jan 22 12:07:58 crc kubenswrapper[4773]: I0122 12:07:58.488272 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qfmft" Jan 22 12:07:58 crc kubenswrapper[4773]: I0122 12:07:58.622278 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mkqfg\" (UniqueName: \"kubernetes.io/projected/bb601f1b-4470-4b22-99e0-56f4e2590282-kube-api-access-mkqfg\") pod \"bb601f1b-4470-4b22-99e0-56f4e2590282\" (UID: \"bb601f1b-4470-4b22-99e0-56f4e2590282\") " Jan 22 12:07:58 crc kubenswrapper[4773]: I0122 12:07:58.622351 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb601f1b-4470-4b22-99e0-56f4e2590282-catalog-content\") pod \"bb601f1b-4470-4b22-99e0-56f4e2590282\" (UID: \"bb601f1b-4470-4b22-99e0-56f4e2590282\") " Jan 22 12:07:58 crc kubenswrapper[4773]: I0122 12:07:58.622406 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb601f1b-4470-4b22-99e0-56f4e2590282-utilities\") pod \"bb601f1b-4470-4b22-99e0-56f4e2590282\" (UID: \"bb601f1b-4470-4b22-99e0-56f4e2590282\") " Jan 22 12:07:58 crc kubenswrapper[4773]: I0122 12:07:58.623256 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb601f1b-4470-4b22-99e0-56f4e2590282-utilities" (OuterVolumeSpecName: "utilities") pod "bb601f1b-4470-4b22-99e0-56f4e2590282" (UID: "bb601f1b-4470-4b22-99e0-56f4e2590282"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:07:58 crc kubenswrapper[4773]: I0122 12:07:58.628116 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb601f1b-4470-4b22-99e0-56f4e2590282-kube-api-access-mkqfg" (OuterVolumeSpecName: "kube-api-access-mkqfg") pod "bb601f1b-4470-4b22-99e0-56f4e2590282" (UID: "bb601f1b-4470-4b22-99e0-56f4e2590282"). InnerVolumeSpecName "kube-api-access-mkqfg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:07:58 crc kubenswrapper[4773]: I0122 12:07:58.724344 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mkqfg\" (UniqueName: \"kubernetes.io/projected/bb601f1b-4470-4b22-99e0-56f4e2590282-kube-api-access-mkqfg\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:58 crc kubenswrapper[4773]: I0122 12:07:58.724381 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb601f1b-4470-4b22-99e0-56f4e2590282-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:58 crc kubenswrapper[4773]: I0122 12:07:58.736122 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb601f1b-4470-4b22-99e0-56f4e2590282-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bb601f1b-4470-4b22-99e0-56f4e2590282" (UID: "bb601f1b-4470-4b22-99e0-56f4e2590282"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:07:58 crc kubenswrapper[4773]: I0122 12:07:58.826113 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb601f1b-4470-4b22-99e0-56f4e2590282-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 12:07:59 crc kubenswrapper[4773]: I0122 12:07:59.037276 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qfmft" event={"ID":"bb601f1b-4470-4b22-99e0-56f4e2590282","Type":"ContainerDied","Data":"e86146413bd87ff7efce66992207fc1fd033a962d009fa1acc68250fa0497618"} Jan 22 12:07:59 crc kubenswrapper[4773]: I0122 12:07:59.037371 4773 scope.go:117] "RemoveContainer" containerID="030b72897ac01efff8c26a40f50d0b8b8f08be2875db3a45000631ffc6742532" Jan 22 12:07:59 crc kubenswrapper[4773]: I0122 12:07:59.037412 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qfmft" Jan 22 12:07:59 crc kubenswrapper[4773]: I0122 12:07:59.059813 4773 scope.go:117] "RemoveContainer" containerID="ef3e520ca48ce8334b65eb44c07498d42a3d58cf0f42755904799669a7a865ee" Jan 22 12:07:59 crc kubenswrapper[4773]: I0122 12:07:59.073436 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qfmft"] Jan 22 12:07:59 crc kubenswrapper[4773]: I0122 12:07:59.080818 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qfmft"] Jan 22 12:07:59 crc kubenswrapper[4773]: I0122 12:07:59.094572 4773 scope.go:117] "RemoveContainer" containerID="25396cf9ebd19f01b0c5f78b566eed6a2401c87139a18f640eb053a7e08c0165" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.408986 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-k2w8h"] Jan 22 12:08:00 crc kubenswrapper[4773]: E0122 12:08:00.409574 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb601f1b-4470-4b22-99e0-56f4e2590282" containerName="extract-utilities" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.409593 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb601f1b-4470-4b22-99e0-56f4e2590282" containerName="extract-utilities" Jan 22 12:08:00 crc kubenswrapper[4773]: E0122 12:08:00.409608 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb601f1b-4470-4b22-99e0-56f4e2590282" containerName="registry-server" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.409616 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb601f1b-4470-4b22-99e0-56f4e2590282" containerName="registry-server" Jan 22 12:08:00 crc kubenswrapper[4773]: E0122 12:08:00.409642 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb601f1b-4470-4b22-99e0-56f4e2590282" containerName="extract-content" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.409649 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb601f1b-4470-4b22-99e0-56f4e2590282" containerName="extract-content" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.409786 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb601f1b-4470-4b22-99e0-56f4e2590282" containerName="registry-server" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.410471 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-k2w8h" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.412442 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-jkzzm" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.423966 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-k2w8h"] Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.432117 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-7v24g"] Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.432948 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7v24g" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.436445 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.445017 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvl4j\" (UniqueName: \"kubernetes.io/projected/20770427-fb79-4ce3-b2c7-1914a3e7c366-kube-api-access-rvl4j\") pod \"nmstate-metrics-54757c584b-k2w8h\" (UID: \"20770427-fb79-4ce3-b2c7-1914a3e7c366\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-k2w8h" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.448411 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-7v24g"] Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.453564 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-xz7xx"] Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.454217 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-xz7xx" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.546195 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/7ee9761c-0ccc-47f6-bea9-bb66195bf7a8-dbus-socket\") pod \"nmstate-handler-xz7xx\" (UID: \"7ee9761c-0ccc-47f6-bea9-bb66195bf7a8\") " pod="openshift-nmstate/nmstate-handler-xz7xx" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.546275 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/7ee9761c-0ccc-47f6-bea9-bb66195bf7a8-nmstate-lock\") pod \"nmstate-handler-xz7xx\" (UID: \"7ee9761c-0ccc-47f6-bea9-bb66195bf7a8\") " pod="openshift-nmstate/nmstate-handler-xz7xx" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.546353 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6ppf\" (UniqueName: \"kubernetes.io/projected/288106d1-959b-4002-b615-77452081552a-kube-api-access-z6ppf\") pod \"nmstate-webhook-8474b5b9d8-7v24g\" (UID: \"288106d1-959b-4002-b615-77452081552a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7v24g" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.546383 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/7ee9761c-0ccc-47f6-bea9-bb66195bf7a8-ovs-socket\") pod \"nmstate-handler-xz7xx\" (UID: \"7ee9761c-0ccc-47f6-bea9-bb66195bf7a8\") " pod="openshift-nmstate/nmstate-handler-xz7xx" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.546419 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvl4j\" (UniqueName: \"kubernetes.io/projected/20770427-fb79-4ce3-b2c7-1914a3e7c366-kube-api-access-rvl4j\") pod \"nmstate-metrics-54757c584b-k2w8h\" (UID: \"20770427-fb79-4ce3-b2c7-1914a3e7c366\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-k2w8h" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.546612 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/288106d1-959b-4002-b615-77452081552a-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-7v24g\" (UID: \"288106d1-959b-4002-b615-77452081552a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7v24g" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.546653 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrpxd\" (UniqueName: \"kubernetes.io/projected/7ee9761c-0ccc-47f6-bea9-bb66195bf7a8-kube-api-access-xrpxd\") pod \"nmstate-handler-xz7xx\" (UID: \"7ee9761c-0ccc-47f6-bea9-bb66195bf7a8\") " pod="openshift-nmstate/nmstate-handler-xz7xx" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.570352 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-h2qxn"] Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.572018 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-h2qxn" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.578123 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.578808 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-98nmq" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.579121 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.588435 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvl4j\" (UniqueName: \"kubernetes.io/projected/20770427-fb79-4ce3-b2c7-1914a3e7c366-kube-api-access-rvl4j\") pod \"nmstate-metrics-54757c584b-k2w8h\" (UID: \"20770427-fb79-4ce3-b2c7-1914a3e7c366\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-k2w8h" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.596035 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-h2qxn"] Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.647906 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6ppf\" (UniqueName: \"kubernetes.io/projected/288106d1-959b-4002-b615-77452081552a-kube-api-access-z6ppf\") pod \"nmstate-webhook-8474b5b9d8-7v24g\" (UID: \"288106d1-959b-4002-b615-77452081552a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7v24g" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.647975 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/46c5b784-3baa-414b-b777-d2935b3d2056-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-h2qxn\" (UID: \"46c5b784-3baa-414b-b777-d2935b3d2056\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-h2qxn" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.648006 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/7ee9761c-0ccc-47f6-bea9-bb66195bf7a8-ovs-socket\") pod \"nmstate-handler-xz7xx\" (UID: \"7ee9761c-0ccc-47f6-bea9-bb66195bf7a8\") " pod="openshift-nmstate/nmstate-handler-xz7xx" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.648086 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/288106d1-959b-4002-b615-77452081552a-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-7v24g\" (UID: \"288106d1-959b-4002-b615-77452081552a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7v24g" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.648111 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrpxd\" (UniqueName: \"kubernetes.io/projected/7ee9761c-0ccc-47f6-bea9-bb66195bf7a8-kube-api-access-xrpxd\") pod \"nmstate-handler-xz7xx\" (UID: \"7ee9761c-0ccc-47f6-bea9-bb66195bf7a8\") " pod="openshift-nmstate/nmstate-handler-xz7xx" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.648145 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/46c5b784-3baa-414b-b777-d2935b3d2056-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-h2qxn\" (UID: 
\"46c5b784-3baa-414b-b777-d2935b3d2056\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-h2qxn" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.648168 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/7ee9761c-0ccc-47f6-bea9-bb66195bf7a8-dbus-socket\") pod \"nmstate-handler-xz7xx\" (UID: \"7ee9761c-0ccc-47f6-bea9-bb66195bf7a8\") " pod="openshift-nmstate/nmstate-handler-xz7xx" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.648167 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/7ee9761c-0ccc-47f6-bea9-bb66195bf7a8-ovs-socket\") pod \"nmstate-handler-xz7xx\" (UID: \"7ee9761c-0ccc-47f6-bea9-bb66195bf7a8\") " pod="openshift-nmstate/nmstate-handler-xz7xx" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.648210 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r6s5\" (UniqueName: \"kubernetes.io/projected/46c5b784-3baa-414b-b777-d2935b3d2056-kube-api-access-6r6s5\") pod \"nmstate-console-plugin-7754f76f8b-h2qxn\" (UID: \"46c5b784-3baa-414b-b777-d2935b3d2056\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-h2qxn" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.648251 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/7ee9761c-0ccc-47f6-bea9-bb66195bf7a8-nmstate-lock\") pod \"nmstate-handler-xz7xx\" (UID: \"7ee9761c-0ccc-47f6-bea9-bb66195bf7a8\") " pod="openshift-nmstate/nmstate-handler-xz7xx" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.648336 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/7ee9761c-0ccc-47f6-bea9-bb66195bf7a8-nmstate-lock\") pod \"nmstate-handler-xz7xx\" (UID: \"7ee9761c-0ccc-47f6-bea9-bb66195bf7a8\") " pod="openshift-nmstate/nmstate-handler-xz7xx" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.648533 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/7ee9761c-0ccc-47f6-bea9-bb66195bf7a8-dbus-socket\") pod \"nmstate-handler-xz7xx\" (UID: \"7ee9761c-0ccc-47f6-bea9-bb66195bf7a8\") " pod="openshift-nmstate/nmstate-handler-xz7xx" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.651366 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/288106d1-959b-4002-b615-77452081552a-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-7v24g\" (UID: \"288106d1-959b-4002-b615-77452081552a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7v24g" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.665437 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb601f1b-4470-4b22-99e0-56f4e2590282" path="/var/lib/kubelet/pods/bb601f1b-4470-4b22-99e0-56f4e2590282/volumes" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.665762 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrpxd\" (UniqueName: \"kubernetes.io/projected/7ee9761c-0ccc-47f6-bea9-bb66195bf7a8-kube-api-access-xrpxd\") pod \"nmstate-handler-xz7xx\" (UID: \"7ee9761c-0ccc-47f6-bea9-bb66195bf7a8\") " pod="openshift-nmstate/nmstate-handler-xz7xx" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.668181 4773 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-z6ppf\" (UniqueName: \"kubernetes.io/projected/288106d1-959b-4002-b615-77452081552a-kube-api-access-z6ppf\") pod \"nmstate-webhook-8474b5b9d8-7v24g\" (UID: \"288106d1-959b-4002-b615-77452081552a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7v24g" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.728242 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-k2w8h" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.746867 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7v24g" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.751221 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/46c5b784-3baa-414b-b777-d2935b3d2056-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-h2qxn\" (UID: \"46c5b784-3baa-414b-b777-d2935b3d2056\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-h2qxn" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.751269 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r6s5\" (UniqueName: \"kubernetes.io/projected/46c5b784-3baa-414b-b777-d2935b3d2056-kube-api-access-6r6s5\") pod \"nmstate-console-plugin-7754f76f8b-h2qxn\" (UID: \"46c5b784-3baa-414b-b777-d2935b3d2056\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-h2qxn" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.751306 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/46c5b784-3baa-414b-b777-d2935b3d2056-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-h2qxn\" (UID: \"46c5b784-3baa-414b-b777-d2935b3d2056\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-h2qxn" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.751998 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/46c5b784-3baa-414b-b777-d2935b3d2056-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-h2qxn\" (UID: \"46c5b784-3baa-414b-b777-d2935b3d2056\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-h2qxn" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.761550 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/46c5b784-3baa-414b-b777-d2935b3d2056-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-h2qxn\" (UID: \"46c5b784-3baa-414b-b777-d2935b3d2056\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-h2qxn" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.772053 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r6s5\" (UniqueName: \"kubernetes.io/projected/46c5b784-3baa-414b-b777-d2935b3d2056-kube-api-access-6r6s5\") pod \"nmstate-console-plugin-7754f76f8b-h2qxn\" (UID: \"46c5b784-3baa-414b-b777-d2935b3d2056\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-h2qxn" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.776565 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-xz7xx" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.781564 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-796f78c94d-9qxd4"] Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.782476 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.799915 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-796f78c94d-9qxd4"] Jan 22 12:08:00 crc kubenswrapper[4773]: W0122 12:08:00.829009 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ee9761c_0ccc_47f6_bea9_bb66195bf7a8.slice/crio-2d262cd634a5552d0bd447cdd2c2bdb986aeaa2c38a645d0e54c7ac9814f9bb9 WatchSource:0}: Error finding container 2d262cd634a5552d0bd447cdd2c2bdb986aeaa2c38a645d0e54c7ac9814f9bb9: Status 404 returned error can't find the container with id 2d262cd634a5552d0bd447cdd2c2bdb986aeaa2c38a645d0e54c7ac9814f9bb9 Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.852225 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/8b92330a-6db4-49c8-88c2-c9d386a9795c-oauth-serving-cert\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.852272 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4mh7\" (UniqueName: \"kubernetes.io/projected/8b92330a-6db4-49c8-88c2-c9d386a9795c-kube-api-access-c4mh7\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.852321 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/8b92330a-6db4-49c8-88c2-c9d386a9795c-console-oauth-config\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.852338 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8b92330a-6db4-49c8-88c2-c9d386a9795c-service-ca\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.852358 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/8b92330a-6db4-49c8-88c2-c9d386a9795c-console-serving-cert\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.852382 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8b92330a-6db4-49c8-88c2-c9d386a9795c-trusted-ca-bundle\") pod \"console-796f78c94d-9qxd4\" (UID: 
\"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.852400 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/8b92330a-6db4-49c8-88c2-c9d386a9795c-console-config\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.913933 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-h2qxn" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.953482 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/8b92330a-6db4-49c8-88c2-c9d386a9795c-oauth-serving-cert\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.953547 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4mh7\" (UniqueName: \"kubernetes.io/projected/8b92330a-6db4-49c8-88c2-c9d386a9795c-kube-api-access-c4mh7\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.953586 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/8b92330a-6db4-49c8-88c2-c9d386a9795c-console-oauth-config\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.953608 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8b92330a-6db4-49c8-88c2-c9d386a9795c-service-ca\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.953634 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/8b92330a-6db4-49c8-88c2-c9d386a9795c-console-serving-cert\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.953672 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8b92330a-6db4-49c8-88c2-c9d386a9795c-trusted-ca-bundle\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.953692 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/8b92330a-6db4-49c8-88c2-c9d386a9795c-console-config\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.954756 4773 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/8b92330a-6db4-49c8-88c2-c9d386a9795c-console-config\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.955855 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8b92330a-6db4-49c8-88c2-c9d386a9795c-trusted-ca-bundle\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.956563 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8b92330a-6db4-49c8-88c2-c9d386a9795c-service-ca\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.960848 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/8b92330a-6db4-49c8-88c2-c9d386a9795c-console-oauth-config\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.961139 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/8b92330a-6db4-49c8-88c2-c9d386a9795c-console-serving-cert\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.961360 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/8b92330a-6db4-49c8-88c2-c9d386a9795c-oauth-serving-cert\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.979450 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-k2w8h"] Jan 22 12:08:00 crc kubenswrapper[4773]: I0122 12:08:00.981263 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4mh7\" (UniqueName: \"kubernetes.io/projected/8b92330a-6db4-49c8-88c2-c9d386a9795c-kube-api-access-c4mh7\") pod \"console-796f78c94d-9qxd4\" (UID: \"8b92330a-6db4-49c8-88c2-c9d386a9795c\") " pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:00 crc kubenswrapper[4773]: W0122 12:08:00.989955 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod20770427_fb79_4ce3_b2c7_1914a3e7c366.slice/crio-a543f1b9e463f780d2fe0d0b95a2c0fd158e4f4529b67d3c57cb53f92eb00b8f WatchSource:0}: Error finding container a543f1b9e463f780d2fe0d0b95a2c0fd158e4f4529b67d3c57cb53f92eb00b8f: Status 404 returned error can't find the container with id a543f1b9e463f780d2fe0d0b95a2c0fd158e4f4529b67d3c57cb53f92eb00b8f Jan 22 12:08:01 crc kubenswrapper[4773]: I0122 12:08:01.041222 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-7v24g"] Jan 22 12:08:01 crc 
kubenswrapper[4773]: W0122 12:08:01.047250 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod288106d1_959b_4002_b615_77452081552a.slice/crio-5d2537d9e68426c80bf8931257ac110e9b9bdfaf7e2d8f8ccd7a89dbebf43efd WatchSource:0}: Error finding container 5d2537d9e68426c80bf8931257ac110e9b9bdfaf7e2d8f8ccd7a89dbebf43efd: Status 404 returned error can't find the container with id 5d2537d9e68426c80bf8931257ac110e9b9bdfaf7e2d8f8ccd7a89dbebf43efd Jan 22 12:08:01 crc kubenswrapper[4773]: I0122 12:08:01.054256 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-k2w8h" event={"ID":"20770427-fb79-4ce3-b2c7-1914a3e7c366","Type":"ContainerStarted","Data":"a543f1b9e463f780d2fe0d0b95a2c0fd158e4f4529b67d3c57cb53f92eb00b8f"} Jan 22 12:08:01 crc kubenswrapper[4773]: I0122 12:08:01.055705 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-xz7xx" event={"ID":"7ee9761c-0ccc-47f6-bea9-bb66195bf7a8","Type":"ContainerStarted","Data":"2d262cd634a5552d0bd447cdd2c2bdb986aeaa2c38a645d0e54c7ac9814f9bb9"} Jan 22 12:08:01 crc kubenswrapper[4773]: I0122 12:08:01.106895 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:01 crc kubenswrapper[4773]: I0122 12:08:01.151458 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-h2qxn"] Jan 22 12:08:01 crc kubenswrapper[4773]: I0122 12:08:01.286507 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-796f78c94d-9qxd4"] Jan 22 12:08:01 crc kubenswrapper[4773]: W0122 12:08:01.290422 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8b92330a_6db4_49c8_88c2_c9d386a9795c.slice/crio-b728c965e9ab557c141c7bbf60953fa50498b346249ee2b75aabcc2078bce73e WatchSource:0}: Error finding container b728c965e9ab557c141c7bbf60953fa50498b346249ee2b75aabcc2078bce73e: Status 404 returned error can't find the container with id b728c965e9ab557c141c7bbf60953fa50498b346249ee2b75aabcc2078bce73e Jan 22 12:08:02 crc kubenswrapper[4773]: I0122 12:08:02.063050 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7v24g" event={"ID":"288106d1-959b-4002-b615-77452081552a","Type":"ContainerStarted","Data":"5d2537d9e68426c80bf8931257ac110e9b9bdfaf7e2d8f8ccd7a89dbebf43efd"} Jan 22 12:08:02 crc kubenswrapper[4773]: I0122 12:08:02.064310 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-h2qxn" event={"ID":"46c5b784-3baa-414b-b777-d2935b3d2056","Type":"ContainerStarted","Data":"92b117fb247c01d92e62a8c214b588e0b55d99975eca735f45119c4d9d148488"} Jan 22 12:08:02 crc kubenswrapper[4773]: I0122 12:08:02.066173 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-796f78c94d-9qxd4" event={"ID":"8b92330a-6db4-49c8-88c2-c9d386a9795c","Type":"ContainerStarted","Data":"651e62d5d606c65e7a01c8a10909fddff2df4c21599d9db041aa5529dc0fd0e9"} Jan 22 12:08:02 crc kubenswrapper[4773]: I0122 12:08:02.066205 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-796f78c94d-9qxd4" event={"ID":"8b92330a-6db4-49c8-88c2-c9d386a9795c","Type":"ContainerStarted","Data":"b728c965e9ab557c141c7bbf60953fa50498b346249ee2b75aabcc2078bce73e"} Jan 22 12:08:02 
Jan 22 12:08:02 crc kubenswrapper[4773]: I0122 12:08:02.083438 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-796f78c94d-9qxd4" podStartSLOduration=2.083406709 podStartE2EDuration="2.083406709s" podCreationTimestamp="2026-01-22 12:08:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:08:02.082436951 +0000 UTC m=+789.660552776" watchObservedRunningTime="2026-01-22 12:08:02.083406709 +0000 UTC m=+789.661522534"
Jan 22 12:08:04 crc kubenswrapper[4773]: I0122 12:08:04.074155 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 12:08:04 crc kubenswrapper[4773]: I0122 12:08:04.074848 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 12:08:04 crc kubenswrapper[4773]: I0122 12:08:04.074905 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5"
Jan 22 12:08:04 crc kubenswrapper[4773]: I0122 12:08:04.075559 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7882b6d2596720092fbfa9cbf9f782df49bd4b06ac71c0c20c36688e60fbb228"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 22 12:08:04 crc kubenswrapper[4773]: I0122 12:08:04.075617 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://7882b6d2596720092fbfa9cbf9f782df49bd4b06ac71c0c20c36688e60fbb228" gracePeriod=600
Jan 22 12:08:04 crc kubenswrapper[4773]: I0122 12:08:04.088026 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-k2w8h" event={"ID":"20770427-fb79-4ce3-b2c7-1914a3e7c366","Type":"ContainerStarted","Data":"92787cd82ddb0d3fd5a5d85eecc2f3809454af6d5dd38c5b52bfafcf65772e46"}
Jan 22 12:08:04 crc kubenswrapper[4773]: I0122 12:08:04.089241 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-xz7xx" event={"ID":"7ee9761c-0ccc-47f6-bea9-bb66195bf7a8","Type":"ContainerStarted","Data":"94e08cb6f5a17ebdf681faa4bd34136ecd1175c8b6ad998a1246fb9d94f8e11e"}
Jan 22 12:08:04 crc kubenswrapper[4773]: I0122 12:08:04.089469 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-xz7xx"
Jan 22 12:08:04 crc kubenswrapper[4773]: I0122 12:08:04.092647 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7v24g" event={"ID":"288106d1-959b-4002-b615-77452081552a","Type":"ContainerStarted","Data":"32ee73509584e931c0b826145db15ce6f4c98e309a754aa42159a09c2dbe2306"}
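The 12:08:04 entries above trace the kubelet's liveness-failure path end to end: patch_prober records the raw HTTP failure against 127.0.0.1:8798/health, prober.go marks the probe failed, the sync loop sees the unhealthy status, and kuberuntime kills the container with the pod's 600s grace period so it can be restarted. A minimal sketch of a probe shaped like this one, using the Kubernetes Go API types; the /health path and port 8798 come from the log, while the period and threshold are illustrative assumptions not visible here:

        package probes

        import (
                corev1 "k8s.io/api/core/v1"
                "k8s.io/apimachinery/pkg/util/intstr"
        )

        // machineConfigDaemonLiveness mirrors the probe failing above: an HTTP GET
        // against :8798/health. PeriodSeconds and FailureThreshold are assumptions.
        func machineConfigDaemonLiveness() *corev1.Probe {
                return &corev1.Probe{
                        ProbeHandler: corev1.ProbeHandler{
                                HTTPGet: &corev1.HTTPGetAction{
                                        Path: "/health",
                                        Port: intstr.FromInt(8798),
                                },
                        },
                        PeriodSeconds:    10, // assumed
                        FailureThreshold: 3,  // assumed
                }
        }

Note the large gracePeriod in the log: a liveness restart is a graceful kill (SIGTERM, wait, then SIGKILL), not an immediate SIGKILL, which is why the ContainerDied event only appears a second later.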
Jan 22 12:08:04 crc kubenswrapper[4773]: I0122 12:08:04.093352 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7v24g"
Jan 22 12:08:04 crc kubenswrapper[4773]: I0122 12:08:04.095436 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-h2qxn" event={"ID":"46c5b784-3baa-414b-b777-d2935b3d2056","Type":"ContainerStarted","Data":"5b7e4b9f73348db88abf85c27452355a5f7eda750defa19e67fdb4a9a66d49cf"}
Jan 22 12:08:04 crc kubenswrapper[4773]: I0122 12:08:04.111177 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-xz7xx" podStartSLOduration=1.353641649 podStartE2EDuration="4.111159543s" podCreationTimestamp="2026-01-22 12:08:00 +0000 UTC" firstStartedPulling="2026-01-22 12:08:00.831126013 +0000 UTC m=+788.409241838" lastFinishedPulling="2026-01-22 12:08:03.588643897 +0000 UTC m=+791.166759732" observedRunningTime="2026-01-22 12:08:04.104362741 +0000 UTC m=+791.682478586" watchObservedRunningTime="2026-01-22 12:08:04.111159543 +0000 UTC m=+791.689275368"
Jan 22 12:08:04 crc kubenswrapper[4773]: I0122 12:08:04.121165 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-h2qxn" podStartSLOduration=1.694383358 podStartE2EDuration="4.121150005s" podCreationTimestamp="2026-01-22 12:08:00 +0000 UTC" firstStartedPulling="2026-01-22 12:08:01.160756209 +0000 UTC m=+788.738872034" lastFinishedPulling="2026-01-22 12:08:03.587522856 +0000 UTC m=+791.165638681" observedRunningTime="2026-01-22 12:08:04.11955323 +0000 UTC m=+791.697669065" watchObservedRunningTime="2026-01-22 12:08:04.121150005 +0000 UTC m=+791.699265840"
Jan 22 12:08:04 crc kubenswrapper[4773]: I0122 12:08:04.146454 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7v24g" podStartSLOduration=1.608237369 podStartE2EDuration="4.146432568s" podCreationTimestamp="2026-01-22 12:08:00 +0000 UTC" firstStartedPulling="2026-01-22 12:08:01.049315586 +0000 UTC m=+788.627431411" lastFinishedPulling="2026-01-22 12:08:03.587510785 +0000 UTC m=+791.165626610" observedRunningTime="2026-01-22 12:08:04.140775108 +0000 UTC m=+791.718890953" watchObservedRunningTime="2026-01-22 12:08:04.146432568 +0000 UTC m=+791.724548403"
Jan 22 12:08:05 crc kubenswrapper[4773]: I0122 12:08:05.103058 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="7882b6d2596720092fbfa9cbf9f782df49bd4b06ac71c0c20c36688e60fbb228" exitCode=0
Jan 22 12:08:05 crc kubenswrapper[4773]: I0122 12:08:05.103157 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"7882b6d2596720092fbfa9cbf9f782df49bd4b06ac71c0c20c36688e60fbb228"}
Jan 22 12:08:05 crc kubenswrapper[4773]: I0122 12:08:05.103776 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"a884f8e7ecd2462a2aab65c47dccafce52794662c52b09332c27098b394743b2"}
Jan 22 12:08:05 crc kubenswrapper[4773]: I0122 12:08:05.103816 4773 scope.go:117] "RemoveContainer" containerID="5b191b48671b70945f5bd04770a902a07ca9065798b07b504fd658fdbc68a485"
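The podStartSLOduration figures above are derivable from the other fields in the same entry: the tracker subtracts the image-pull window from the end-to-end startup time, and the pull window comes from the monotonic (m=+...) offsets rather than the wall-clock timestamps. Reproducing the nmstate-handler-xz7xx numbers, a sketch with values copied from the log line:

        package sloduration

        import "fmt"

        // Example reproduces podStartSLOduration for nmstate-handler-xz7xx from the
        // fields logged above. The pull window uses the monotonic m=+ offsets, which
        // is why the result matches the logged value exactly.
        func Example() {
                const (
                        e2e          = 4.111159543   // podStartE2EDuration, seconds
                        pullStarted  = 788.409241838 // firstStartedPulling, m=+ offset
                        pullFinished = 791.166759732 // lastFinishedPulling, m=+ offset
                )
                pull := pullFinished - pullStarted // 2.757517894s spent pulling images
                fmt.Printf("%.9f\n", e2e-pull)     // 1.353641649 == podStartSLOduration
        }

The console pod two entries earlier shows the degenerate case: its image never had to be pulled, so both pull timestamps are the zero time and SLO duration equals E2E duration.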
(PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-k2w8h" event={"ID":"20770427-fb79-4ce3-b2c7-1914a3e7c366","Type":"ContainerStarted","Data":"9bc464b1ad702e0ca12dc47022b007560ea233ea432ca02e594416b00c501ccd"} Jan 22 12:08:07 crc kubenswrapper[4773]: I0122 12:08:07.142685 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-54757c584b-k2w8h" podStartSLOduration=2.035340751 podStartE2EDuration="7.142669024s" podCreationTimestamp="2026-01-22 12:08:00 +0000 UTC" firstStartedPulling="2026-01-22 12:08:00.993874752 +0000 UTC m=+788.571990577" lastFinishedPulling="2026-01-22 12:08:06.101203025 +0000 UTC m=+793.679318850" observedRunningTime="2026-01-22 12:08:07.14074813 +0000 UTC m=+794.718863955" watchObservedRunningTime="2026-01-22 12:08:07.142669024 +0000 UTC m=+794.720784849" Jan 22 12:08:10 crc kubenswrapper[4773]: I0122 12:08:10.797146 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-xz7xx" Jan 22 12:08:11 crc kubenswrapper[4773]: I0122 12:08:11.107852 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:11 crc kubenswrapper[4773]: I0122 12:08:11.107902 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:11 crc kubenswrapper[4773]: I0122 12:08:11.112403 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:11 crc kubenswrapper[4773]: I0122 12:08:11.152886 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-796f78c94d-9qxd4" Jan 22 12:08:11 crc kubenswrapper[4773]: I0122 12:08:11.199739 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-lrxk6"] Jan 22 12:08:20 crc kubenswrapper[4773]: I0122 12:08:20.754739 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-7v24g" Jan 22 12:08:32 crc kubenswrapper[4773]: I0122 12:08:32.544206 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs"] Jan 22 12:08:32 crc kubenswrapper[4773]: I0122 12:08:32.545993 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs" Jan 22 12:08:32 crc kubenswrapper[4773]: I0122 12:08:32.548279 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 22 12:08:32 crc kubenswrapper[4773]: I0122 12:08:32.555319 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs"] Jan 22 12:08:32 crc kubenswrapper[4773]: I0122 12:08:32.557098 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs\" (UID: \"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs" Jan 22 12:08:32 crc kubenswrapper[4773]: I0122 12:08:32.557950 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2fw4\" (UniqueName: \"kubernetes.io/projected/d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf-kube-api-access-h2fw4\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs\" (UID: \"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs" Jan 22 12:08:32 crc kubenswrapper[4773]: I0122 12:08:32.558122 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs\" (UID: \"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs" Jan 22 12:08:32 crc kubenswrapper[4773]: I0122 12:08:32.658483 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2fw4\" (UniqueName: \"kubernetes.io/projected/d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf-kube-api-access-h2fw4\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs\" (UID: \"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs" Jan 22 12:08:32 crc kubenswrapper[4773]: I0122 12:08:32.658538 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs\" (UID: \"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs" Jan 22 12:08:32 crc kubenswrapper[4773]: I0122 12:08:32.658607 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs\" (UID: \"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs" Jan 22 12:08:32 crc kubenswrapper[4773]: I0122 12:08:32.659159 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs\" (UID: \"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs" Jan 22 12:08:32 crc kubenswrapper[4773]: I0122 12:08:32.659158 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs\" (UID: \"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs" Jan 22 12:08:32 crc kubenswrapper[4773]: I0122 12:08:32.678012 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2fw4\" (UniqueName: \"kubernetes.io/projected/d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf-kube-api-access-h2fw4\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs\" (UID: \"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs" Jan 22 12:08:32 crc kubenswrapper[4773]: I0122 12:08:32.864662 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs" Jan 22 12:08:33 crc kubenswrapper[4773]: I0122 12:08:33.260589 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs"] Jan 22 12:08:33 crc kubenswrapper[4773]: I0122 12:08:33.282448 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs" event={"ID":"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf","Type":"ContainerStarted","Data":"b84cad00c16fefde18a0c95d42faac7c5cc652a3f15ce26b1b0a4301ecf65b8b"} Jan 22 12:08:34 crc kubenswrapper[4773]: I0122 12:08:34.293824 4773 generic.go:334] "Generic (PLEG): container finished" podID="d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf" containerID="d448424219286423bee3204ba1783ee141ab4eb57dd0329d13fcaf5876447d62" exitCode=0 Jan 22 12:08:34 crc kubenswrapper[4773]: I0122 12:08:34.293880 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs" event={"ID":"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf","Type":"ContainerDied","Data":"d448424219286423bee3204ba1783ee141ab4eb57dd0329d13fcaf5876447d62"} Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.252387 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-lrxk6" podUID="139ce7fa-7d44-4233-b4e9-b7827ce4c68d" containerName="console" containerID="cri-o://25bcff196e82020861ec33ee875e8d788e1b8f446955453f4246695e6d8da475" gracePeriod=15 Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.318264 4773 generic.go:334] "Generic (PLEG): container finished" podID="d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf" containerID="3f6716a501412f8a28d553569ef9c4c5638d29110eaf23b09c2b39f4092a996d" exitCode=0 Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.318365 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs" 
event={"ID":"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf","Type":"ContainerDied","Data":"3f6716a501412f8a28d553569ef9c4c5638d29110eaf23b09c2b39f4092a996d"} Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.699246 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-lrxk6_139ce7fa-7d44-4233-b4e9-b7827ce4c68d/console/0.log" Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.699548 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-lrxk6" Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.820415 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-trusted-ca-bundle\") pod \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.820494 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-oauth-serving-cert\") pod \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.820553 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-console-config\") pod \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.820599 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-console-serving-cert\") pod \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.820659 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-console-oauth-config\") pod \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.820684 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-service-ca\") pod \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.820742 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7t98h\" (UniqueName: \"kubernetes.io/projected/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-kube-api-access-7t98h\") pod \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\" (UID: \"139ce7fa-7d44-4233-b4e9-b7827ce4c68d\") " Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.821414 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "139ce7fa-7d44-4233-b4e9-b7827ce4c68d" (UID: "139ce7fa-7d44-4233-b4e9-b7827ce4c68d"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.821477 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "139ce7fa-7d44-4233-b4e9-b7827ce4c68d" (UID: "139ce7fa-7d44-4233-b4e9-b7827ce4c68d"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.822253 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-console-config" (OuterVolumeSpecName: "console-config") pod "139ce7fa-7d44-4233-b4e9-b7827ce4c68d" (UID: "139ce7fa-7d44-4233-b4e9-b7827ce4c68d"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.822264 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-service-ca" (OuterVolumeSpecName: "service-ca") pod "139ce7fa-7d44-4233-b4e9-b7827ce4c68d" (UID: "139ce7fa-7d44-4233-b4e9-b7827ce4c68d"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.833811 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "139ce7fa-7d44-4233-b4e9-b7827ce4c68d" (UID: "139ce7fa-7d44-4233-b4e9-b7827ce4c68d"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.834259 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-kube-api-access-7t98h" (OuterVolumeSpecName: "kube-api-access-7t98h") pod "139ce7fa-7d44-4233-b4e9-b7827ce4c68d" (UID: "139ce7fa-7d44-4233-b4e9-b7827ce4c68d"). InnerVolumeSpecName "kube-api-access-7t98h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.834496 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "139ce7fa-7d44-4233-b4e9-b7827ce4c68d" (UID: "139ce7fa-7d44-4233-b4e9-b7827ce4c68d"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.922424 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7t98h\" (UniqueName: \"kubernetes.io/projected/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-kube-api-access-7t98h\") on node \"crc\" DevicePath \"\"" Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.922518 4773 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.922537 4773 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.922551 4773 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-console-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.922562 4773 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.922575 4773 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:08:36 crc kubenswrapper[4773]: I0122 12:08:36.922585 4773 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/139ce7fa-7d44-4233-b4e9-b7827ce4c68d-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 12:08:37 crc kubenswrapper[4773]: I0122 12:08:37.325584 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-lrxk6_139ce7fa-7d44-4233-b4e9-b7827ce4c68d/console/0.log" Jan 22 12:08:37 crc kubenswrapper[4773]: I0122 12:08:37.325675 4773 generic.go:334] "Generic (PLEG): container finished" podID="139ce7fa-7d44-4233-b4e9-b7827ce4c68d" containerID="25bcff196e82020861ec33ee875e8d788e1b8f446955453f4246695e6d8da475" exitCode=2 Jan 22 12:08:37 crc kubenswrapper[4773]: I0122 12:08:37.325737 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-lrxk6" event={"ID":"139ce7fa-7d44-4233-b4e9-b7827ce4c68d","Type":"ContainerDied","Data":"25bcff196e82020861ec33ee875e8d788e1b8f446955453f4246695e6d8da475"} Jan 22 12:08:37 crc kubenswrapper[4773]: I0122 12:08:37.325768 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-lrxk6" event={"ID":"139ce7fa-7d44-4233-b4e9-b7827ce4c68d","Type":"ContainerDied","Data":"03d13dfb3744909c7dd0be8312429d158b675a67abf385b8404311d32139a8ca"} Jan 22 12:08:37 crc kubenswrapper[4773]: I0122 12:08:37.325770 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-lrxk6" Jan 22 12:08:37 crc kubenswrapper[4773]: I0122 12:08:37.325786 4773 scope.go:117] "RemoveContainer" containerID="25bcff196e82020861ec33ee875e8d788e1b8f446955453f4246695e6d8da475" Jan 22 12:08:37 crc kubenswrapper[4773]: I0122 12:08:37.329801 4773 generic.go:334] "Generic (PLEG): container finished" podID="d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf" containerID="0dcf995601c95dd496e5c0e54958f394cc0732bcbb0543f4ecfc47a654d0528a" exitCode=0 Jan 22 12:08:37 crc kubenswrapper[4773]: I0122 12:08:37.330073 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs" event={"ID":"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf","Type":"ContainerDied","Data":"0dcf995601c95dd496e5c0e54958f394cc0732bcbb0543f4ecfc47a654d0528a"} Jan 22 12:08:37 crc kubenswrapper[4773]: I0122 12:08:37.346561 4773 scope.go:117] "RemoveContainer" containerID="25bcff196e82020861ec33ee875e8d788e1b8f446955453f4246695e6d8da475" Jan 22 12:08:37 crc kubenswrapper[4773]: E0122 12:08:37.348102 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25bcff196e82020861ec33ee875e8d788e1b8f446955453f4246695e6d8da475\": container with ID starting with 25bcff196e82020861ec33ee875e8d788e1b8f446955453f4246695e6d8da475 not found: ID does not exist" containerID="25bcff196e82020861ec33ee875e8d788e1b8f446955453f4246695e6d8da475" Jan 22 12:08:37 crc kubenswrapper[4773]: I0122 12:08:37.348136 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25bcff196e82020861ec33ee875e8d788e1b8f446955453f4246695e6d8da475"} err="failed to get container status \"25bcff196e82020861ec33ee875e8d788e1b8f446955453f4246695e6d8da475\": rpc error: code = NotFound desc = could not find container \"25bcff196e82020861ec33ee875e8d788e1b8f446955453f4246695e6d8da475\": container with ID starting with 25bcff196e82020861ec33ee875e8d788e1b8f446955453f4246695e6d8da475 not found: ID does not exist" Jan 22 12:08:37 crc kubenswrapper[4773]: I0122 12:08:37.372219 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-lrxk6"] Jan 22 12:08:37 crc kubenswrapper[4773]: I0122 12:08:37.375745 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-lrxk6"] Jan 22 12:08:38 crc kubenswrapper[4773]: I0122 12:08:38.582649 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs" Jan 22 12:08:38 crc kubenswrapper[4773]: I0122 12:08:38.645962 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2fw4\" (UniqueName: \"kubernetes.io/projected/d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf-kube-api-access-h2fw4\") pod \"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf\" (UID: \"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf\") " Jan 22 12:08:38 crc kubenswrapper[4773]: I0122 12:08:38.646021 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf-bundle\") pod \"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf\" (UID: \"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf\") " Jan 22 12:08:38 crc kubenswrapper[4773]: I0122 12:08:38.646059 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf-util\") pod \"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf\" (UID: \"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf\") " Jan 22 12:08:38 crc kubenswrapper[4773]: I0122 12:08:38.647124 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf-bundle" (OuterVolumeSpecName: "bundle") pod "d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf" (UID: "d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:08:38 crc kubenswrapper[4773]: I0122 12:08:38.652148 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf-kube-api-access-h2fw4" (OuterVolumeSpecName: "kube-api-access-h2fw4") pod "d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf" (UID: "d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf"). InnerVolumeSpecName "kube-api-access-h2fw4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:08:38 crc kubenswrapper[4773]: I0122 12:08:38.661254 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf-util" (OuterVolumeSpecName: "util") pod "d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf" (UID: "d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:08:38 crc kubenswrapper[4773]: I0122 12:08:38.666551 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="139ce7fa-7d44-4233-b4e9-b7827ce4c68d" path="/var/lib/kubelet/pods/139ce7fa-7d44-4233-b4e9-b7827ce4c68d/volumes" Jan 22 12:08:38 crc kubenswrapper[4773]: I0122 12:08:38.747610 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2fw4\" (UniqueName: \"kubernetes.io/projected/d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf-kube-api-access-h2fw4\") on node \"crc\" DevicePath \"\"" Jan 22 12:08:38 crc kubenswrapper[4773]: I0122 12:08:38.747667 4773 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:08:38 crc kubenswrapper[4773]: I0122 12:08:38.747689 4773 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf-util\") on node \"crc\" DevicePath \"\"" Jan 22 12:08:39 crc kubenswrapper[4773]: I0122 12:08:39.349260 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs" Jan 22 12:08:39 crc kubenswrapper[4773]: I0122 12:08:39.349253 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs" event={"ID":"d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf","Type":"ContainerDied","Data":"b84cad00c16fefde18a0c95d42faac7c5cc652a3f15ce26b1b0a4301ecf65b8b"} Jan 22 12:08:39 crc kubenswrapper[4773]: I0122 12:08:39.349434 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b84cad00c16fefde18a0c95d42faac7c5cc652a3f15ce26b1b0a4301ecf65b8b" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.730957 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-75c7758c8d-jv4lh"] Jan 22 12:08:47 crc kubenswrapper[4773]: E0122 12:08:47.731835 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf" containerName="util" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.731850 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf" containerName="util" Jan 22 12:08:47 crc kubenswrapper[4773]: E0122 12:08:47.731862 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf" containerName="pull" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.731869 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf" containerName="pull" Jan 22 12:08:47 crc kubenswrapper[4773]: E0122 12:08:47.731884 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf" containerName="extract" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.731891 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf" containerName="extract" Jan 22 12:08:47 crc kubenswrapper[4773]: E0122 12:08:47.731905 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="139ce7fa-7d44-4233-b4e9-b7827ce4c68d" containerName="console" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.731912 4773 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="139ce7fa-7d44-4233-b4e9-b7827ce4c68d" containerName="console" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.732045 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf" containerName="extract" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.732057 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="139ce7fa-7d44-4233-b4e9-b7827ce4c68d" containerName="console" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.732560 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-75c7758c8d-jv4lh" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.735838 4773 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.735884 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.737716 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.737747 4773 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.737803 4773 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-jwjn9" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.748785 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-75c7758c8d-jv4lh"] Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.926939 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6m7w\" (UniqueName: \"kubernetes.io/projected/acc4b660-fb70-4713-ba23-597608298bc1-kube-api-access-b6m7w\") pod \"metallb-operator-controller-manager-75c7758c8d-jv4lh\" (UID: \"acc4b660-fb70-4713-ba23-597608298bc1\") " pod="metallb-system/metallb-operator-controller-manager-75c7758c8d-jv4lh" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.927000 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/acc4b660-fb70-4713-ba23-597608298bc1-apiservice-cert\") pod \"metallb-operator-controller-manager-75c7758c8d-jv4lh\" (UID: \"acc4b660-fb70-4713-ba23-597608298bc1\") " pod="metallb-system/metallb-operator-controller-manager-75c7758c8d-jv4lh" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.927026 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/acc4b660-fb70-4713-ba23-597608298bc1-webhook-cert\") pod \"metallb-operator-controller-manager-75c7758c8d-jv4lh\" (UID: \"acc4b660-fb70-4713-ba23-597608298bc1\") " pod="metallb-system/metallb-operator-controller-manager-75c7758c8d-jv4lh" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.976710 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-789dcb66b5-4k49c"] Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.977440 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-789dcb66b5-4k49c" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.981870 4773 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-dzk8d" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.981914 4773 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.984140 4773 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Jan 22 12:08:47 crc kubenswrapper[4773]: I0122 12:08:47.995891 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-789dcb66b5-4k49c"] Jan 22 12:08:48 crc kubenswrapper[4773]: I0122 12:08:48.027982 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6m7w\" (UniqueName: \"kubernetes.io/projected/acc4b660-fb70-4713-ba23-597608298bc1-kube-api-access-b6m7w\") pod \"metallb-operator-controller-manager-75c7758c8d-jv4lh\" (UID: \"acc4b660-fb70-4713-ba23-597608298bc1\") " pod="metallb-system/metallb-operator-controller-manager-75c7758c8d-jv4lh" Jan 22 12:08:48 crc kubenswrapper[4773]: I0122 12:08:48.028055 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/acc4b660-fb70-4713-ba23-597608298bc1-apiservice-cert\") pod \"metallb-operator-controller-manager-75c7758c8d-jv4lh\" (UID: \"acc4b660-fb70-4713-ba23-597608298bc1\") " pod="metallb-system/metallb-operator-controller-manager-75c7758c8d-jv4lh" Jan 22 12:08:48 crc kubenswrapper[4773]: I0122 12:08:48.028092 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/acc4b660-fb70-4713-ba23-597608298bc1-webhook-cert\") pod \"metallb-operator-controller-manager-75c7758c8d-jv4lh\" (UID: \"acc4b660-fb70-4713-ba23-597608298bc1\") " pod="metallb-system/metallb-operator-controller-manager-75c7758c8d-jv4lh" Jan 22 12:08:48 crc kubenswrapper[4773]: I0122 12:08:48.036661 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/acc4b660-fb70-4713-ba23-597608298bc1-webhook-cert\") pod \"metallb-operator-controller-manager-75c7758c8d-jv4lh\" (UID: \"acc4b660-fb70-4713-ba23-597608298bc1\") " pod="metallb-system/metallb-operator-controller-manager-75c7758c8d-jv4lh" Jan 22 12:08:48 crc kubenswrapper[4773]: I0122 12:08:48.051144 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6m7w\" (UniqueName: \"kubernetes.io/projected/acc4b660-fb70-4713-ba23-597608298bc1-kube-api-access-b6m7w\") pod \"metallb-operator-controller-manager-75c7758c8d-jv4lh\" (UID: \"acc4b660-fb70-4713-ba23-597608298bc1\") " pod="metallb-system/metallb-operator-controller-manager-75c7758c8d-jv4lh" Jan 22 12:08:48 crc kubenswrapper[4773]: I0122 12:08:48.055125 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/acc4b660-fb70-4713-ba23-597608298bc1-apiservice-cert\") pod \"metallb-operator-controller-manager-75c7758c8d-jv4lh\" (UID: \"acc4b660-fb70-4713-ba23-597608298bc1\") " pod="metallb-system/metallb-operator-controller-manager-75c7758c8d-jv4lh" Jan 22 12:08:48 crc kubenswrapper[4773]: I0122 12:08:48.129172 4773 
Jan 22 12:08:48 crc kubenswrapper[4773]: I0122 12:08:48.129172 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kj2p8\" (UniqueName: \"kubernetes.io/projected/3ada08ed-114c-4b1f-9110-9d4d60edaa27-kube-api-access-kj2p8\") pod \"metallb-operator-webhook-server-789dcb66b5-4k49c\" (UID: \"3ada08ed-114c-4b1f-9110-9d4d60edaa27\") " pod="metallb-system/metallb-operator-webhook-server-789dcb66b5-4k49c"
Jan 22 12:08:48 crc kubenswrapper[4773]: I0122 12:08:48.129276 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3ada08ed-114c-4b1f-9110-9d4d60edaa27-webhook-cert\") pod \"metallb-operator-webhook-server-789dcb66b5-4k49c\" (UID: \"3ada08ed-114c-4b1f-9110-9d4d60edaa27\") " pod="metallb-system/metallb-operator-webhook-server-789dcb66b5-4k49c"
Jan 22 12:08:48 crc kubenswrapper[4773]: I0122 12:08:48.129325 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3ada08ed-114c-4b1f-9110-9d4d60edaa27-apiservice-cert\") pod \"metallb-operator-webhook-server-789dcb66b5-4k49c\" (UID: \"3ada08ed-114c-4b1f-9110-9d4d60edaa27\") " pod="metallb-system/metallb-operator-webhook-server-789dcb66b5-4k49c"
Jan 22 12:08:48 crc kubenswrapper[4773]: I0122 12:08:48.230984 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kj2p8\" (UniqueName: \"kubernetes.io/projected/3ada08ed-114c-4b1f-9110-9d4d60edaa27-kube-api-access-kj2p8\") pod \"metallb-operator-webhook-server-789dcb66b5-4k49c\" (UID: \"3ada08ed-114c-4b1f-9110-9d4d60edaa27\") " pod="metallb-system/metallb-operator-webhook-server-789dcb66b5-4k49c"
Jan 22 12:08:48 crc kubenswrapper[4773]: I0122 12:08:48.231086 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3ada08ed-114c-4b1f-9110-9d4d60edaa27-webhook-cert\") pod \"metallb-operator-webhook-server-789dcb66b5-4k49c\" (UID: \"3ada08ed-114c-4b1f-9110-9d4d60edaa27\") " pod="metallb-system/metallb-operator-webhook-server-789dcb66b5-4k49c"
Jan 22 12:08:48 crc kubenswrapper[4773]: I0122 12:08:48.231114 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3ada08ed-114c-4b1f-9110-9d4d60edaa27-apiservice-cert\") pod \"metallb-operator-webhook-server-789dcb66b5-4k49c\" (UID: \"3ada08ed-114c-4b1f-9110-9d4d60edaa27\") " pod="metallb-system/metallb-operator-webhook-server-789dcb66b5-4k49c"
Jan 22 12:08:48 crc kubenswrapper[4773]: I0122 12:08:48.236171 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3ada08ed-114c-4b1f-9110-9d4d60edaa27-webhook-cert\") pod \"metallb-operator-webhook-server-789dcb66b5-4k49c\" (UID: \"3ada08ed-114c-4b1f-9110-9d4d60edaa27\") " pod="metallb-system/metallb-operator-webhook-server-789dcb66b5-4k49c"
Jan 22 12:08:48 crc kubenswrapper[4773]: I0122 12:08:48.242758 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3ada08ed-114c-4b1f-9110-9d4d60edaa27-apiservice-cert\") pod \"metallb-operator-webhook-server-789dcb66b5-4k49c\" (UID: \"3ada08ed-114c-4b1f-9110-9d4d60edaa27\") " pod="metallb-system/metallb-operator-webhook-server-789dcb66b5-4k49c"
"MountVolume.SetUp succeeded for volume \"kube-api-access-kj2p8\" (UniqueName: \"kubernetes.io/projected/3ada08ed-114c-4b1f-9110-9d4d60edaa27-kube-api-access-kj2p8\") pod \"metallb-operator-webhook-server-789dcb66b5-4k49c\" (UID: \"3ada08ed-114c-4b1f-9110-9d4d60edaa27\") " pod="metallb-system/metallb-operator-webhook-server-789dcb66b5-4k49c" Jan 22 12:08:48 crc kubenswrapper[4773]: I0122 12:08:48.292505 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-789dcb66b5-4k49c" Jan 22 12:08:48 crc kubenswrapper[4773]: I0122 12:08:48.348732 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-75c7758c8d-jv4lh" Jan 22 12:08:49 crc kubenswrapper[4773]: I0122 12:08:49.092177 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-789dcb66b5-4k49c"] Jan 22 12:08:49 crc kubenswrapper[4773]: I0122 12:08:49.444963 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-75c7758c8d-jv4lh"] Jan 22 12:08:49 crc kubenswrapper[4773]: I0122 12:08:49.520880 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-75c7758c8d-jv4lh" event={"ID":"acc4b660-fb70-4713-ba23-597608298bc1","Type":"ContainerStarted","Data":"24617839c66f63619aa7f29e50f55949e3137e9a0b53b339c5b4ea416767e0a1"} Jan 22 12:08:49 crc kubenswrapper[4773]: I0122 12:08:49.522052 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-789dcb66b5-4k49c" event={"ID":"3ada08ed-114c-4b1f-9110-9d4d60edaa27","Type":"ContainerStarted","Data":"cd10c4b5d8eba1f19305ba0c6e3d424e092d0d53eed90327484bc918f71f49f7"} Jan 22 12:08:57 crc kubenswrapper[4773]: I0122 12:08:57.587616 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-75c7758c8d-jv4lh" event={"ID":"acc4b660-fb70-4713-ba23-597608298bc1","Type":"ContainerStarted","Data":"372349099e586f3d1d3f4534bf3ca8deedbbe89a93c4537fcd97de562b0f4674"} Jan 22 12:08:57 crc kubenswrapper[4773]: I0122 12:08:57.588378 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-75c7758c8d-jv4lh" Jan 22 12:08:57 crc kubenswrapper[4773]: I0122 12:08:57.589875 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-789dcb66b5-4k49c" event={"ID":"3ada08ed-114c-4b1f-9110-9d4d60edaa27","Type":"ContainerStarted","Data":"e7340247a027fdbde53f1e49425e514804a7a90eb6ab16b05f21016166152877"} Jan 22 12:08:57 crc kubenswrapper[4773]: I0122 12:08:57.590316 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-789dcb66b5-4k49c" Jan 22 12:08:57 crc kubenswrapper[4773]: I0122 12:08:57.616522 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-75c7758c8d-jv4lh" podStartSLOduration=2.659815726 podStartE2EDuration="10.616500408s" podCreationTimestamp="2026-01-22 12:08:47 +0000 UTC" firstStartedPulling="2026-01-22 12:08:49.452556671 +0000 UTC m=+837.030672496" lastFinishedPulling="2026-01-22 12:08:57.409241353 +0000 UTC m=+844.987357178" observedRunningTime="2026-01-22 12:08:57.615031776 +0000 UTC m=+845.193147611" watchObservedRunningTime="2026-01-22 12:08:57.616500408 +0000 
UTC m=+845.194616223" Jan 22 12:08:57 crc kubenswrapper[4773]: I0122 12:08:57.640663 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-789dcb66b5-4k49c" podStartSLOduration=2.321701071 podStartE2EDuration="10.640639766s" podCreationTimestamp="2026-01-22 12:08:47 +0000 UTC" firstStartedPulling="2026-01-22 12:08:49.108782027 +0000 UTC m=+836.686897852" lastFinishedPulling="2026-01-22 12:08:57.427720722 +0000 UTC m=+845.005836547" observedRunningTime="2026-01-22 12:08:57.636582092 +0000 UTC m=+845.214697937" watchObservedRunningTime="2026-01-22 12:08:57.640639766 +0000 UTC m=+845.218755591" Jan 22 12:09:08 crc kubenswrapper[4773]: I0122 12:09:08.298893 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-789dcb66b5-4k49c" Jan 22 12:09:28 crc kubenswrapper[4773]: I0122 12:09:28.351210 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-75c7758c8d-jv4lh" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.178867 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-28242"] Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.181416 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.182972 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-s46dq"] Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.183820 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-s46dq" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.184370 4773 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.185446 4773 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-cj2kc" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.186492 4773 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.186507 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.209123 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-s46dq"] Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.292990 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/574153bd-b3c2-4a73-81d2-15b68de060ca-frr-startup\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.293041 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/574153bd-b3c2-4a73-81d2-15b68de060ca-metrics-certs\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.293089 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qz5j5\" (UniqueName: \"kubernetes.io/projected/941adce7-fcb2-4191-920a-e9279f9ac0db-kube-api-access-qz5j5\") pod \"frr-k8s-webhook-server-7df86c4f6c-s46dq\" (UID: \"941adce7-fcb2-4191-920a-e9279f9ac0db\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-s46dq" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.293135 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/574153bd-b3c2-4a73-81d2-15b68de060ca-frr-conf\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.293155 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/574153bd-b3c2-4a73-81d2-15b68de060ca-metrics\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.293169 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bsfcl\" (UniqueName: \"kubernetes.io/projected/574153bd-b3c2-4a73-81d2-15b68de060ca-kube-api-access-bsfcl\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.293184 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/941adce7-fcb2-4191-920a-e9279f9ac0db-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-s46dq\" (UID: \"941adce7-fcb2-4191-920a-e9279f9ac0db\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-s46dq" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.293204 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/574153bd-b3c2-4a73-81d2-15b68de060ca-frr-sockets\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.293225 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/574153bd-b3c2-4a73-81d2-15b68de060ca-reloader\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.299569 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-8d28f"] Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.300765 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-8d28f" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.302907 4773 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.302932 4773 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.303570 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.310462 4773 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-4zbfd" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.325864 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-7vtfm"] Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.326940 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-7vtfm" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.329154 4773 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.339102 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-7vtfm"] Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.393870 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/574153bd-b3c2-4a73-81d2-15b68de060ca-frr-sockets\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.393924 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-metrics-certs\") pod \"speaker-8d28f\" (UID: \"8f8f8131-f668-4d0c-be50-21c1eb7b62ff\") " pod="metallb-system/speaker-8d28f" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.393957 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/574153bd-b3c2-4a73-81d2-15b68de060ca-reloader\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.393984 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-memberlist\") pod \"speaker-8d28f\" (UID: \"8f8f8131-f668-4d0c-be50-21c1eb7b62ff\") " pod="metallb-system/speaker-8d28f" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.394042 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/574153bd-b3c2-4a73-81d2-15b68de060ca-frr-startup\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.394062 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/574153bd-b3c2-4a73-81d2-15b68de060ca-metrics-certs\") pod \"frr-k8s-28242\" (UID: 
\"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.394083 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9l74g\" (UniqueName: \"kubernetes.io/projected/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-kube-api-access-9l74g\") pod \"speaker-8d28f\" (UID: \"8f8f8131-f668-4d0c-be50-21c1eb7b62ff\") " pod="metallb-system/speaker-8d28f" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.394126 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qz5j5\" (UniqueName: \"kubernetes.io/projected/941adce7-fcb2-4191-920a-e9279f9ac0db-kube-api-access-qz5j5\") pod \"frr-k8s-webhook-server-7df86c4f6c-s46dq\" (UID: \"941adce7-fcb2-4191-920a-e9279f9ac0db\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-s46dq" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.394153 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/574153bd-b3c2-4a73-81d2-15b68de060ca-frr-conf\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.394167 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-metallb-excludel2\") pod \"speaker-8d28f\" (UID: \"8f8f8131-f668-4d0c-be50-21c1eb7b62ff\") " pod="metallb-system/speaker-8d28f" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.394187 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/941adce7-fcb2-4191-920a-e9279f9ac0db-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-s46dq\" (UID: \"941adce7-fcb2-4191-920a-e9279f9ac0db\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-s46dq" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.394202 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/574153bd-b3c2-4a73-81d2-15b68de060ca-metrics\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.394218 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bsfcl\" (UniqueName: \"kubernetes.io/projected/574153bd-b3c2-4a73-81d2-15b68de060ca-kube-api-access-bsfcl\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.394430 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/574153bd-b3c2-4a73-81d2-15b68de060ca-frr-sockets\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.394498 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/574153bd-b3c2-4a73-81d2-15b68de060ca-reloader\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: E0122 12:09:29.394634 4773 secret.go:188] Couldn't 
get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Jan 22 12:09:29 crc kubenswrapper[4773]: E0122 12:09:29.394684 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/574153bd-b3c2-4a73-81d2-15b68de060ca-metrics-certs podName:574153bd-b3c2-4a73-81d2-15b68de060ca nodeName:}" failed. No retries permitted until 2026-01-22 12:09:29.894666067 +0000 UTC m=+877.472781982 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/574153bd-b3c2-4a73-81d2-15b68de060ca-metrics-certs") pod "frr-k8s-28242" (UID: "574153bd-b3c2-4a73-81d2-15b68de060ca") : secret "frr-k8s-certs-secret" not found Jan 22 12:09:29 crc kubenswrapper[4773]: E0122 12:09:29.394734 4773 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Jan 22 12:09:29 crc kubenswrapper[4773]: E0122 12:09:29.394760 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/941adce7-fcb2-4191-920a-e9279f9ac0db-cert podName:941adce7-fcb2-4191-920a-e9279f9ac0db nodeName:}" failed. No retries permitted until 2026-01-22 12:09:29.894750199 +0000 UTC m=+877.472866034 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/941adce7-fcb2-4191-920a-e9279f9ac0db-cert") pod "frr-k8s-webhook-server-7df86c4f6c-s46dq" (UID: "941adce7-fcb2-4191-920a-e9279f9ac0db") : secret "frr-k8s-webhook-server-cert" not found Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.394799 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/574153bd-b3c2-4a73-81d2-15b68de060ca-frr-conf\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.394991 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/574153bd-b3c2-4a73-81d2-15b68de060ca-metrics\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.395342 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/574153bd-b3c2-4a73-81d2-15b68de060ca-frr-startup\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.418886 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bsfcl\" (UniqueName: \"kubernetes.io/projected/574153bd-b3c2-4a73-81d2-15b68de060ca-kube-api-access-bsfcl\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.420018 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qz5j5\" (UniqueName: \"kubernetes.io/projected/941adce7-fcb2-4191-920a-e9279f9ac0db-kube-api-access-qz5j5\") pod \"frr-k8s-webhook-server-7df86c4f6c-s46dq\" (UID: \"941adce7-fcb2-4191-920a-e9279f9ac0db\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-s46dq" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.495082 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"metrics-certs\" (UniqueName: \"kubernetes.io/secret/161e16b2-16b1-4485-aa07-4f8a766d347f-metrics-certs\") pod \"controller-6968d8fdc4-7vtfm\" (UID: \"161e16b2-16b1-4485-aa07-4f8a766d347f\") " pod="metallb-system/controller-6968d8fdc4-7vtfm" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.495133 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-metrics-certs\") pod \"speaker-8d28f\" (UID: \"8f8f8131-f668-4d0c-be50-21c1eb7b62ff\") " pod="metallb-system/speaker-8d28f" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.495154 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-memberlist\") pod \"speaker-8d28f\" (UID: \"8f8f8131-f668-4d0c-be50-21c1eb7b62ff\") " pod="metallb-system/speaker-8d28f" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.495180 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8v9f\" (UniqueName: \"kubernetes.io/projected/161e16b2-16b1-4485-aa07-4f8a766d347f-kube-api-access-l8v9f\") pod \"controller-6968d8fdc4-7vtfm\" (UID: \"161e16b2-16b1-4485-aa07-4f8a766d347f\") " pod="metallb-system/controller-6968d8fdc4-7vtfm" Jan 22 12:09:29 crc kubenswrapper[4773]: E0122 12:09:29.495262 4773 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Jan 22 12:09:29 crc kubenswrapper[4773]: E0122 12:09:29.495322 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-metrics-certs podName:8f8f8131-f668-4d0c-be50-21c1eb7b62ff nodeName:}" failed. No retries permitted until 2026-01-22 12:09:29.995308706 +0000 UTC m=+877.573424531 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-metrics-certs") pod "speaker-8d28f" (UID: "8f8f8131-f668-4d0c-be50-21c1eb7b62ff") : secret "speaker-certs-secret" not found Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.495320 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/161e16b2-16b1-4485-aa07-4f8a766d347f-cert\") pod \"controller-6968d8fdc4-7vtfm\" (UID: \"161e16b2-16b1-4485-aa07-4f8a766d347f\") " pod="metallb-system/controller-6968d8fdc4-7vtfm" Jan 22 12:09:29 crc kubenswrapper[4773]: E0122 12:09:29.495370 4773 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 22 12:09:29 crc kubenswrapper[4773]: E0122 12:09:29.495494 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-memberlist podName:8f8f8131-f668-4d0c-be50-21c1eb7b62ff nodeName:}" failed. No retries permitted until 2026-01-22 12:09:29.99547291 +0000 UTC m=+877.573588805 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-memberlist") pod "speaker-8d28f" (UID: "8f8f8131-f668-4d0c-be50-21c1eb7b62ff") : secret "metallb-memberlist" not found Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.495526 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9l74g\" (UniqueName: \"kubernetes.io/projected/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-kube-api-access-9l74g\") pod \"speaker-8d28f\" (UID: \"8f8f8131-f668-4d0c-be50-21c1eb7b62ff\") " pod="metallb-system/speaker-8d28f" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.495665 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-metallb-excludel2\") pod \"speaker-8d28f\" (UID: \"8f8f8131-f668-4d0c-be50-21c1eb7b62ff\") " pod="metallb-system/speaker-8d28f" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.496504 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-metallb-excludel2\") pod \"speaker-8d28f\" (UID: \"8f8f8131-f668-4d0c-be50-21c1eb7b62ff\") " pod="metallb-system/speaker-8d28f" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.511856 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9l74g\" (UniqueName: \"kubernetes.io/projected/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-kube-api-access-9l74g\") pod \"speaker-8d28f\" (UID: \"8f8f8131-f668-4d0c-be50-21c1eb7b62ff\") " pod="metallb-system/speaker-8d28f" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.597117 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/161e16b2-16b1-4485-aa07-4f8a766d347f-metrics-certs\") pod \"controller-6968d8fdc4-7vtfm\" (UID: \"161e16b2-16b1-4485-aa07-4f8a766d347f\") " pod="metallb-system/controller-6968d8fdc4-7vtfm" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.597248 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8v9f\" (UniqueName: \"kubernetes.io/projected/161e16b2-16b1-4485-aa07-4f8a766d347f-kube-api-access-l8v9f\") pod \"controller-6968d8fdc4-7vtfm\" (UID: \"161e16b2-16b1-4485-aa07-4f8a766d347f\") " pod="metallb-system/controller-6968d8fdc4-7vtfm" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.597313 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/161e16b2-16b1-4485-aa07-4f8a766d347f-cert\") pod \"controller-6968d8fdc4-7vtfm\" (UID: \"161e16b2-16b1-4485-aa07-4f8a766d347f\") " pod="metallb-system/controller-6968d8fdc4-7vtfm" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.599103 4773 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.600427 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/161e16b2-16b1-4485-aa07-4f8a766d347f-metrics-certs\") pod \"controller-6968d8fdc4-7vtfm\" (UID: \"161e16b2-16b1-4485-aa07-4f8a766d347f\") " pod="metallb-system/controller-6968d8fdc4-7vtfm" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.611928 4773 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/161e16b2-16b1-4485-aa07-4f8a766d347f-cert\") pod \"controller-6968d8fdc4-7vtfm\" (UID: \"161e16b2-16b1-4485-aa07-4f8a766d347f\") " pod="metallb-system/controller-6968d8fdc4-7vtfm" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.615369 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8v9f\" (UniqueName: \"kubernetes.io/projected/161e16b2-16b1-4485-aa07-4f8a766d347f-kube-api-access-l8v9f\") pod \"controller-6968d8fdc4-7vtfm\" (UID: \"161e16b2-16b1-4485-aa07-4f8a766d347f\") " pod="metallb-system/controller-6968d8fdc4-7vtfm" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.643264 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-7vtfm" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.900627 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/574153bd-b3c2-4a73-81d2-15b68de060ca-metrics-certs\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.901258 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/941adce7-fcb2-4191-920a-e9279f9ac0db-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-s46dq\" (UID: \"941adce7-fcb2-4191-920a-e9279f9ac0db\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-s46dq" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.906444 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/574153bd-b3c2-4a73-81d2-15b68de060ca-metrics-certs\") pod \"frr-k8s-28242\" (UID: \"574153bd-b3c2-4a73-81d2-15b68de060ca\") " pod="metallb-system/frr-k8s-28242" Jan 22 12:09:29 crc kubenswrapper[4773]: I0122 12:09:29.909136 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/941adce7-fcb2-4191-920a-e9279f9ac0db-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-s46dq\" (UID: \"941adce7-fcb2-4191-920a-e9279f9ac0db\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-s46dq" Jan 22 12:09:30 crc kubenswrapper[4773]: I0122 12:09:30.002918 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-metrics-certs\") pod \"speaker-8d28f\" (UID: \"8f8f8131-f668-4d0c-be50-21c1eb7b62ff\") " pod="metallb-system/speaker-8d28f" Jan 22 12:09:30 crc kubenswrapper[4773]: I0122 12:09:30.002969 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-memberlist\") pod \"speaker-8d28f\" (UID: \"8f8f8131-f668-4d0c-be50-21c1eb7b62ff\") " pod="metallb-system/speaker-8d28f" Jan 22 12:09:30 crc kubenswrapper[4773]: E0122 12:09:30.003228 4773 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 22 12:09:30 crc kubenswrapper[4773]: E0122 12:09:30.003345 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-memberlist podName:8f8f8131-f668-4d0c-be50-21c1eb7b62ff nodeName:}" failed. 
No retries permitted until 2026-01-22 12:09:31.003326244 +0000 UTC m=+878.581442069 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-memberlist") pod "speaker-8d28f" (UID: "8f8f8131-f668-4d0c-be50-21c1eb7b62ff") : secret "metallb-memberlist" not found Jan 22 12:09:30 crc kubenswrapper[4773]: I0122 12:09:30.006739 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-metrics-certs\") pod \"speaker-8d28f\" (UID: \"8f8f8131-f668-4d0c-be50-21c1eb7b62ff\") " pod="metallb-system/speaker-8d28f" Jan 22 12:09:30 crc kubenswrapper[4773]: I0122 12:09:30.098368 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-28242" Jan 22 12:09:30 crc kubenswrapper[4773]: I0122 12:09:30.106426 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-s46dq" Jan 22 12:09:30 crc kubenswrapper[4773]: I0122 12:09:30.167733 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-7vtfm"] Jan 22 12:09:30 crc kubenswrapper[4773]: W0122 12:09:30.181751 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod161e16b2_16b1_4485_aa07_4f8a766d347f.slice/crio-e1d6e9ace2d4bd37c2c2ff9d05f589780311626ef3f7d4fa0a919abb3b393bf0 WatchSource:0}: Error finding container e1d6e9ace2d4bd37c2c2ff9d05f589780311626ef3f7d4fa0a919abb3b393bf0: Status 404 returned error can't find the container with id e1d6e9ace2d4bd37c2c2ff9d05f589780311626ef3f7d4fa0a919abb3b393bf0 Jan 22 12:09:30 crc kubenswrapper[4773]: I0122 12:09:30.340383 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-s46dq"] Jan 22 12:09:30 crc kubenswrapper[4773]: W0122 12:09:30.347401 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod941adce7_fcb2_4191_920a_e9279f9ac0db.slice/crio-5804c31ff44bf5eda7aa0b166fa282f57d6dba81638d0d9263ec3a90d249da76 WatchSource:0}: Error finding container 5804c31ff44bf5eda7aa0b166fa282f57d6dba81638d0d9263ec3a90d249da76: Status 404 returned error can't find the container with id 5804c31ff44bf5eda7aa0b166fa282f57d6dba81638d0d9263ec3a90d249da76 Jan 22 12:09:30 crc kubenswrapper[4773]: I0122 12:09:30.767848 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-s46dq" event={"ID":"941adce7-fcb2-4191-920a-e9279f9ac0db","Type":"ContainerStarted","Data":"5804c31ff44bf5eda7aa0b166fa282f57d6dba81638d0d9263ec3a90d249da76"} Jan 22 12:09:30 crc kubenswrapper[4773]: I0122 12:09:30.769579 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-7vtfm" event={"ID":"161e16b2-16b1-4485-aa07-4f8a766d347f","Type":"ContainerStarted","Data":"f0c2c519e2771b05cb9b7888b62b3ef0cce417d82b6ace577b4a82d6dfa5ec11"} Jan 22 12:09:30 crc kubenswrapper[4773]: I0122 12:09:30.769650 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-7vtfm" Jan 22 12:09:30 crc kubenswrapper[4773]: I0122 12:09:30.769668 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-7vtfm" 
event={"ID":"161e16b2-16b1-4485-aa07-4f8a766d347f","Type":"ContainerStarted","Data":"f3af1cef7579d58dac4e999935b669c6345436b5e79525e7e5b3a16a4ce57358"} Jan 22 12:09:30 crc kubenswrapper[4773]: I0122 12:09:30.769681 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-7vtfm" event={"ID":"161e16b2-16b1-4485-aa07-4f8a766d347f","Type":"ContainerStarted","Data":"e1d6e9ace2d4bd37c2c2ff9d05f589780311626ef3f7d4fa0a919abb3b393bf0"} Jan 22 12:09:30 crc kubenswrapper[4773]: I0122 12:09:30.770499 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-28242" event={"ID":"574153bd-b3c2-4a73-81d2-15b68de060ca","Type":"ContainerStarted","Data":"498e9e22291080d242ec828a7ca73500cfbf68f1d15ad7992883398b6ba66b19"} Jan 22 12:09:30 crc kubenswrapper[4773]: I0122 12:09:30.798678 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-7vtfm" podStartSLOduration=1.798657636 podStartE2EDuration="1.798657636s" podCreationTimestamp="2026-01-22 12:09:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:09:30.791800604 +0000 UTC m=+878.369916439" watchObservedRunningTime="2026-01-22 12:09:30.798657636 +0000 UTC m=+878.376773461" Jan 22 12:09:31 crc kubenswrapper[4773]: I0122 12:09:31.015770 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-memberlist\") pod \"speaker-8d28f\" (UID: \"8f8f8131-f668-4d0c-be50-21c1eb7b62ff\") " pod="metallb-system/speaker-8d28f" Jan 22 12:09:31 crc kubenswrapper[4773]: I0122 12:09:31.021464 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8f8f8131-f668-4d0c-be50-21c1eb7b62ff-memberlist\") pod \"speaker-8d28f\" (UID: \"8f8f8131-f668-4d0c-be50-21c1eb7b62ff\") " pod="metallb-system/speaker-8d28f" Jan 22 12:09:31 crc kubenswrapper[4773]: I0122 12:09:31.113258 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-8d28f" Jan 22 12:09:31 crc kubenswrapper[4773]: W0122 12:09:31.134853 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8f8f8131_f668_4d0c_be50_21c1eb7b62ff.slice/crio-dda7b39a14b137d9c4e1e708873be6391aa624412b69a76bec1f7e156c31ac3f WatchSource:0}: Error finding container dda7b39a14b137d9c4e1e708873be6391aa624412b69a76bec1f7e156c31ac3f: Status 404 returned error can't find the container with id dda7b39a14b137d9c4e1e708873be6391aa624412b69a76bec1f7e156c31ac3f Jan 22 12:09:31 crc kubenswrapper[4773]: I0122 12:09:31.778216 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-8d28f" event={"ID":"8f8f8131-f668-4d0c-be50-21c1eb7b62ff","Type":"ContainerStarted","Data":"6fbffbb214af5d7a4b0c89f05ec2dd934a999e0ce7c6ca0c32018a13bfb47f57"} Jan 22 12:09:31 crc kubenswrapper[4773]: I0122 12:09:31.778538 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-8d28f" event={"ID":"8f8f8131-f668-4d0c-be50-21c1eb7b62ff","Type":"ContainerStarted","Data":"5ece66115792f949301b0f0cf07c8c85ea5123bbeec41272d3262653715b73fb"} Jan 22 12:09:31 crc kubenswrapper[4773]: I0122 12:09:31.778551 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-8d28f" event={"ID":"8f8f8131-f668-4d0c-be50-21c1eb7b62ff","Type":"ContainerStarted","Data":"dda7b39a14b137d9c4e1e708873be6391aa624412b69a76bec1f7e156c31ac3f"} Jan 22 12:09:31 crc kubenswrapper[4773]: I0122 12:09:31.779216 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-8d28f" Jan 22 12:09:31 crc kubenswrapper[4773]: I0122 12:09:31.822832 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-8d28f" podStartSLOduration=2.822810081 podStartE2EDuration="2.822810081s" podCreationTimestamp="2026-01-22 12:09:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:09:31.816968096 +0000 UTC m=+879.395083921" watchObservedRunningTime="2026-01-22 12:09:31.822810081 +0000 UTC m=+879.400925906" Jan 22 12:09:38 crc kubenswrapper[4773]: I0122 12:09:38.208364 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bbn9r"] Jan 22 12:09:38 crc kubenswrapper[4773]: I0122 12:09:38.210450 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bbn9r"] Jan 22 12:09:38 crc kubenswrapper[4773]: I0122 12:09:38.210557 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bbn9r" Jan 22 12:09:38 crc kubenswrapper[4773]: I0122 12:09:38.285707 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gr6wq\" (UniqueName: \"kubernetes.io/projected/6d9036ea-93ec-4784-a703-2a4db61c0f23-kube-api-access-gr6wq\") pod \"community-operators-bbn9r\" (UID: \"6d9036ea-93ec-4784-a703-2a4db61c0f23\") " pod="openshift-marketplace/community-operators-bbn9r" Jan 22 12:09:38 crc kubenswrapper[4773]: I0122 12:09:38.285772 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d9036ea-93ec-4784-a703-2a4db61c0f23-catalog-content\") pod \"community-operators-bbn9r\" (UID: \"6d9036ea-93ec-4784-a703-2a4db61c0f23\") " pod="openshift-marketplace/community-operators-bbn9r" Jan 22 12:09:38 crc kubenswrapper[4773]: I0122 12:09:38.285813 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d9036ea-93ec-4784-a703-2a4db61c0f23-utilities\") pod \"community-operators-bbn9r\" (UID: \"6d9036ea-93ec-4784-a703-2a4db61c0f23\") " pod="openshift-marketplace/community-operators-bbn9r" Jan 22 12:09:38 crc kubenswrapper[4773]: I0122 12:09:38.386815 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gr6wq\" (UniqueName: \"kubernetes.io/projected/6d9036ea-93ec-4784-a703-2a4db61c0f23-kube-api-access-gr6wq\") pod \"community-operators-bbn9r\" (UID: \"6d9036ea-93ec-4784-a703-2a4db61c0f23\") " pod="openshift-marketplace/community-operators-bbn9r" Jan 22 12:09:38 crc kubenswrapper[4773]: I0122 12:09:38.386857 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d9036ea-93ec-4784-a703-2a4db61c0f23-catalog-content\") pod \"community-operators-bbn9r\" (UID: \"6d9036ea-93ec-4784-a703-2a4db61c0f23\") " pod="openshift-marketplace/community-operators-bbn9r" Jan 22 12:09:38 crc kubenswrapper[4773]: I0122 12:09:38.386888 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d9036ea-93ec-4784-a703-2a4db61c0f23-utilities\") pod \"community-operators-bbn9r\" (UID: \"6d9036ea-93ec-4784-a703-2a4db61c0f23\") " pod="openshift-marketplace/community-operators-bbn9r" Jan 22 12:09:38 crc kubenswrapper[4773]: I0122 12:09:38.387256 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d9036ea-93ec-4784-a703-2a4db61c0f23-utilities\") pod \"community-operators-bbn9r\" (UID: \"6d9036ea-93ec-4784-a703-2a4db61c0f23\") " pod="openshift-marketplace/community-operators-bbn9r" Jan 22 12:09:38 crc kubenswrapper[4773]: I0122 12:09:38.387628 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d9036ea-93ec-4784-a703-2a4db61c0f23-catalog-content\") pod \"community-operators-bbn9r\" (UID: \"6d9036ea-93ec-4784-a703-2a4db61c0f23\") " pod="openshift-marketplace/community-operators-bbn9r" Jan 22 12:09:38 crc kubenswrapper[4773]: I0122 12:09:38.417832 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gr6wq\" (UniqueName: \"kubernetes.io/projected/6d9036ea-93ec-4784-a703-2a4db61c0f23-kube-api-access-gr6wq\") pod 
\"community-operators-bbn9r\" (UID: \"6d9036ea-93ec-4784-a703-2a4db61c0f23\") " pod="openshift-marketplace/community-operators-bbn9r" Jan 22 12:09:38 crc kubenswrapper[4773]: I0122 12:09:38.533856 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bbn9r" Jan 22 12:09:40 crc kubenswrapper[4773]: I0122 12:09:40.739573 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-759fp"] Jan 22 12:09:40 crc kubenswrapper[4773]: I0122 12:09:40.742202 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-759fp" Jan 22 12:09:40 crc kubenswrapper[4773]: I0122 12:09:40.757823 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-759fp"] Jan 22 12:09:40 crc kubenswrapper[4773]: I0122 12:09:40.940561 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba-catalog-content\") pod \"redhat-marketplace-759fp\" (UID: \"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba\") " pod="openshift-marketplace/redhat-marketplace-759fp" Jan 22 12:09:40 crc kubenswrapper[4773]: I0122 12:09:40.940707 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjfc9\" (UniqueName: \"kubernetes.io/projected/d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba-kube-api-access-wjfc9\") pod \"redhat-marketplace-759fp\" (UID: \"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba\") " pod="openshift-marketplace/redhat-marketplace-759fp" Jan 22 12:09:40 crc kubenswrapper[4773]: I0122 12:09:40.940776 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba-utilities\") pod \"redhat-marketplace-759fp\" (UID: \"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba\") " pod="openshift-marketplace/redhat-marketplace-759fp" Jan 22 12:09:41 crc kubenswrapper[4773]: I0122 12:09:41.042370 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjfc9\" (UniqueName: \"kubernetes.io/projected/d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba-kube-api-access-wjfc9\") pod \"redhat-marketplace-759fp\" (UID: \"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba\") " pod="openshift-marketplace/redhat-marketplace-759fp" Jan 22 12:09:41 crc kubenswrapper[4773]: I0122 12:09:41.042486 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba-utilities\") pod \"redhat-marketplace-759fp\" (UID: \"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba\") " pod="openshift-marketplace/redhat-marketplace-759fp" Jan 22 12:09:41 crc kubenswrapper[4773]: I0122 12:09:41.042526 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba-catalog-content\") pod \"redhat-marketplace-759fp\" (UID: \"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba\") " pod="openshift-marketplace/redhat-marketplace-759fp" Jan 22 12:09:41 crc kubenswrapper[4773]: I0122 12:09:41.043217 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba-catalog-content\") pod 
\"redhat-marketplace-759fp\" (UID: \"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba\") " pod="openshift-marketplace/redhat-marketplace-759fp" Jan 22 12:09:41 crc kubenswrapper[4773]: I0122 12:09:41.043348 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba-utilities\") pod \"redhat-marketplace-759fp\" (UID: \"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba\") " pod="openshift-marketplace/redhat-marketplace-759fp" Jan 22 12:09:41 crc kubenswrapper[4773]: I0122 12:09:41.060430 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjfc9\" (UniqueName: \"kubernetes.io/projected/d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba-kube-api-access-wjfc9\") pod \"redhat-marketplace-759fp\" (UID: \"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba\") " pod="openshift-marketplace/redhat-marketplace-759fp" Jan 22 12:09:41 crc kubenswrapper[4773]: I0122 12:09:41.072390 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-759fp" Jan 22 12:09:41 crc kubenswrapper[4773]: I0122 12:09:41.116801 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-8d28f" Jan 22 12:09:42 crc kubenswrapper[4773]: I0122 12:09:42.057114 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-759fp"] Jan 22 12:09:42 crc kubenswrapper[4773]: W0122 12:09:42.068227 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd8ace3fd_01d5_4b18_b4a6_b8ad7b72c9ba.slice/crio-22b7d1e0b9d11106bbfb0a586aab40a2cb5b5afefb0fd65fc68fdcda6ac607e2 WatchSource:0}: Error finding container 22b7d1e0b9d11106bbfb0a586aab40a2cb5b5afefb0fd65fc68fdcda6ac607e2: Status 404 returned error can't find the container with id 22b7d1e0b9d11106bbfb0a586aab40a2cb5b5afefb0fd65fc68fdcda6ac607e2 Jan 22 12:09:42 crc kubenswrapper[4773]: I0122 12:09:42.217151 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bbn9r"] Jan 22 12:09:42 crc kubenswrapper[4773]: W0122 12:09:42.224220 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6d9036ea_93ec_4784_a703_2a4db61c0f23.slice/crio-d63516f6c1b8a0b2d8fe82dbfbfba719f7e8134f331f343ebf451638f7d08516 WatchSource:0}: Error finding container d63516f6c1b8a0b2d8fe82dbfbfba719f7e8134f331f343ebf451638f7d08516: Status 404 returned error can't find the container with id d63516f6c1b8a0b2d8fe82dbfbfba719f7e8134f331f343ebf451638f7d08516 Jan 22 12:09:42 crc kubenswrapper[4773]: I0122 12:09:42.265750 4773 generic.go:334] "Generic (PLEG): container finished" podID="d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba" containerID="e2b35df24a80e157d23f6b4d1a4ae17ff97289b45ad692002651d0ea52380cd1" exitCode=0 Jan 22 12:09:42 crc kubenswrapper[4773]: I0122 12:09:42.265834 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-759fp" event={"ID":"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba","Type":"ContainerDied","Data":"e2b35df24a80e157d23f6b4d1a4ae17ff97289b45ad692002651d0ea52380cd1"} Jan 22 12:09:42 crc kubenswrapper[4773]: I0122 12:09:42.265867 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-759fp" 
event={"ID":"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba","Type":"ContainerStarted","Data":"22b7d1e0b9d11106bbfb0a586aab40a2cb5b5afefb0fd65fc68fdcda6ac607e2"} Jan 22 12:09:42 crc kubenswrapper[4773]: I0122 12:09:42.270312 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-s46dq" event={"ID":"941adce7-fcb2-4191-920a-e9279f9ac0db","Type":"ContainerStarted","Data":"e36be0b8d6c1c39a374ac4abea90dccf9d7d6906cb86d350c5a74f6953cf078f"} Jan 22 12:09:42 crc kubenswrapper[4773]: I0122 12:09:42.270420 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-s46dq" Jan 22 12:09:42 crc kubenswrapper[4773]: I0122 12:09:42.272720 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bbn9r" event={"ID":"6d9036ea-93ec-4784-a703-2a4db61c0f23","Type":"ContainerStarted","Data":"d63516f6c1b8a0b2d8fe82dbfbfba719f7e8134f331f343ebf451638f7d08516"} Jan 22 12:09:42 crc kubenswrapper[4773]: I0122 12:09:42.274910 4773 generic.go:334] "Generic (PLEG): container finished" podID="574153bd-b3c2-4a73-81d2-15b68de060ca" containerID="79a757fb20e6cc41b11417a7ca0c3e5c02d6297bb825f3d71d0311748ab4973d" exitCode=0 Jan 22 12:09:42 crc kubenswrapper[4773]: I0122 12:09:42.274971 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-28242" event={"ID":"574153bd-b3c2-4a73-81d2-15b68de060ca","Type":"ContainerDied","Data":"79a757fb20e6cc41b11417a7ca0c3e5c02d6297bb825f3d71d0311748ab4973d"} Jan 22 12:09:42 crc kubenswrapper[4773]: I0122 12:09:42.331676 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-s46dq" podStartSLOduration=2.005058088 podStartE2EDuration="13.331637113s" podCreationTimestamp="2026-01-22 12:09:29 +0000 UTC" firstStartedPulling="2026-01-22 12:09:30.354270567 +0000 UTC m=+877.932386392" lastFinishedPulling="2026-01-22 12:09:41.680849592 +0000 UTC m=+889.258965417" observedRunningTime="2026-01-22 12:09:42.323480194 +0000 UTC m=+889.901596039" watchObservedRunningTime="2026-01-22 12:09:42.331637113 +0000 UTC m=+889.909752948" Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.027537 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7"] Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.029044 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.030964 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.040049 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7"] Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.136160 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d833d986-4a39-441d-8e37-20974f894e6e-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7\" (UID: \"d833d986-4a39-441d-8e37-20974f894e6e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.136223 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmrsz\" (UniqueName: \"kubernetes.io/projected/d833d986-4a39-441d-8e37-20974f894e6e-kube-api-access-cmrsz\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7\" (UID: \"d833d986-4a39-441d-8e37-20974f894e6e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.136548 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d833d986-4a39-441d-8e37-20974f894e6e-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7\" (UID: \"d833d986-4a39-441d-8e37-20974f894e6e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.237279 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d833d986-4a39-441d-8e37-20974f894e6e-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7\" (UID: \"d833d986-4a39-441d-8e37-20974f894e6e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.237363 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d833d986-4a39-441d-8e37-20974f894e6e-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7\" (UID: \"d833d986-4a39-441d-8e37-20974f894e6e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.237394 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmrsz\" (UniqueName: \"kubernetes.io/projected/d833d986-4a39-441d-8e37-20974f894e6e-kube-api-access-cmrsz\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7\" (UID: \"d833d986-4a39-441d-8e37-20974f894e6e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.238235 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/d833d986-4a39-441d-8e37-20974f894e6e-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7\" (UID: \"d833d986-4a39-441d-8e37-20974f894e6e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.238518 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d833d986-4a39-441d-8e37-20974f894e6e-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7\" (UID: \"d833d986-4a39-441d-8e37-20974f894e6e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.258164 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmrsz\" (UniqueName: \"kubernetes.io/projected/d833d986-4a39-441d-8e37-20974f894e6e-kube-api-access-cmrsz\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7\" (UID: \"d833d986-4a39-441d-8e37-20974f894e6e\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.282683 4773 generic.go:334] "Generic (PLEG): container finished" podID="574153bd-b3c2-4a73-81d2-15b68de060ca" containerID="f08549fd7e1406a5978e91355bf778cefe438e958f48a6aa24736f91a88f4c50" exitCode=0 Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.282780 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-28242" event={"ID":"574153bd-b3c2-4a73-81d2-15b68de060ca","Type":"ContainerDied","Data":"f08549fd7e1406a5978e91355bf778cefe438e958f48a6aa24736f91a88f4c50"} Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.286184 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-759fp" event={"ID":"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba","Type":"ContainerStarted","Data":"974a8bdb16257f782834d6c41d39ce7c5b91f9023e4cb35401c94ba4b0ecc2ab"} Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.289750 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d9036ea-93ec-4784-a703-2a4db61c0f23" containerID="21e4aaa5f72b938780e4ade59c8fe2f06fcae5c13a7e54c4ffad612b24e21cf6" exitCode=0 Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.290943 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bbn9r" event={"ID":"6d9036ea-93ec-4784-a703-2a4db61c0f23","Type":"ContainerDied","Data":"21e4aaa5f72b938780e4ade59c8fe2f06fcae5c13a7e54c4ffad612b24e21cf6"} Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.384734 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" Jan 22 12:09:43 crc kubenswrapper[4773]: I0122 12:09:43.829728 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7"] Jan 22 12:09:43 crc kubenswrapper[4773]: W0122 12:09:43.845537 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd833d986_4a39_441d_8e37_20974f894e6e.slice/crio-b931d86440d7b9297f944ec610079af55a02583324f044de01b3e47f016c9839 WatchSource:0}: Error finding container b931d86440d7b9297f944ec610079af55a02583324f044de01b3e47f016c9839: Status 404 returned error can't find the container with id b931d86440d7b9297f944ec610079af55a02583324f044de01b3e47f016c9839 Jan 22 12:09:44 crc kubenswrapper[4773]: I0122 12:09:44.303184 4773 generic.go:334] "Generic (PLEG): container finished" podID="d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba" containerID="974a8bdb16257f782834d6c41d39ce7c5b91f9023e4cb35401c94ba4b0ecc2ab" exitCode=0 Jan 22 12:09:44 crc kubenswrapper[4773]: I0122 12:09:44.303312 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-759fp" event={"ID":"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba","Type":"ContainerDied","Data":"974a8bdb16257f782834d6c41d39ce7c5b91f9023e4cb35401c94ba4b0ecc2ab"} Jan 22 12:09:44 crc kubenswrapper[4773]: I0122 12:09:44.309595 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" event={"ID":"d833d986-4a39-441d-8e37-20974f894e6e","Type":"ContainerStarted","Data":"331fe461123a4392bb7f49d577daac330032c5f0a941a72ef79beb795e4c9ee2"} Jan 22 12:09:44 crc kubenswrapper[4773]: I0122 12:09:44.309656 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" event={"ID":"d833d986-4a39-441d-8e37-20974f894e6e","Type":"ContainerStarted","Data":"b931d86440d7b9297f944ec610079af55a02583324f044de01b3e47f016c9839"} Jan 22 12:09:44 crc kubenswrapper[4773]: I0122 12:09:44.313998 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bbn9r" event={"ID":"6d9036ea-93ec-4784-a703-2a4db61c0f23","Type":"ContainerStarted","Data":"5bc42b180487e4387161b6296038865cc23ce3df6b274489f001e0d65344805f"} Jan 22 12:09:44 crc kubenswrapper[4773]: I0122 12:09:44.316924 4773 generic.go:334] "Generic (PLEG): container finished" podID="574153bd-b3c2-4a73-81d2-15b68de060ca" containerID="a8e2fd01363e40b24b0dbf87a3f892657e8106020652d1eb20d9733c06d3b023" exitCode=0 Jan 22 12:09:44 crc kubenswrapper[4773]: I0122 12:09:44.316966 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-28242" event={"ID":"574153bd-b3c2-4a73-81d2-15b68de060ca","Type":"ContainerDied","Data":"a8e2fd01363e40b24b0dbf87a3f892657e8106020652d1eb20d9733c06d3b023"} Jan 22 12:09:45 crc kubenswrapper[4773]: I0122 12:09:45.345165 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-28242" event={"ID":"574153bd-b3c2-4a73-81d2-15b68de060ca","Type":"ContainerStarted","Data":"789decf14bf847a4e393f8178f25a6bdca09a16673b367531b205872b16ac60b"} Jan 22 12:09:45 crc kubenswrapper[4773]: I0122 12:09:45.348310 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-759fp" 
event={"ID":"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba","Type":"ContainerStarted","Data":"6d8ab618025f544837bde83c7afdbf25e25020f7b8171aaef5cc2f77f546f683"} Jan 22 12:09:45 crc kubenswrapper[4773]: I0122 12:09:45.350126 4773 generic.go:334] "Generic (PLEG): container finished" podID="d833d986-4a39-441d-8e37-20974f894e6e" containerID="331fe461123a4392bb7f49d577daac330032c5f0a941a72ef79beb795e4c9ee2" exitCode=0 Jan 22 12:09:45 crc kubenswrapper[4773]: I0122 12:09:45.350208 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" event={"ID":"d833d986-4a39-441d-8e37-20974f894e6e","Type":"ContainerDied","Data":"331fe461123a4392bb7f49d577daac330032c5f0a941a72ef79beb795e4c9ee2"} Jan 22 12:09:45 crc kubenswrapper[4773]: I0122 12:09:45.369370 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-759fp" podStartSLOduration=2.8521922330000002 podStartE2EDuration="5.369356089s" podCreationTimestamp="2026-01-22 12:09:40 +0000 UTC" firstStartedPulling="2026-01-22 12:09:42.269204148 +0000 UTC m=+889.847319973" lastFinishedPulling="2026-01-22 12:09:44.786368004 +0000 UTC m=+892.364483829" observedRunningTime="2026-01-22 12:09:45.367746963 +0000 UTC m=+892.945862808" watchObservedRunningTime="2026-01-22 12:09:45.369356089 +0000 UTC m=+892.947471914" Jan 22 12:09:46 crc kubenswrapper[4773]: I0122 12:09:46.594584 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d9036ea-93ec-4784-a703-2a4db61c0f23" containerID="5bc42b180487e4387161b6296038865cc23ce3df6b274489f001e0d65344805f" exitCode=0 Jan 22 12:09:46 crc kubenswrapper[4773]: I0122 12:09:46.594676 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bbn9r" event={"ID":"6d9036ea-93ec-4784-a703-2a4db61c0f23","Type":"ContainerDied","Data":"5bc42b180487e4387161b6296038865cc23ce3df6b274489f001e0d65344805f"} Jan 22 12:09:46 crc kubenswrapper[4773]: I0122 12:09:46.618185 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-28242" event={"ID":"574153bd-b3c2-4a73-81d2-15b68de060ca","Type":"ContainerStarted","Data":"e1bdf7f22be9f9b847b318520692bfc3d1d969c76c16ddbb3cedeeb40816711f"} Jan 22 12:09:47 crc kubenswrapper[4773]: I0122 12:09:47.629248 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bbn9r" event={"ID":"6d9036ea-93ec-4784-a703-2a4db61c0f23","Type":"ContainerStarted","Data":"2a03d72a4cef357fbee7f6a951e96bd13d57d615a7ad2c9906d00e8939045085"} Jan 22 12:09:47 crc kubenswrapper[4773]: I0122 12:09:47.637872 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-28242" event={"ID":"574153bd-b3c2-4a73-81d2-15b68de060ca","Type":"ContainerStarted","Data":"29476d7b600cc8b1c7ae7db13a5e92c8f05e0f22d605e368d1a9b45a57455c24"} Jan 22 12:09:47 crc kubenswrapper[4773]: I0122 12:09:47.637922 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-28242" event={"ID":"574153bd-b3c2-4a73-81d2-15b68de060ca","Type":"ContainerStarted","Data":"32e9a6d3567af67c8fbb0ed9a19b8682beb6a1828ecf4a0d6a31caaa1fd77e6d"} Jan 22 12:09:47 crc kubenswrapper[4773]: I0122 12:09:47.637936 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-28242" event={"ID":"574153bd-b3c2-4a73-81d2-15b68de060ca","Type":"ContainerStarted","Data":"5eb4f64390a89aed28401a56e9f3f8960a133429c55b0231f3da5078ee307ae7"} Jan 22 12:09:47 crc 
kubenswrapper[4773]: I0122 12:09:47.637949 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-28242" event={"ID":"574153bd-b3c2-4a73-81d2-15b68de060ca","Type":"ContainerStarted","Data":"8b94026c7369b940ef19cb299609caab56522a89931cea3bf93d650d5d1b00d3"} Jan 22 12:09:47 crc kubenswrapper[4773]: I0122 12:09:47.638356 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-28242" Jan 22 12:09:47 crc kubenswrapper[4773]: I0122 12:09:47.725421 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bbn9r" podStartSLOduration=5.893388796 podStartE2EDuration="9.725349594s" podCreationTimestamp="2026-01-22 12:09:38 +0000 UTC" firstStartedPulling="2026-01-22 12:09:43.291811558 +0000 UTC m=+890.869927383" lastFinishedPulling="2026-01-22 12:09:47.123772356 +0000 UTC m=+894.701888181" observedRunningTime="2026-01-22 12:09:47.698085458 +0000 UTC m=+895.276201273" watchObservedRunningTime="2026-01-22 12:09:47.725349594 +0000 UTC m=+895.303465419" Jan 22 12:09:47 crc kubenswrapper[4773]: I0122 12:09:47.728013 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-28242" podStartSLOduration=7.288210032 podStartE2EDuration="18.728001659s" podCreationTimestamp="2026-01-22 12:09:29 +0000 UTC" firstStartedPulling="2026-01-22 12:09:30.246637652 +0000 UTC m=+877.824753477" lastFinishedPulling="2026-01-22 12:09:41.686429279 +0000 UTC m=+889.264545104" observedRunningTime="2026-01-22 12:09:47.723498342 +0000 UTC m=+895.301614187" watchObservedRunningTime="2026-01-22 12:09:47.728001659 +0000 UTC m=+895.306117484" Jan 22 12:09:48 crc kubenswrapper[4773]: I0122 12:09:48.577516 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bbn9r" Jan 22 12:09:48 crc kubenswrapper[4773]: I0122 12:09:48.577589 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bbn9r" Jan 22 12:09:49 crc kubenswrapper[4773]: I0122 12:09:49.698194 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-7vtfm" Jan 22 12:09:49 crc kubenswrapper[4773]: I0122 12:09:49.705007 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-bbn9r" podUID="6d9036ea-93ec-4784-a703-2a4db61c0f23" containerName="registry-server" probeResult="failure" output=< Jan 22 12:09:49 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 12:09:49 crc kubenswrapper[4773]: > Jan 22 12:09:50 crc kubenswrapper[4773]: I0122 12:09:50.158386 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-28242" Jan 22 12:09:50 crc kubenswrapper[4773]: I0122 12:09:50.214992 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-28242" Jan 22 12:09:51 crc kubenswrapper[4773]: I0122 12:09:51.072570 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-759fp" Jan 22 12:09:51 crc kubenswrapper[4773]: I0122 12:09:51.072857 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-759fp" Jan 22 12:09:51 crc kubenswrapper[4773]: I0122 12:09:51.120071 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-759fp" Jan 22 12:09:51 crc kubenswrapper[4773]: I0122 12:09:51.789027 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-759fp" Jan 22 12:09:53 crc kubenswrapper[4773]: E0122 12:09:53.425549 4773 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd833d986_4a39_441d_8e37_20974f894e6e.slice/crio-022700468f92e215f4bd9135f702424fab98179dc295aa4fe0139c2f0dcbed7c.scope\": RecentStats: unable to find data in memory cache]" Jan 22 12:09:53 crc kubenswrapper[4773]: I0122 12:09:53.577693 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-759fp"] Jan 22 12:09:53 crc kubenswrapper[4773]: I0122 12:09:53.730629 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" event={"ID":"d833d986-4a39-441d-8e37-20974f894e6e","Type":"ContainerStarted","Data":"022700468f92e215f4bd9135f702424fab98179dc295aa4fe0139c2f0dcbed7c"} Jan 22 12:09:54 crc kubenswrapper[4773]: I0122 12:09:54.739682 4773 generic.go:334] "Generic (PLEG): container finished" podID="d833d986-4a39-441d-8e37-20974f894e6e" containerID="022700468f92e215f4bd9135f702424fab98179dc295aa4fe0139c2f0dcbed7c" exitCode=0 Jan 22 12:09:54 crc kubenswrapper[4773]: I0122 12:09:54.739943 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" event={"ID":"d833d986-4a39-441d-8e37-20974f894e6e","Type":"ContainerDied","Data":"022700468f92e215f4bd9135f702424fab98179dc295aa4fe0139c2f0dcbed7c"} Jan 22 12:09:54 crc kubenswrapper[4773]: I0122 12:09:54.740523 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-759fp" podUID="d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba" containerName="registry-server" containerID="cri-o://6d8ab618025f544837bde83c7afdbf25e25020f7b8171aaef5cc2f77f546f683" gracePeriod=2 Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.102185 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-759fp" Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.160425 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjfc9\" (UniqueName: \"kubernetes.io/projected/d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba-kube-api-access-wjfc9\") pod \"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba\" (UID: \"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba\") " Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.160481 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba-catalog-content\") pod \"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba\" (UID: \"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba\") " Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.160543 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba-utilities\") pod \"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba\" (UID: \"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba\") " Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.161393 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba-utilities" (OuterVolumeSpecName: "utilities") pod "d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba" (UID: "d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.168745 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba-kube-api-access-wjfc9" (OuterVolumeSpecName: "kube-api-access-wjfc9") pod "d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba" (UID: "d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba"). InnerVolumeSpecName "kube-api-access-wjfc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.183999 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba" (UID: "d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.262090 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjfc9\" (UniqueName: \"kubernetes.io/projected/d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba-kube-api-access-wjfc9\") on node \"crc\" DevicePath \"\"" Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.262144 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.262156 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.747451 4773 generic.go:334] "Generic (PLEG): container finished" podID="d833d986-4a39-441d-8e37-20974f894e6e" containerID="53204a096016a0c6edb2bda0850673b84206d6a8d0593e01eb1b662c06c64397" exitCode=0 Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.747532 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" event={"ID":"d833d986-4a39-441d-8e37-20974f894e6e","Type":"ContainerDied","Data":"53204a096016a0c6edb2bda0850673b84206d6a8d0593e01eb1b662c06c64397"} Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.750208 4773 generic.go:334] "Generic (PLEG): container finished" podID="d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba" containerID="6d8ab618025f544837bde83c7afdbf25e25020f7b8171aaef5cc2f77f546f683" exitCode=0 Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.750264 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-759fp" event={"ID":"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba","Type":"ContainerDied","Data":"6d8ab618025f544837bde83c7afdbf25e25020f7b8171aaef5cc2f77f546f683"} Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.750295 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-759fp" Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.750321 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-759fp" event={"ID":"d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba","Type":"ContainerDied","Data":"22b7d1e0b9d11106bbfb0a586aab40a2cb5b5afefb0fd65fc68fdcda6ac607e2"} Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.750340 4773 scope.go:117] "RemoveContainer" containerID="6d8ab618025f544837bde83c7afdbf25e25020f7b8171aaef5cc2f77f546f683" Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.767412 4773 scope.go:117] "RemoveContainer" containerID="974a8bdb16257f782834d6c41d39ce7c5b91f9023e4cb35401c94ba4b0ecc2ab" Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.783013 4773 scope.go:117] "RemoveContainer" containerID="e2b35df24a80e157d23f6b4d1a4ae17ff97289b45ad692002651d0ea52380cd1" Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.784358 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-759fp"] Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.788732 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-759fp"] Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.796226 4773 scope.go:117] "RemoveContainer" containerID="6d8ab618025f544837bde83c7afdbf25e25020f7b8171aaef5cc2f77f546f683" Jan 22 12:09:55 crc kubenswrapper[4773]: E0122 12:09:55.796713 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d8ab618025f544837bde83c7afdbf25e25020f7b8171aaef5cc2f77f546f683\": container with ID starting with 6d8ab618025f544837bde83c7afdbf25e25020f7b8171aaef5cc2f77f546f683 not found: ID does not exist" containerID="6d8ab618025f544837bde83c7afdbf25e25020f7b8171aaef5cc2f77f546f683" Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.796763 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d8ab618025f544837bde83c7afdbf25e25020f7b8171aaef5cc2f77f546f683"} err="failed to get container status \"6d8ab618025f544837bde83c7afdbf25e25020f7b8171aaef5cc2f77f546f683\": rpc error: code = NotFound desc = could not find container \"6d8ab618025f544837bde83c7afdbf25e25020f7b8171aaef5cc2f77f546f683\": container with ID starting with 6d8ab618025f544837bde83c7afdbf25e25020f7b8171aaef5cc2f77f546f683 not found: ID does not exist" Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.796797 4773 scope.go:117] "RemoveContainer" containerID="974a8bdb16257f782834d6c41d39ce7c5b91f9023e4cb35401c94ba4b0ecc2ab" Jan 22 12:09:55 crc kubenswrapper[4773]: E0122 12:09:55.797178 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"974a8bdb16257f782834d6c41d39ce7c5b91f9023e4cb35401c94ba4b0ecc2ab\": container with ID starting with 974a8bdb16257f782834d6c41d39ce7c5b91f9023e4cb35401c94ba4b0ecc2ab not found: ID does not exist" containerID="974a8bdb16257f782834d6c41d39ce7c5b91f9023e4cb35401c94ba4b0ecc2ab" Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.797210 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"974a8bdb16257f782834d6c41d39ce7c5b91f9023e4cb35401c94ba4b0ecc2ab"} err="failed to get container status \"974a8bdb16257f782834d6c41d39ce7c5b91f9023e4cb35401c94ba4b0ecc2ab\": rpc error: code = NotFound desc = could not find 
container \"974a8bdb16257f782834d6c41d39ce7c5b91f9023e4cb35401c94ba4b0ecc2ab\": container with ID starting with 974a8bdb16257f782834d6c41d39ce7c5b91f9023e4cb35401c94ba4b0ecc2ab not found: ID does not exist" Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.797230 4773 scope.go:117] "RemoveContainer" containerID="e2b35df24a80e157d23f6b4d1a4ae17ff97289b45ad692002651d0ea52380cd1" Jan 22 12:09:55 crc kubenswrapper[4773]: E0122 12:09:55.797550 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2b35df24a80e157d23f6b4d1a4ae17ff97289b45ad692002651d0ea52380cd1\": container with ID starting with e2b35df24a80e157d23f6b4d1a4ae17ff97289b45ad692002651d0ea52380cd1 not found: ID does not exist" containerID="e2b35df24a80e157d23f6b4d1a4ae17ff97289b45ad692002651d0ea52380cd1" Jan 22 12:09:55 crc kubenswrapper[4773]: I0122 12:09:55.797571 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2b35df24a80e157d23f6b4d1a4ae17ff97289b45ad692002651d0ea52380cd1"} err="failed to get container status \"e2b35df24a80e157d23f6b4d1a4ae17ff97289b45ad692002651d0ea52380cd1\": rpc error: code = NotFound desc = could not find container \"e2b35df24a80e157d23f6b4d1a4ae17ff97289b45ad692002651d0ea52380cd1\": container with ID starting with e2b35df24a80e157d23f6b4d1a4ae17ff97289b45ad692002651d0ea52380cd1 not found: ID does not exist" Jan 22 12:09:56 crc kubenswrapper[4773]: I0122 12:09:56.667352 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba" path="/var/lib/kubelet/pods/d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba/volumes" Jan 22 12:09:57 crc kubenswrapper[4773]: I0122 12:09:57.071596 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" Jan 22 12:09:57 crc kubenswrapper[4773]: I0122 12:09:57.084389 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d833d986-4a39-441d-8e37-20974f894e6e-bundle\") pod \"d833d986-4a39-441d-8e37-20974f894e6e\" (UID: \"d833d986-4a39-441d-8e37-20974f894e6e\") " Jan 22 12:09:57 crc kubenswrapper[4773]: I0122 12:09:57.084454 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmrsz\" (UniqueName: \"kubernetes.io/projected/d833d986-4a39-441d-8e37-20974f894e6e-kube-api-access-cmrsz\") pod \"d833d986-4a39-441d-8e37-20974f894e6e\" (UID: \"d833d986-4a39-441d-8e37-20974f894e6e\") " Jan 22 12:09:57 crc kubenswrapper[4773]: I0122 12:09:57.084481 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d833d986-4a39-441d-8e37-20974f894e6e-util\") pod \"d833d986-4a39-441d-8e37-20974f894e6e\" (UID: \"d833d986-4a39-441d-8e37-20974f894e6e\") " Jan 22 12:09:57 crc kubenswrapper[4773]: I0122 12:09:57.085537 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d833d986-4a39-441d-8e37-20974f894e6e-bundle" (OuterVolumeSpecName: "bundle") pod "d833d986-4a39-441d-8e37-20974f894e6e" (UID: "d833d986-4a39-441d-8e37-20974f894e6e"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:09:57 crc kubenswrapper[4773]: I0122 12:09:57.134975 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d833d986-4a39-441d-8e37-20974f894e6e-kube-api-access-cmrsz" (OuterVolumeSpecName: "kube-api-access-cmrsz") pod "d833d986-4a39-441d-8e37-20974f894e6e" (UID: "d833d986-4a39-441d-8e37-20974f894e6e"). InnerVolumeSpecName "kube-api-access-cmrsz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:09:57 crc kubenswrapper[4773]: I0122 12:09:57.139259 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d833d986-4a39-441d-8e37-20974f894e6e-util" (OuterVolumeSpecName: "util") pod "d833d986-4a39-441d-8e37-20974f894e6e" (UID: "d833d986-4a39-441d-8e37-20974f894e6e"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:09:57 crc kubenswrapper[4773]: I0122 12:09:57.185607 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmrsz\" (UniqueName: \"kubernetes.io/projected/d833d986-4a39-441d-8e37-20974f894e6e-kube-api-access-cmrsz\") on node \"crc\" DevicePath \"\"" Jan 22 12:09:57 crc kubenswrapper[4773]: I0122 12:09:57.185641 4773 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d833d986-4a39-441d-8e37-20974f894e6e-util\") on node \"crc\" DevicePath \"\"" Jan 22 12:09:57 crc kubenswrapper[4773]: I0122 12:09:57.185652 4773 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d833d986-4a39-441d-8e37-20974f894e6e-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:09:57 crc kubenswrapper[4773]: I0122 12:09:57.764615 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" event={"ID":"d833d986-4a39-441d-8e37-20974f894e6e","Type":"ContainerDied","Data":"b931d86440d7b9297f944ec610079af55a02583324f044de01b3e47f016c9839"} Jan 22 12:09:57 crc kubenswrapper[4773]: I0122 12:09:57.764655 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b931d86440d7b9297f944ec610079af55a02583324f044de01b3e47f016c9839" Jan 22 12:09:57 crc kubenswrapper[4773]: I0122 12:09:57.764673 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7" Jan 22 12:09:58 crc kubenswrapper[4773]: I0122 12:09:58.601641 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bbn9r" Jan 22 12:09:58 crc kubenswrapper[4773]: I0122 12:09:58.647566 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bbn9r" Jan 22 12:10:00 crc kubenswrapper[4773]: I0122 12:10:00.102011 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-28242" Jan 22 12:10:00 crc kubenswrapper[4773]: I0122 12:10:00.114575 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-s46dq" Jan 22 12:10:00 crc kubenswrapper[4773]: I0122 12:10:00.165421 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-6t8vd"] Jan 22 12:10:00 crc kubenswrapper[4773]: E0122 12:10:00.165663 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d833d986-4a39-441d-8e37-20974f894e6e" containerName="extract" Jan 22 12:10:00 crc kubenswrapper[4773]: I0122 12:10:00.165679 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d833d986-4a39-441d-8e37-20974f894e6e" containerName="extract" Jan 22 12:10:00 crc kubenswrapper[4773]: E0122 12:10:00.165691 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d833d986-4a39-441d-8e37-20974f894e6e" containerName="util" Jan 22 12:10:00 crc kubenswrapper[4773]: I0122 12:10:00.165699 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d833d986-4a39-441d-8e37-20974f894e6e" containerName="util" Jan 22 12:10:00 crc kubenswrapper[4773]: E0122 12:10:00.165710 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d833d986-4a39-441d-8e37-20974f894e6e" containerName="pull" Jan 22 12:10:00 crc kubenswrapper[4773]: I0122 12:10:00.165717 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d833d986-4a39-441d-8e37-20974f894e6e" containerName="pull" Jan 22 12:10:00 crc kubenswrapper[4773]: E0122 12:10:00.165731 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba" containerName="extract-utilities" Jan 22 12:10:00 crc kubenswrapper[4773]: I0122 12:10:00.165737 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba" containerName="extract-utilities" Jan 22 12:10:00 crc kubenswrapper[4773]: E0122 12:10:00.165747 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba" containerName="registry-server" Jan 22 12:10:00 crc kubenswrapper[4773]: I0122 12:10:00.165753 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba" containerName="registry-server" Jan 22 12:10:00 crc kubenswrapper[4773]: E0122 12:10:00.165759 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba" containerName="extract-content" Jan 22 12:10:00 crc kubenswrapper[4773]: I0122 12:10:00.165765 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba" containerName="extract-content" Jan 22 12:10:00 crc kubenswrapper[4773]: I0122 12:10:00.165862 4773 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="d833d986-4a39-441d-8e37-20974f894e6e" containerName="extract" Jan 22 12:10:00 crc kubenswrapper[4773]: I0122 12:10:00.165879 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8ace3fd-01d5-4b18-b4a6-b8ad7b72c9ba" containerName="registry-server" Jan 22 12:10:00 crc kubenswrapper[4773]: I0122 12:10:00.166259 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-6t8vd" Jan 22 12:10:00 crc kubenswrapper[4773]: W0122 12:10:00.168347 4773 reflector.go:561] object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-qq889": failed to list *v1.Secret: secrets "cert-manager-operator-controller-manager-dockercfg-qq889" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "cert-manager-operator": no relationship found between node 'crc' and this object Jan 22 12:10:00 crc kubenswrapper[4773]: W0122 12:10:00.168366 4773 reflector.go:561] object-"cert-manager-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "cert-manager-operator": no relationship found between node 'crc' and this object Jan 22 12:10:00 crc kubenswrapper[4773]: E0122 12:10:00.168399 4773 reflector.go:158] "Unhandled Error" err="object-\"cert-manager-operator\"/\"cert-manager-operator-controller-manager-dockercfg-qq889\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"cert-manager-operator-controller-manager-dockercfg-qq889\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"cert-manager-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 12:10:00 crc kubenswrapper[4773]: E0122 12:10:00.168407 4773 reflector.go:158] "Unhandled Error" err="object-\"cert-manager-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"cert-manager-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 12:10:00 crc kubenswrapper[4773]: W0122 12:10:00.168837 4773 reflector.go:561] object-"cert-manager-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "cert-manager-operator": no relationship found between node 'crc' and this object Jan 22 12:10:00 crc kubenswrapper[4773]: E0122 12:10:00.168887 4773 reflector.go:158] "Unhandled Error" err="object-\"cert-manager-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"cert-manager-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 22 12:10:00 crc kubenswrapper[4773]: I0122 12:10:00.218494 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-6t8vd"] Jan 22 12:10:00 crc kubenswrapper[4773]: I0122 12:10:00.298020 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wzvg\" (UniqueName: \"kubernetes.io/projected/14afe6ec-c013-4865-a430-2ad45da63cd2-kube-api-access-4wzvg\") pod \"cert-manager-operator-controller-manager-64cf6dff88-6t8vd\" (UID: \"14afe6ec-c013-4865-a430-2ad45da63cd2\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-6t8vd" Jan 22 12:10:00 crc kubenswrapper[4773]: I0122 12:10:00.298112 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/14afe6ec-c013-4865-a430-2ad45da63cd2-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-6t8vd\" (UID: \"14afe6ec-c013-4865-a430-2ad45da63cd2\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-6t8vd" Jan 22 12:10:00 crc kubenswrapper[4773]: I0122 12:10:00.399126 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/14afe6ec-c013-4865-a430-2ad45da63cd2-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-6t8vd\" (UID: \"14afe6ec-c013-4865-a430-2ad45da63cd2\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-6t8vd" Jan 22 12:10:00 crc kubenswrapper[4773]: I0122 12:10:00.399211 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wzvg\" (UniqueName: \"kubernetes.io/projected/14afe6ec-c013-4865-a430-2ad45da63cd2-kube-api-access-4wzvg\") pod \"cert-manager-operator-controller-manager-64cf6dff88-6t8vd\" (UID: \"14afe6ec-c013-4865-a430-2ad45da63cd2\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-6t8vd" Jan 22 12:10:00 crc kubenswrapper[4773]: I0122 12:10:00.399893 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/14afe6ec-c013-4865-a430-2ad45da63cd2-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-6t8vd\" (UID: \"14afe6ec-c013-4865-a430-2ad45da63cd2\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-6t8vd" Jan 22 12:10:01 crc kubenswrapper[4773]: I0122 12:10:01.047029 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Jan 22 12:10:01 crc kubenswrapper[4773]: I0122 12:10:01.134623 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Jan 22 12:10:01 crc kubenswrapper[4773]: I0122 12:10:01.151301 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wzvg\" (UniqueName: \"kubernetes.io/projected/14afe6ec-c013-4865-a430-2ad45da63cd2-kube-api-access-4wzvg\") pod \"cert-manager-operator-controller-manager-64cf6dff88-6t8vd\" (UID: \"14afe6ec-c013-4865-a430-2ad45da63cd2\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-6t8vd" Jan 22 12:10:01 crc kubenswrapper[4773]: I0122 12:10:01.615108 4773 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-qq889" Jan 22 12:10:01 crc kubenswrapper[4773]: I0122 12:10:01.624805 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-6t8vd" Jan 22 12:10:01 crc kubenswrapper[4773]: I0122 12:10:01.784347 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bbn9r"] Jan 22 12:10:01 crc kubenswrapper[4773]: I0122 12:10:01.784783 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bbn9r" podUID="6d9036ea-93ec-4784-a703-2a4db61c0f23" containerName="registry-server" containerID="cri-o://2a03d72a4cef357fbee7f6a951e96bd13d57d615a7ad2c9906d00e8939045085" gracePeriod=2 Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.087573 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-6t8vd"] Jan 22 12:10:02 crc kubenswrapper[4773]: W0122 12:10:02.096500 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod14afe6ec_c013_4865_a430_2ad45da63cd2.slice/crio-b960d33f9f08ce2b5192f4371f2b64750388d63aef0fcd06cba015250cf845b5 WatchSource:0}: Error finding container b960d33f9f08ce2b5192f4371f2b64750388d63aef0fcd06cba015250cf845b5: Status 404 returned error can't find the container with id b960d33f9f08ce2b5192f4371f2b64750388d63aef0fcd06cba015250cf845b5 Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.122973 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bbn9r" Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.223536 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gr6wq\" (UniqueName: \"kubernetes.io/projected/6d9036ea-93ec-4784-a703-2a4db61c0f23-kube-api-access-gr6wq\") pod \"6d9036ea-93ec-4784-a703-2a4db61c0f23\" (UID: \"6d9036ea-93ec-4784-a703-2a4db61c0f23\") " Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.224536 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d9036ea-93ec-4784-a703-2a4db61c0f23-utilities\") pod \"6d9036ea-93ec-4784-a703-2a4db61c0f23\" (UID: \"6d9036ea-93ec-4784-a703-2a4db61c0f23\") " Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.224832 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d9036ea-93ec-4784-a703-2a4db61c0f23-catalog-content\") pod \"6d9036ea-93ec-4784-a703-2a4db61c0f23\" (UID: \"6d9036ea-93ec-4784-a703-2a4db61c0f23\") " Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.225775 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d9036ea-93ec-4784-a703-2a4db61c0f23-utilities" (OuterVolumeSpecName: "utilities") pod "6d9036ea-93ec-4784-a703-2a4db61c0f23" (UID: "6d9036ea-93ec-4784-a703-2a4db61c0f23"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.230943 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d9036ea-93ec-4784-a703-2a4db61c0f23-kube-api-access-gr6wq" (OuterVolumeSpecName: "kube-api-access-gr6wq") pod "6d9036ea-93ec-4784-a703-2a4db61c0f23" (UID: "6d9036ea-93ec-4784-a703-2a4db61c0f23"). InnerVolumeSpecName "kube-api-access-gr6wq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.287736 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d9036ea-93ec-4784-a703-2a4db61c0f23-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6d9036ea-93ec-4784-a703-2a4db61c0f23" (UID: "6d9036ea-93ec-4784-a703-2a4db61c0f23"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.326696 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6d9036ea-93ec-4784-a703-2a4db61c0f23-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.326748 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gr6wq\" (UniqueName: \"kubernetes.io/projected/6d9036ea-93ec-4784-a703-2a4db61c0f23-kube-api-access-gr6wq\") on node \"crc\" DevicePath \"\"" Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.326774 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6d9036ea-93ec-4784-a703-2a4db61c0f23-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.794108 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d9036ea-93ec-4784-a703-2a4db61c0f23" containerID="2a03d72a4cef357fbee7f6a951e96bd13d57d615a7ad2c9906d00e8939045085" exitCode=0 Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.794184 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bbn9r" event={"ID":"6d9036ea-93ec-4784-a703-2a4db61c0f23","Type":"ContainerDied","Data":"2a03d72a4cef357fbee7f6a951e96bd13d57d615a7ad2c9906d00e8939045085"} Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.794192 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bbn9r" Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.794221 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bbn9r" event={"ID":"6d9036ea-93ec-4784-a703-2a4db61c0f23","Type":"ContainerDied","Data":"d63516f6c1b8a0b2d8fe82dbfbfba719f7e8134f331f343ebf451638f7d08516"} Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.794243 4773 scope.go:117] "RemoveContainer" containerID="2a03d72a4cef357fbee7f6a951e96bd13d57d615a7ad2c9906d00e8939045085" Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.796023 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-6t8vd" event={"ID":"14afe6ec-c013-4865-a430-2ad45da63cd2","Type":"ContainerStarted","Data":"b960d33f9f08ce2b5192f4371f2b64750388d63aef0fcd06cba015250cf845b5"} Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.814342 4773 scope.go:117] "RemoveContainer" containerID="5bc42b180487e4387161b6296038865cc23ce3df6b274489f001e0d65344805f" Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.817665 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bbn9r"] Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.821246 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bbn9r"] Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.828771 4773 scope.go:117] "RemoveContainer" containerID="21e4aaa5f72b938780e4ade59c8fe2f06fcae5c13a7e54c4ffad612b24e21cf6" Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.860621 4773 scope.go:117] "RemoveContainer" containerID="2a03d72a4cef357fbee7f6a951e96bd13d57d615a7ad2c9906d00e8939045085" Jan 22 12:10:02 crc kubenswrapper[4773]: E0122 12:10:02.861131 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a03d72a4cef357fbee7f6a951e96bd13d57d615a7ad2c9906d00e8939045085\": container with ID starting with 2a03d72a4cef357fbee7f6a951e96bd13d57d615a7ad2c9906d00e8939045085 not found: ID does not exist" containerID="2a03d72a4cef357fbee7f6a951e96bd13d57d615a7ad2c9906d00e8939045085" Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.861197 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a03d72a4cef357fbee7f6a951e96bd13d57d615a7ad2c9906d00e8939045085"} err="failed to get container status \"2a03d72a4cef357fbee7f6a951e96bd13d57d615a7ad2c9906d00e8939045085\": rpc error: code = NotFound desc = could not find container \"2a03d72a4cef357fbee7f6a951e96bd13d57d615a7ad2c9906d00e8939045085\": container with ID starting with 2a03d72a4cef357fbee7f6a951e96bd13d57d615a7ad2c9906d00e8939045085 not found: ID does not exist" Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.861235 4773 scope.go:117] "RemoveContainer" containerID="5bc42b180487e4387161b6296038865cc23ce3df6b274489f001e0d65344805f" Jan 22 12:10:02 crc kubenswrapper[4773]: E0122 12:10:02.861784 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bc42b180487e4387161b6296038865cc23ce3df6b274489f001e0d65344805f\": container with ID starting with 5bc42b180487e4387161b6296038865cc23ce3df6b274489f001e0d65344805f not found: ID does not exist" containerID="5bc42b180487e4387161b6296038865cc23ce3df6b274489f001e0d65344805f" Jan 22 12:10:02 crc 
kubenswrapper[4773]: I0122 12:10:02.861831 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bc42b180487e4387161b6296038865cc23ce3df6b274489f001e0d65344805f"} err="failed to get container status \"5bc42b180487e4387161b6296038865cc23ce3df6b274489f001e0d65344805f\": rpc error: code = NotFound desc = could not find container \"5bc42b180487e4387161b6296038865cc23ce3df6b274489f001e0d65344805f\": container with ID starting with 5bc42b180487e4387161b6296038865cc23ce3df6b274489f001e0d65344805f not found: ID does not exist" Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.861859 4773 scope.go:117] "RemoveContainer" containerID="21e4aaa5f72b938780e4ade59c8fe2f06fcae5c13a7e54c4ffad612b24e21cf6" Jan 22 12:10:02 crc kubenswrapper[4773]: E0122 12:10:02.862106 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21e4aaa5f72b938780e4ade59c8fe2f06fcae5c13a7e54c4ffad612b24e21cf6\": container with ID starting with 21e4aaa5f72b938780e4ade59c8fe2f06fcae5c13a7e54c4ffad612b24e21cf6 not found: ID does not exist" containerID="21e4aaa5f72b938780e4ade59c8fe2f06fcae5c13a7e54c4ffad612b24e21cf6" Jan 22 12:10:02 crc kubenswrapper[4773]: I0122 12:10:02.862141 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21e4aaa5f72b938780e4ade59c8fe2f06fcae5c13a7e54c4ffad612b24e21cf6"} err="failed to get container status \"21e4aaa5f72b938780e4ade59c8fe2f06fcae5c13a7e54c4ffad612b24e21cf6\": rpc error: code = NotFound desc = could not find container \"21e4aaa5f72b938780e4ade59c8fe2f06fcae5c13a7e54c4ffad612b24e21cf6\": container with ID starting with 21e4aaa5f72b938780e4ade59c8fe2f06fcae5c13a7e54c4ffad612b24e21cf6 not found: ID does not exist" Jan 22 12:10:04 crc kubenswrapper[4773]: I0122 12:10:04.074991 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:10:04 crc kubenswrapper[4773]: I0122 12:10:04.075526 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:10:04 crc kubenswrapper[4773]: I0122 12:10:04.666579 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d9036ea-93ec-4784-a703-2a4db61c0f23" path="/var/lib/kubelet/pods/6d9036ea-93ec-4784-a703-2a4db61c0f23/volumes" Jan 22 12:10:09 crc kubenswrapper[4773]: I0122 12:10:09.846123 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-6t8vd" event={"ID":"14afe6ec-c013-4865-a430-2ad45da63cd2","Type":"ContainerStarted","Data":"cd6f3d53755310acfc0c3a220d77262e1a40fe43cfffca440660af5f92562a21"} Jan 22 12:10:14 crc kubenswrapper[4773]: I0122 12:10:14.111781 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-6t8vd" podStartSLOduration=6.857740501 podStartE2EDuration="14.111762442s" podCreationTimestamp="2026-01-22 12:10:00 +0000 UTC" firstStartedPulling="2026-01-22 
12:10:02.100064667 +0000 UTC m=+909.678180492" lastFinishedPulling="2026-01-22 12:10:09.354086608 +0000 UTC m=+916.932202433" observedRunningTime="2026-01-22 12:10:09.883857978 +0000 UTC m=+917.461973813" watchObservedRunningTime="2026-01-22 12:10:14.111762442 +0000 UTC m=+921.689878267" Jan 22 12:10:14 crc kubenswrapper[4773]: I0122 12:10:14.113225 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-dz5f5"] Jan 22 12:10:14 crc kubenswrapper[4773]: E0122 12:10:14.113637 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d9036ea-93ec-4784-a703-2a4db61c0f23" containerName="extract-utilities" Jan 22 12:10:14 crc kubenswrapper[4773]: I0122 12:10:14.113734 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d9036ea-93ec-4784-a703-2a4db61c0f23" containerName="extract-utilities" Jan 22 12:10:14 crc kubenswrapper[4773]: E0122 12:10:14.113832 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d9036ea-93ec-4784-a703-2a4db61c0f23" containerName="registry-server" Jan 22 12:10:14 crc kubenswrapper[4773]: I0122 12:10:14.113911 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d9036ea-93ec-4784-a703-2a4db61c0f23" containerName="registry-server" Jan 22 12:10:14 crc kubenswrapper[4773]: E0122 12:10:14.113986 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d9036ea-93ec-4784-a703-2a4db61c0f23" containerName="extract-content" Jan 22 12:10:14 crc kubenswrapper[4773]: I0122 12:10:14.114047 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d9036ea-93ec-4784-a703-2a4db61c0f23" containerName="extract-content" Jan 22 12:10:14 crc kubenswrapper[4773]: I0122 12:10:14.114225 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d9036ea-93ec-4784-a703-2a4db61c0f23" containerName="registry-server" Jan 22 12:10:14 crc kubenswrapper[4773]: I0122 12:10:14.114701 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-dz5f5" Jan 22 12:10:14 crc kubenswrapper[4773]: I0122 12:10:14.117410 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Jan 22 12:10:14 crc kubenswrapper[4773]: I0122 12:10:14.117835 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Jan 22 12:10:14 crc kubenswrapper[4773]: I0122 12:10:14.117834 4773 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-dppsj" Jan 22 12:10:14 crc kubenswrapper[4773]: I0122 12:10:14.123857 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-dz5f5"] Jan 22 12:10:14 crc kubenswrapper[4773]: I0122 12:10:14.229889 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c68v2\" (UniqueName: \"kubernetes.io/projected/e5163b9e-7df3-4616-a466-c36185e99397-kube-api-access-c68v2\") pod \"cert-manager-webhook-f4fb5df64-dz5f5\" (UID: \"e5163b9e-7df3-4616-a466-c36185e99397\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-dz5f5" Jan 22 12:10:14 crc kubenswrapper[4773]: I0122 12:10:14.230151 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e5163b9e-7df3-4616-a466-c36185e99397-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-dz5f5\" (UID: \"e5163b9e-7df3-4616-a466-c36185e99397\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-dz5f5" Jan 22 12:10:14 crc kubenswrapper[4773]: I0122 12:10:14.331876 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c68v2\" (UniqueName: \"kubernetes.io/projected/e5163b9e-7df3-4616-a466-c36185e99397-kube-api-access-c68v2\") pod \"cert-manager-webhook-f4fb5df64-dz5f5\" (UID: \"e5163b9e-7df3-4616-a466-c36185e99397\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-dz5f5" Jan 22 12:10:14 crc kubenswrapper[4773]: I0122 12:10:14.331972 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e5163b9e-7df3-4616-a466-c36185e99397-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-dz5f5\" (UID: \"e5163b9e-7df3-4616-a466-c36185e99397\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-dz5f5" Jan 22 12:10:14 crc kubenswrapper[4773]: I0122 12:10:14.355110 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c68v2\" (UniqueName: \"kubernetes.io/projected/e5163b9e-7df3-4616-a466-c36185e99397-kube-api-access-c68v2\") pod \"cert-manager-webhook-f4fb5df64-dz5f5\" (UID: \"e5163b9e-7df3-4616-a466-c36185e99397\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-dz5f5" Jan 22 12:10:14 crc kubenswrapper[4773]: I0122 12:10:14.355299 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e5163b9e-7df3-4616-a466-c36185e99397-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-dz5f5\" (UID: \"e5163b9e-7df3-4616-a466-c36185e99397\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-dz5f5" Jan 22 12:10:14 crc kubenswrapper[4773]: I0122 12:10:14.450857 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-dz5f5" Jan 22 12:10:14 crc kubenswrapper[4773]: I0122 12:10:14.892740 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-dz5f5"] Jan 22 12:10:15 crc kubenswrapper[4773]: I0122 12:10:15.887672 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-dz5f5" event={"ID":"e5163b9e-7df3-4616-a466-c36185e99397","Type":"ContainerStarted","Data":"9c4ab7e7c2a33324ff79820d9ffe5e28380b2d7a14aaf4ff76a37d4eef5c8e0c"} Jan 22 12:10:16 crc kubenswrapper[4773]: I0122 12:10:16.882859 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-qnqwg"] Jan 22 12:10:16 crc kubenswrapper[4773]: I0122 12:10:16.883933 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-qnqwg" Jan 22 12:10:16 crc kubenswrapper[4773]: I0122 12:10:16.886213 4773 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-ndc8d" Jan 22 12:10:16 crc kubenswrapper[4773]: I0122 12:10:16.897614 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-qnqwg"] Jan 22 12:10:17 crc kubenswrapper[4773]: I0122 12:10:17.041059 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b6587a6b-06d7-43c0-8db4-dcc854df334d-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-qnqwg\" (UID: \"b6587a6b-06d7-43c0-8db4-dcc854df334d\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-qnqwg" Jan 22 12:10:17 crc kubenswrapper[4773]: I0122 12:10:17.041130 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xzf5\" (UniqueName: \"kubernetes.io/projected/b6587a6b-06d7-43c0-8db4-dcc854df334d-kube-api-access-8xzf5\") pod \"cert-manager-cainjector-855d9ccff4-qnqwg\" (UID: \"b6587a6b-06d7-43c0-8db4-dcc854df334d\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-qnqwg" Jan 22 12:10:17 crc kubenswrapper[4773]: I0122 12:10:17.142781 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b6587a6b-06d7-43c0-8db4-dcc854df334d-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-qnqwg\" (UID: \"b6587a6b-06d7-43c0-8db4-dcc854df334d\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-qnqwg" Jan 22 12:10:17 crc kubenswrapper[4773]: I0122 12:10:17.142838 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xzf5\" (UniqueName: \"kubernetes.io/projected/b6587a6b-06d7-43c0-8db4-dcc854df334d-kube-api-access-8xzf5\") pod \"cert-manager-cainjector-855d9ccff4-qnqwg\" (UID: \"b6587a6b-06d7-43c0-8db4-dcc854df334d\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-qnqwg" Jan 22 12:10:17 crc kubenswrapper[4773]: I0122 12:10:17.160405 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xzf5\" (UniqueName: \"kubernetes.io/projected/b6587a6b-06d7-43c0-8db4-dcc854df334d-kube-api-access-8xzf5\") pod \"cert-manager-cainjector-855d9ccff4-qnqwg\" (UID: \"b6587a6b-06d7-43c0-8db4-dcc854df334d\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-qnqwg" Jan 22 12:10:17 crc kubenswrapper[4773]: I0122 12:10:17.162162 4773 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b6587a6b-06d7-43c0-8db4-dcc854df334d-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-qnqwg\" (UID: \"b6587a6b-06d7-43c0-8db4-dcc854df334d\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-qnqwg" Jan 22 12:10:17 crc kubenswrapper[4773]: I0122 12:10:17.205231 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-qnqwg" Jan 22 12:10:17 crc kubenswrapper[4773]: I0122 12:10:17.434102 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-qnqwg"] Jan 22 12:10:17 crc kubenswrapper[4773]: I0122 12:10:17.901221 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-qnqwg" event={"ID":"b6587a6b-06d7-43c0-8db4-dcc854df334d","Type":"ContainerStarted","Data":"efda23fef9298b188594edfe819aaa16f437bd5d686faf2fc76aa8a506512cf8"} Jan 22 12:10:18 crc kubenswrapper[4773]: I0122 12:10:18.438093 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2xpch"] Jan 22 12:10:18 crc kubenswrapper[4773]: I0122 12:10:18.439352 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2xpch" Jan 22 12:10:18 crc kubenswrapper[4773]: I0122 12:10:18.455612 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2xpch"] Jan 22 12:10:18 crc kubenswrapper[4773]: I0122 12:10:18.562022 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvkgx\" (UniqueName: \"kubernetes.io/projected/88132aba-cb2d-4fd5-a26d-eb8fcc81d198-kube-api-access-nvkgx\") pod \"certified-operators-2xpch\" (UID: \"88132aba-cb2d-4fd5-a26d-eb8fcc81d198\") " pod="openshift-marketplace/certified-operators-2xpch" Jan 22 12:10:18 crc kubenswrapper[4773]: I0122 12:10:18.562071 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88132aba-cb2d-4fd5-a26d-eb8fcc81d198-utilities\") pod \"certified-operators-2xpch\" (UID: \"88132aba-cb2d-4fd5-a26d-eb8fcc81d198\") " pod="openshift-marketplace/certified-operators-2xpch" Jan 22 12:10:18 crc kubenswrapper[4773]: I0122 12:10:18.562170 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88132aba-cb2d-4fd5-a26d-eb8fcc81d198-catalog-content\") pod \"certified-operators-2xpch\" (UID: \"88132aba-cb2d-4fd5-a26d-eb8fcc81d198\") " pod="openshift-marketplace/certified-operators-2xpch" Jan 22 12:10:18 crc kubenswrapper[4773]: I0122 12:10:18.663909 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88132aba-cb2d-4fd5-a26d-eb8fcc81d198-catalog-content\") pod \"certified-operators-2xpch\" (UID: \"88132aba-cb2d-4fd5-a26d-eb8fcc81d198\") " pod="openshift-marketplace/certified-operators-2xpch" Jan 22 12:10:18 crc kubenswrapper[4773]: I0122 12:10:18.663950 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvkgx\" (UniqueName: \"kubernetes.io/projected/88132aba-cb2d-4fd5-a26d-eb8fcc81d198-kube-api-access-nvkgx\") pod \"certified-operators-2xpch\" (UID: 
\"88132aba-cb2d-4fd5-a26d-eb8fcc81d198\") " pod="openshift-marketplace/certified-operators-2xpch" Jan 22 12:10:18 crc kubenswrapper[4773]: I0122 12:10:18.663975 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88132aba-cb2d-4fd5-a26d-eb8fcc81d198-utilities\") pod \"certified-operators-2xpch\" (UID: \"88132aba-cb2d-4fd5-a26d-eb8fcc81d198\") " pod="openshift-marketplace/certified-operators-2xpch" Jan 22 12:10:18 crc kubenswrapper[4773]: I0122 12:10:18.664396 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88132aba-cb2d-4fd5-a26d-eb8fcc81d198-utilities\") pod \"certified-operators-2xpch\" (UID: \"88132aba-cb2d-4fd5-a26d-eb8fcc81d198\") " pod="openshift-marketplace/certified-operators-2xpch" Jan 22 12:10:18 crc kubenswrapper[4773]: I0122 12:10:18.664524 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88132aba-cb2d-4fd5-a26d-eb8fcc81d198-catalog-content\") pod \"certified-operators-2xpch\" (UID: \"88132aba-cb2d-4fd5-a26d-eb8fcc81d198\") " pod="openshift-marketplace/certified-operators-2xpch" Jan 22 12:10:18 crc kubenswrapper[4773]: I0122 12:10:18.683118 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvkgx\" (UniqueName: \"kubernetes.io/projected/88132aba-cb2d-4fd5-a26d-eb8fcc81d198-kube-api-access-nvkgx\") pod \"certified-operators-2xpch\" (UID: \"88132aba-cb2d-4fd5-a26d-eb8fcc81d198\") " pod="openshift-marketplace/certified-operators-2xpch" Jan 22 12:10:18 crc kubenswrapper[4773]: I0122 12:10:18.756491 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2xpch" Jan 22 12:10:19 crc kubenswrapper[4773]: I0122 12:10:19.276302 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2xpch"] Jan 22 12:10:19 crc kubenswrapper[4773]: W0122 12:10:19.287387 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod88132aba_cb2d_4fd5_a26d_eb8fcc81d198.slice/crio-d39d908ea77191bd8c127825344b5e39eeb046c08f280980dbde5d4f4f9b83a6 WatchSource:0}: Error finding container d39d908ea77191bd8c127825344b5e39eeb046c08f280980dbde5d4f4f9b83a6: Status 404 returned error can't find the container with id d39d908ea77191bd8c127825344b5e39eeb046c08f280980dbde5d4f4f9b83a6 Jan 22 12:10:19 crc kubenswrapper[4773]: I0122 12:10:19.924474 4773 generic.go:334] "Generic (PLEG): container finished" podID="88132aba-cb2d-4fd5-a26d-eb8fcc81d198" containerID="48f00511157f390ce52313094494466bbf163c64892b374e004457aa36fa8c1b" exitCode=0 Jan 22 12:10:19 crc kubenswrapper[4773]: I0122 12:10:19.924569 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2xpch" event={"ID":"88132aba-cb2d-4fd5-a26d-eb8fcc81d198","Type":"ContainerDied","Data":"48f00511157f390ce52313094494466bbf163c64892b374e004457aa36fa8c1b"} Jan 22 12:10:19 crc kubenswrapper[4773]: I0122 12:10:19.924616 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2xpch" event={"ID":"88132aba-cb2d-4fd5-a26d-eb8fcc81d198","Type":"ContainerStarted","Data":"d39d908ea77191bd8c127825344b5e39eeb046c08f280980dbde5d4f4f9b83a6"} Jan 22 12:10:21 crc kubenswrapper[4773]: I0122 12:10:21.961005 4773 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/certified-operators-2xpch" event={"ID":"88132aba-cb2d-4fd5-a26d-eb8fcc81d198","Type":"ContainerStarted","Data":"cea4bf3693e4287495233905bef2b1202e373eb9ce390c2a41f1b8fc9f70dcca"} Jan 22 12:10:23 crc kubenswrapper[4773]: I0122 12:10:23.012041 4773 generic.go:334] "Generic (PLEG): container finished" podID="88132aba-cb2d-4fd5-a26d-eb8fcc81d198" containerID="cea4bf3693e4287495233905bef2b1202e373eb9ce390c2a41f1b8fc9f70dcca" exitCode=0 Jan 22 12:10:23 crc kubenswrapper[4773]: I0122 12:10:23.012520 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2xpch" event={"ID":"88132aba-cb2d-4fd5-a26d-eb8fcc81d198","Type":"ContainerDied","Data":"cea4bf3693e4287495233905bef2b1202e373eb9ce390c2a41f1b8fc9f70dcca"} Jan 22 12:10:24 crc kubenswrapper[4773]: I0122 12:10:24.819726 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-qmnzk"] Jan 22 12:10:24 crc kubenswrapper[4773]: I0122 12:10:24.821013 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-qmnzk" Jan 22 12:10:24 crc kubenswrapper[4773]: I0122 12:10:24.825733 4773 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-7cs64" Jan 22 12:10:24 crc kubenswrapper[4773]: I0122 12:10:24.940128 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/220217e9-63dc-4215-b315-3c9290e38510-bound-sa-token\") pod \"cert-manager-86cb77c54b-qmnzk\" (UID: \"220217e9-63dc-4215-b315-3c9290e38510\") " pod="cert-manager/cert-manager-86cb77c54b-qmnzk" Jan 22 12:10:24 crc kubenswrapper[4773]: I0122 12:10:24.940309 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46xpz\" (UniqueName: \"kubernetes.io/projected/220217e9-63dc-4215-b315-3c9290e38510-kube-api-access-46xpz\") pod \"cert-manager-86cb77c54b-qmnzk\" (UID: \"220217e9-63dc-4215-b315-3c9290e38510\") " pod="cert-manager/cert-manager-86cb77c54b-qmnzk" Jan 22 12:10:24 crc kubenswrapper[4773]: I0122 12:10:24.990725 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-qmnzk"] Jan 22 12:10:25 crc kubenswrapper[4773]: I0122 12:10:25.041994 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46xpz\" (UniqueName: \"kubernetes.io/projected/220217e9-63dc-4215-b315-3c9290e38510-kube-api-access-46xpz\") pod \"cert-manager-86cb77c54b-qmnzk\" (UID: \"220217e9-63dc-4215-b315-3c9290e38510\") " pod="cert-manager/cert-manager-86cb77c54b-qmnzk" Jan 22 12:10:25 crc kubenswrapper[4773]: I0122 12:10:25.042115 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/220217e9-63dc-4215-b315-3c9290e38510-bound-sa-token\") pod \"cert-manager-86cb77c54b-qmnzk\" (UID: \"220217e9-63dc-4215-b315-3c9290e38510\") " pod="cert-manager/cert-manager-86cb77c54b-qmnzk" Jan 22 12:10:25 crc kubenswrapper[4773]: I0122 12:10:25.067394 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/220217e9-63dc-4215-b315-3c9290e38510-bound-sa-token\") pod \"cert-manager-86cb77c54b-qmnzk\" (UID: \"220217e9-63dc-4215-b315-3c9290e38510\") " pod="cert-manager/cert-manager-86cb77c54b-qmnzk" Jan 22 12:10:25 crc 
kubenswrapper[4773]: I0122 12:10:25.073402 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46xpz\" (UniqueName: \"kubernetes.io/projected/220217e9-63dc-4215-b315-3c9290e38510-kube-api-access-46xpz\") pod \"cert-manager-86cb77c54b-qmnzk\" (UID: \"220217e9-63dc-4215-b315-3c9290e38510\") " pod="cert-manager/cert-manager-86cb77c54b-qmnzk"
Jan 22 12:10:25 crc kubenswrapper[4773]: I0122 12:10:25.146630 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-qmnzk"
Jan 22 12:10:30 crc kubenswrapper[4773]: I0122 12:10:30.916649 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-qmnzk"]
Jan 22 12:10:31 crc kubenswrapper[4773]: I0122 12:10:31.081731 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-qmnzk" event={"ID":"220217e9-63dc-4215-b315-3c9290e38510","Type":"ContainerStarted","Data":"b85862874ef76258b541309108ed3b4585a2c2df62b9d8c1a6d0d3946d0b900f"}
Jan 22 12:10:31 crc kubenswrapper[4773]: I0122 12:10:31.081780 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-qmnzk" event={"ID":"220217e9-63dc-4215-b315-3c9290e38510","Type":"ContainerStarted","Data":"95c95f9b88984f4236e9da27eabb063f63f62c81585f2335dc85aa255cfd9888"}
Jan 22 12:10:31 crc kubenswrapper[4773]: I0122 12:10:31.084251 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-dz5f5" event={"ID":"e5163b9e-7df3-4616-a466-c36185e99397","Type":"ContainerStarted","Data":"347a9fe7ed84e780fa07b51db731a47d3604bc55406223cad58dc46608a9133e"}
Jan 22 12:10:31 crc kubenswrapper[4773]: I0122 12:10:31.084403 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-dz5f5"
Jan 22 12:10:31 crc kubenswrapper[4773]: I0122 12:10:31.086639 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2xpch" event={"ID":"88132aba-cb2d-4fd5-a26d-eb8fcc81d198","Type":"ContainerStarted","Data":"55c160d9b90e4d4decd5b742bf1efbaabb75061b99095d3ab0ca3e69eee17f94"}
Jan 22 12:10:31 crc kubenswrapper[4773]: I0122 12:10:31.088174 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-qnqwg" event={"ID":"b6587a6b-06d7-43c0-8db4-dcc854df334d","Type":"ContainerStarted","Data":"7e0bee0ce420482360837af99572ac39680e9a9eb454817d18d0e768221a8c08"}
Jan 22 12:10:31 crc kubenswrapper[4773]: I0122 12:10:31.102704 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-qmnzk" podStartSLOduration=7.102685644 podStartE2EDuration="7.102685644s" podCreationTimestamp="2026-01-22 12:10:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:10:31.100828032 +0000 UTC m=+938.678943857" watchObservedRunningTime="2026-01-22 12:10:31.102685644 +0000 UTC m=+938.680801469"
Jan 22 12:10:31 crc kubenswrapper[4773]: I0122 12:10:31.122674 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2xpch" podStartSLOduration=2.494294534 podStartE2EDuration="13.122642906s" podCreationTimestamp="2026-01-22 12:10:18 +0000 UTC" firstStartedPulling="2026-01-22 12:10:19.926097782 +0000 UTC m=+927.504213607" lastFinishedPulling="2026-01-22 12:10:30.554446164 +0000 UTC m=+938.132561979" observedRunningTime="2026-01-22 12:10:31.119465987 +0000 UTC m=+938.697581812" watchObservedRunningTime="2026-01-22 12:10:31.122642906 +0000 UTC m=+938.700758731"
Jan 22 12:10:31 crc kubenswrapper[4773]: I0122 12:10:31.146552 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-dz5f5" podStartSLOduration=1.4435638530000001 podStartE2EDuration="17.14653587s" podCreationTimestamp="2026-01-22 12:10:14 +0000 UTC" firstStartedPulling="2026-01-22 12:10:14.901917319 +0000 UTC m=+922.480033144" lastFinishedPulling="2026-01-22 12:10:30.604889336 +0000 UTC m=+938.183005161" observedRunningTime="2026-01-22 12:10:31.145313145 +0000 UTC m=+938.723428970" watchObservedRunningTime="2026-01-22 12:10:31.14653587 +0000 UTC m=+938.724651685"
Jan 22 12:10:31 crc kubenswrapper[4773]: I0122 12:10:31.169355 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-qnqwg" podStartSLOduration=2.047940786 podStartE2EDuration="15.169336592s" podCreationTimestamp="2026-01-22 12:10:16 +0000 UTC" firstStartedPulling="2026-01-22 12:10:17.446501727 +0000 UTC m=+925.024617552" lastFinishedPulling="2026-01-22 12:10:30.567897533 +0000 UTC m=+938.146013358" observedRunningTime="2026-01-22 12:10:31.168411366 +0000 UTC m=+938.746527191" watchObservedRunningTime="2026-01-22 12:10:31.169336592 +0000 UTC m=+938.747452417"
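
A minimal sketch (not kubelet source) of how the podStartSLOduration values in the tracker entries above relate to the other printed fields: in every entry here, SLO duration equals podStartE2EDuration minus the image-pull window (lastFinishedPulling - firstStartedPulling) taken from the monotonic m=+... offsets, and all-zero pull timestamps ("0001-01-01 ...") mean nothing was pulled, so SLO equals E2E. The program below just redoes that arithmetic for the cainjector entry; all values are copied from the log.

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // cert-manager-cainjector-855d9ccff4-qnqwg, values from the entry above
        e2e := 15169336592 * time.Nanosecond // podStartE2EDuration="15.169336592s"
        firstPull := 925.024617552           // firstStartedPulling m=+ offset, seconds
        lastPull := 938.146013358            // lastFinishedPulling m=+ offset, seconds
        pull := time.Duration((lastPull - firstPull) * float64(time.Second))
        // prints ~2.047940786s, matching podStartSLOduration=2.047940786
        fmt.Printf("%.9fs\n", (e2e - pull).Seconds())
    }

The webhook entry checks out the same way: 17.14653587s - (938.183005161 - 922.480033144)s = 1.443563853s.
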
podUID="88132aba-cb2d-4fd5-a26d-eb8fcc81d198" containerName="registry-server" containerID="cri-o://55c160d9b90e4d4decd5b742bf1efbaabb75061b99095d3ab0ca3e69eee17f94" gracePeriod=2 Jan 22 12:10:42 crc kubenswrapper[4773]: I0122 12:10:42.786305 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-vjw6f"] Jan 22 12:10:42 crc kubenswrapper[4773]: I0122 12:10:42.787715 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-vjw6f" Jan 22 12:10:42 crc kubenswrapper[4773]: I0122 12:10:42.789689 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Jan 22 12:10:42 crc kubenswrapper[4773]: I0122 12:10:42.789925 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-rqg74" Jan 22 12:10:42 crc kubenswrapper[4773]: I0122 12:10:42.789982 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Jan 22 12:10:42 crc kubenswrapper[4773]: I0122 12:10:42.856431 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-vjw6f"] Jan 22 12:10:42 crc kubenswrapper[4773]: I0122 12:10:42.886562 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxw5q\" (UniqueName: \"kubernetes.io/projected/5cb765b7-3f9f-4e7f-8d08-9d95bfefc074-kube-api-access-dxw5q\") pod \"openstack-operator-index-vjw6f\" (UID: \"5cb765b7-3f9f-4e7f-8d08-9d95bfefc074\") " pod="openstack-operators/openstack-operator-index-vjw6f" Jan 22 12:10:42 crc kubenswrapper[4773]: I0122 12:10:42.987617 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxw5q\" (UniqueName: \"kubernetes.io/projected/5cb765b7-3f9f-4e7f-8d08-9d95bfefc074-kube-api-access-dxw5q\") pod \"openstack-operator-index-vjw6f\" (UID: \"5cb765b7-3f9f-4e7f-8d08-9d95bfefc074\") " pod="openstack-operators/openstack-operator-index-vjw6f" Jan 22 12:10:43 crc kubenswrapper[4773]: I0122 12:10:43.018152 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxw5q\" (UniqueName: \"kubernetes.io/projected/5cb765b7-3f9f-4e7f-8d08-9d95bfefc074-kube-api-access-dxw5q\") pod \"openstack-operator-index-vjw6f\" (UID: \"5cb765b7-3f9f-4e7f-8d08-9d95bfefc074\") " pod="openstack-operators/openstack-operator-index-vjw6f" Jan 22 12:10:43 crc kubenswrapper[4773]: I0122 12:10:43.120842 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-vjw6f" Jan 22 12:10:43 crc kubenswrapper[4773]: I0122 12:10:43.534681 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-vjw6f"] Jan 22 12:10:43 crc kubenswrapper[4773]: W0122 12:10:43.538190 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5cb765b7_3f9f_4e7f_8d08_9d95bfefc074.slice/crio-94940968ae8f3ec599ba196f358a1e9ccd71ec25f3f6686ba84fc1891d551984 WatchSource:0}: Error finding container 94940968ae8f3ec599ba196f358a1e9ccd71ec25f3f6686ba84fc1891d551984: Status 404 returned error can't find the container with id 94940968ae8f3ec599ba196f358a1e9ccd71ec25f3f6686ba84fc1891d551984 Jan 22 12:10:44 crc kubenswrapper[4773]: I0122 12:10:44.176510 4773 generic.go:334] "Generic (PLEG): container finished" podID="88132aba-cb2d-4fd5-a26d-eb8fcc81d198" containerID="55c160d9b90e4d4decd5b742bf1efbaabb75061b99095d3ab0ca3e69eee17f94" exitCode=0 Jan 22 12:10:44 crc kubenswrapper[4773]: I0122 12:10:44.176927 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2xpch" event={"ID":"88132aba-cb2d-4fd5-a26d-eb8fcc81d198","Type":"ContainerDied","Data":"55c160d9b90e4d4decd5b742bf1efbaabb75061b99095d3ab0ca3e69eee17f94"} Jan 22 12:10:44 crc kubenswrapper[4773]: I0122 12:10:44.176955 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2xpch" event={"ID":"88132aba-cb2d-4fd5-a26d-eb8fcc81d198","Type":"ContainerDied","Data":"d39d908ea77191bd8c127825344b5e39eeb046c08f280980dbde5d4f4f9b83a6"} Jan 22 12:10:44 crc kubenswrapper[4773]: I0122 12:10:44.176969 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d39d908ea77191bd8c127825344b5e39eeb046c08f280980dbde5d4f4f9b83a6" Jan 22 12:10:44 crc kubenswrapper[4773]: I0122 12:10:44.177923 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-vjw6f" event={"ID":"5cb765b7-3f9f-4e7f-8d08-9d95bfefc074","Type":"ContainerStarted","Data":"94940968ae8f3ec599ba196f358a1e9ccd71ec25f3f6686ba84fc1891d551984"} Jan 22 12:10:44 crc kubenswrapper[4773]: I0122 12:10:44.190983 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2xpch" Jan 22 12:10:44 crc kubenswrapper[4773]: I0122 12:10:44.201363 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88132aba-cb2d-4fd5-a26d-eb8fcc81d198-utilities\") pod \"88132aba-cb2d-4fd5-a26d-eb8fcc81d198\" (UID: \"88132aba-cb2d-4fd5-a26d-eb8fcc81d198\") " Jan 22 12:10:44 crc kubenswrapper[4773]: I0122 12:10:44.201420 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvkgx\" (UniqueName: \"kubernetes.io/projected/88132aba-cb2d-4fd5-a26d-eb8fcc81d198-kube-api-access-nvkgx\") pod \"88132aba-cb2d-4fd5-a26d-eb8fcc81d198\" (UID: \"88132aba-cb2d-4fd5-a26d-eb8fcc81d198\") " Jan 22 12:10:44 crc kubenswrapper[4773]: I0122 12:10:44.201482 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88132aba-cb2d-4fd5-a26d-eb8fcc81d198-catalog-content\") pod \"88132aba-cb2d-4fd5-a26d-eb8fcc81d198\" (UID: \"88132aba-cb2d-4fd5-a26d-eb8fcc81d198\") " Jan 22 12:10:44 crc kubenswrapper[4773]: I0122 12:10:44.202847 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88132aba-cb2d-4fd5-a26d-eb8fcc81d198-utilities" (OuterVolumeSpecName: "utilities") pod "88132aba-cb2d-4fd5-a26d-eb8fcc81d198" (UID: "88132aba-cb2d-4fd5-a26d-eb8fcc81d198"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:10:44 crc kubenswrapper[4773]: I0122 12:10:44.211815 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88132aba-cb2d-4fd5-a26d-eb8fcc81d198-kube-api-access-nvkgx" (OuterVolumeSpecName: "kube-api-access-nvkgx") pod "88132aba-cb2d-4fd5-a26d-eb8fcc81d198" (UID: "88132aba-cb2d-4fd5-a26d-eb8fcc81d198"). InnerVolumeSpecName "kube-api-access-nvkgx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:10:44 crc kubenswrapper[4773]: I0122 12:10:44.253232 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88132aba-cb2d-4fd5-a26d-eb8fcc81d198-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "88132aba-cb2d-4fd5-a26d-eb8fcc81d198" (UID: "88132aba-cb2d-4fd5-a26d-eb8fcc81d198"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:10:44 crc kubenswrapper[4773]: I0122 12:10:44.302153 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88132aba-cb2d-4fd5-a26d-eb8fcc81d198-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 12:10:44 crc kubenswrapper[4773]: I0122 12:10:44.302552 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88132aba-cb2d-4fd5-a26d-eb8fcc81d198-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 12:10:44 crc kubenswrapper[4773]: I0122 12:10:44.302570 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvkgx\" (UniqueName: \"kubernetes.io/projected/88132aba-cb2d-4fd5-a26d-eb8fcc81d198-kube-api-access-nvkgx\") on node \"crc\" DevicePath \"\"" Jan 22 12:10:45 crc kubenswrapper[4773]: I0122 12:10:45.185050 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-vjw6f" event={"ID":"5cb765b7-3f9f-4e7f-8d08-9d95bfefc074","Type":"ContainerStarted","Data":"b7ae5fa4e7777ee14d9bc89956e6807177a0ead06efb63c66ac71f21b6531098"} Jan 22 12:10:45 crc kubenswrapper[4773]: I0122 12:10:45.185094 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2xpch" Jan 22 12:10:45 crc kubenswrapper[4773]: I0122 12:10:45.200796 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-vjw6f" podStartSLOduration=2.321233172 podStartE2EDuration="3.200776127s" podCreationTimestamp="2026-01-22 12:10:42 +0000 UTC" firstStartedPulling="2026-01-22 12:10:43.540541841 +0000 UTC m=+951.118657666" lastFinishedPulling="2026-01-22 12:10:44.420084796 +0000 UTC m=+951.998200621" observedRunningTime="2026-01-22 12:10:45.197635218 +0000 UTC m=+952.775751063" watchObservedRunningTime="2026-01-22 12:10:45.200776127 +0000 UTC m=+952.778891952" Jan 22 12:10:45 crc kubenswrapper[4773]: I0122 12:10:45.213335 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2xpch"] Jan 22 12:10:45 crc kubenswrapper[4773]: I0122 12:10:45.218886 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2xpch"] Jan 22 12:10:46 crc kubenswrapper[4773]: I0122 12:10:46.671362 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88132aba-cb2d-4fd5-a26d-eb8fcc81d198" path="/var/lib/kubelet/pods/88132aba-cb2d-4fd5-a26d-eb8fcc81d198/volumes" Jan 22 12:10:46 crc kubenswrapper[4773]: I0122 12:10:46.703895 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-vjw6f"] Jan 22 12:10:47 crc kubenswrapper[4773]: I0122 12:10:47.198758 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-vjw6f" podUID="5cb765b7-3f9f-4e7f-8d08-9d95bfefc074" containerName="registry-server" containerID="cri-o://b7ae5fa4e7777ee14d9bc89956e6807177a0ead06efb63c66ac71f21b6531098" gracePeriod=2 Jan 22 12:10:47 crc kubenswrapper[4773]: I0122 12:10:47.505597 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-jvwbz"] Jan 22 12:10:47 crc kubenswrapper[4773]: E0122 12:10:47.505851 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88132aba-cb2d-4fd5-a26d-eb8fcc81d198" containerName="extract-utilities" 
Jan 22 12:10:47 crc kubenswrapper[4773]: I0122 12:10:47.505862 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="88132aba-cb2d-4fd5-a26d-eb8fcc81d198" containerName="extract-utilities" Jan 22 12:10:47 crc kubenswrapper[4773]: E0122 12:10:47.505887 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88132aba-cb2d-4fd5-a26d-eb8fcc81d198" containerName="registry-server" Jan 22 12:10:47 crc kubenswrapper[4773]: I0122 12:10:47.505901 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="88132aba-cb2d-4fd5-a26d-eb8fcc81d198" containerName="registry-server" Jan 22 12:10:47 crc kubenswrapper[4773]: E0122 12:10:47.505919 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88132aba-cb2d-4fd5-a26d-eb8fcc81d198" containerName="extract-content" Jan 22 12:10:47 crc kubenswrapper[4773]: I0122 12:10:47.505928 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="88132aba-cb2d-4fd5-a26d-eb8fcc81d198" containerName="extract-content" Jan 22 12:10:47 crc kubenswrapper[4773]: I0122 12:10:47.506214 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="88132aba-cb2d-4fd5-a26d-eb8fcc81d198" containerName="registry-server" Jan 22 12:10:47 crc kubenswrapper[4773]: I0122 12:10:47.506622 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jvwbz" Jan 22 12:10:47 crc kubenswrapper[4773]: I0122 12:10:47.520128 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jvwbz"] Jan 22 12:10:47 crc kubenswrapper[4773]: I0122 12:10:47.573705 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-vjw6f" Jan 22 12:10:47 crc kubenswrapper[4773]: I0122 12:10:47.646553 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wlbs\" (UniqueName: \"kubernetes.io/projected/d86424e7-7383-4bbe-8610-2ac8fdc8143a-kube-api-access-6wlbs\") pod \"openstack-operator-index-jvwbz\" (UID: \"d86424e7-7383-4bbe-8610-2ac8fdc8143a\") " pod="openstack-operators/openstack-operator-index-jvwbz" Jan 22 12:10:47 crc kubenswrapper[4773]: I0122 12:10:47.747261 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dxw5q\" (UniqueName: \"kubernetes.io/projected/5cb765b7-3f9f-4e7f-8d08-9d95bfefc074-kube-api-access-dxw5q\") pod \"5cb765b7-3f9f-4e7f-8d08-9d95bfefc074\" (UID: \"5cb765b7-3f9f-4e7f-8d08-9d95bfefc074\") " Jan 22 12:10:47 crc kubenswrapper[4773]: I0122 12:10:47.748272 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wlbs\" (UniqueName: \"kubernetes.io/projected/d86424e7-7383-4bbe-8610-2ac8fdc8143a-kube-api-access-6wlbs\") pod \"openstack-operator-index-jvwbz\" (UID: \"d86424e7-7383-4bbe-8610-2ac8fdc8143a\") " pod="openstack-operators/openstack-operator-index-jvwbz" Jan 22 12:10:47 crc kubenswrapper[4773]: I0122 12:10:47.752329 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cb765b7-3f9f-4e7f-8d08-9d95bfefc074-kube-api-access-dxw5q" (OuterVolumeSpecName: "kube-api-access-dxw5q") pod "5cb765b7-3f9f-4e7f-8d08-9d95bfefc074" (UID: "5cb765b7-3f9f-4e7f-8d08-9d95bfefc074"). InnerVolumeSpecName "kube-api-access-dxw5q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:10:47 crc kubenswrapper[4773]: I0122 12:10:47.766691 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wlbs\" (UniqueName: \"kubernetes.io/projected/d86424e7-7383-4bbe-8610-2ac8fdc8143a-kube-api-access-6wlbs\") pod \"openstack-operator-index-jvwbz\" (UID: \"d86424e7-7383-4bbe-8610-2ac8fdc8143a\") " pod="openstack-operators/openstack-operator-index-jvwbz" Jan 22 12:10:47 crc kubenswrapper[4773]: I0122 12:10:47.828686 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jvwbz" Jan 22 12:10:47 crc kubenswrapper[4773]: I0122 12:10:47.849196 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dxw5q\" (UniqueName: \"kubernetes.io/projected/5cb765b7-3f9f-4e7f-8d08-9d95bfefc074-kube-api-access-dxw5q\") on node \"crc\" DevicePath \"\"" Jan 22 12:10:48 crc kubenswrapper[4773]: I0122 12:10:48.199630 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jvwbz"] Jan 22 12:10:48 crc kubenswrapper[4773]: W0122 12:10:48.204477 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd86424e7_7383_4bbe_8610_2ac8fdc8143a.slice/crio-579015d1e59aab17bc67613f724a1c083e6542b058bf9cfd37668d24b757089a WatchSource:0}: Error finding container 579015d1e59aab17bc67613f724a1c083e6542b058bf9cfd37668d24b757089a: Status 404 returned error can't find the container with id 579015d1e59aab17bc67613f724a1c083e6542b058bf9cfd37668d24b757089a Jan 22 12:10:48 crc kubenswrapper[4773]: I0122 12:10:48.206017 4773 generic.go:334] "Generic (PLEG): container finished" podID="5cb765b7-3f9f-4e7f-8d08-9d95bfefc074" containerID="b7ae5fa4e7777ee14d9bc89956e6807177a0ead06efb63c66ac71f21b6531098" exitCode=0 Jan 22 12:10:48 crc kubenswrapper[4773]: I0122 12:10:48.206050 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-vjw6f" event={"ID":"5cb765b7-3f9f-4e7f-8d08-9d95bfefc074","Type":"ContainerDied","Data":"b7ae5fa4e7777ee14d9bc89956e6807177a0ead06efb63c66ac71f21b6531098"} Jan 22 12:10:48 crc kubenswrapper[4773]: I0122 12:10:48.206074 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-vjw6f" event={"ID":"5cb765b7-3f9f-4e7f-8d08-9d95bfefc074","Type":"ContainerDied","Data":"94940968ae8f3ec599ba196f358a1e9ccd71ec25f3f6686ba84fc1891d551984"} Jan 22 12:10:48 crc kubenswrapper[4773]: I0122 12:10:48.206090 4773 scope.go:117] "RemoveContainer" containerID="b7ae5fa4e7777ee14d9bc89956e6807177a0ead06efb63c66ac71f21b6531098" Jan 22 12:10:48 crc kubenswrapper[4773]: I0122 12:10:48.206147 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-vjw6f" Jan 22 12:10:48 crc kubenswrapper[4773]: I0122 12:10:48.222586 4773 scope.go:117] "RemoveContainer" containerID="b7ae5fa4e7777ee14d9bc89956e6807177a0ead06efb63c66ac71f21b6531098" Jan 22 12:10:48 crc kubenswrapper[4773]: E0122 12:10:48.223391 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7ae5fa4e7777ee14d9bc89956e6807177a0ead06efb63c66ac71f21b6531098\": container with ID starting with b7ae5fa4e7777ee14d9bc89956e6807177a0ead06efb63c66ac71f21b6531098 not found: ID does not exist" containerID="b7ae5fa4e7777ee14d9bc89956e6807177a0ead06efb63c66ac71f21b6531098" Jan 22 12:10:48 crc kubenswrapper[4773]: I0122 12:10:48.223447 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7ae5fa4e7777ee14d9bc89956e6807177a0ead06efb63c66ac71f21b6531098"} err="failed to get container status \"b7ae5fa4e7777ee14d9bc89956e6807177a0ead06efb63c66ac71f21b6531098\": rpc error: code = NotFound desc = could not find container \"b7ae5fa4e7777ee14d9bc89956e6807177a0ead06efb63c66ac71f21b6531098\": container with ID starting with b7ae5fa4e7777ee14d9bc89956e6807177a0ead06efb63c66ac71f21b6531098 not found: ID does not exist" Jan 22 12:10:48 crc kubenswrapper[4773]: I0122 12:10:48.237405 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-vjw6f"] Jan 22 12:10:48 crc kubenswrapper[4773]: I0122 12:10:48.241623 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-vjw6f"] Jan 22 12:10:48 crc kubenswrapper[4773]: I0122 12:10:48.669798 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cb765b7-3f9f-4e7f-8d08-9d95bfefc074" path="/var/lib/kubelet/pods/5cb765b7-3f9f-4e7f-8d08-9d95bfefc074/volumes" Jan 22 12:10:49 crc kubenswrapper[4773]: I0122 12:10:49.215779 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jvwbz" event={"ID":"d86424e7-7383-4bbe-8610-2ac8fdc8143a","Type":"ContainerStarted","Data":"1313a6022ecc24798ed955955a52677c39722eccf7da13b7dc95ff7f23300070"} Jan 22 12:10:49 crc kubenswrapper[4773]: I0122 12:10:49.215834 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jvwbz" event={"ID":"d86424e7-7383-4bbe-8610-2ac8fdc8143a","Type":"ContainerStarted","Data":"579015d1e59aab17bc67613f724a1c083e6542b058bf9cfd37668d24b757089a"} Jan 22 12:10:49 crc kubenswrapper[4773]: I0122 12:10:49.234197 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-jvwbz" podStartSLOduration=1.809842262 podStartE2EDuration="2.23417548s" podCreationTimestamp="2026-01-22 12:10:47 +0000 UTC" firstStartedPulling="2026-01-22 12:10:48.208569847 +0000 UTC m=+955.786685672" lastFinishedPulling="2026-01-22 12:10:48.632903055 +0000 UTC m=+956.211018890" observedRunningTime="2026-01-22 12:10:49.234087658 +0000 UTC m=+956.812203513" watchObservedRunningTime="2026-01-22 12:10:49.23417548 +0000 UTC m=+956.812291315" Jan 22 12:10:57 crc kubenswrapper[4773]: I0122 12:10:57.828964 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-jvwbz" Jan 22 12:10:57 crc kubenswrapper[4773]: I0122 12:10:57.829678 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack-operators/openstack-operator-index-jvwbz" Jan 22 12:10:57 crc kubenswrapper[4773]: I0122 12:10:57.875056 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-jvwbz" Jan 22 12:10:58 crc kubenswrapper[4773]: I0122 12:10:58.300572 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-jvwbz" Jan 22 12:11:04 crc kubenswrapper[4773]: I0122 12:11:04.075646 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:11:04 crc kubenswrapper[4773]: I0122 12:11:04.076617 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:11:04 crc kubenswrapper[4773]: I0122 12:11:04.076684 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 12:11:04 crc kubenswrapper[4773]: I0122 12:11:04.077744 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a884f8e7ecd2462a2aab65c47dccafce52794662c52b09332c27098b394743b2"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 12:11:04 crc kubenswrapper[4773]: I0122 12:11:04.077831 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://a884f8e7ecd2462a2aab65c47dccafce52794662c52b09332c27098b394743b2" gracePeriod=600 Jan 22 12:11:04 crc kubenswrapper[4773]: I0122 12:11:04.313794 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="a884f8e7ecd2462a2aab65c47dccafce52794662c52b09332c27098b394743b2" exitCode=0 Jan 22 12:11:04 crc kubenswrapper[4773]: I0122 12:11:04.313839 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"a884f8e7ecd2462a2aab65c47dccafce52794662c52b09332c27098b394743b2"} Jan 22 12:11:04 crc kubenswrapper[4773]: I0122 12:11:04.313875 4773 scope.go:117] "RemoveContainer" containerID="7882b6d2596720092fbfa9cbf9f782df49bd4b06ac71c0c20c36688e60fbb228" Jan 22 12:11:05 crc kubenswrapper[4773]: I0122 12:11:05.322563 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"7e399a29260ad477e9daad28c8b04af4cd0be5f90f0da32bc9266d74b4dcc71d"} Jan 22 12:11:06 crc kubenswrapper[4773]: I0122 12:11:06.338860 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh"] Jan 22 
Jan 22 12:11:06 crc kubenswrapper[4773]: I0122 12:11:06.338860 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh"]
Jan 22 12:11:06 crc kubenswrapper[4773]: E0122 12:11:06.339079 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cb765b7-3f9f-4e7f-8d08-9d95bfefc074" containerName="registry-server"
Jan 22 12:11:06 crc kubenswrapper[4773]: I0122 12:11:06.339090 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cb765b7-3f9f-4e7f-8d08-9d95bfefc074" containerName="registry-server"
Jan 22 12:11:06 crc kubenswrapper[4773]: I0122 12:11:06.339202 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cb765b7-3f9f-4e7f-8d08-9d95bfefc074" containerName="registry-server"
Jan 22 12:11:06 crc kubenswrapper[4773]: I0122 12:11:06.339996 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh"
Jan 22 12:11:06 crc kubenswrapper[4773]: I0122 12:11:06.341887 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-ww6s8"
Jan 22 12:11:06 crc kubenswrapper[4773]: I0122 12:11:06.352035 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh"]
Jan 22 12:11:06 crc kubenswrapper[4773]: I0122 12:11:06.507578 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8948768d-c93c-4a29-a93c-8c449a1980c3-util\") pod \"fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh\" (UID: \"8948768d-c93c-4a29-a93c-8c449a1980c3\") " pod="openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh"
Jan 22 12:11:06 crc kubenswrapper[4773]: I0122 12:11:06.507849 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9t7r\" (UniqueName: \"kubernetes.io/projected/8948768d-c93c-4a29-a93c-8c449a1980c3-kube-api-access-q9t7r\") pod \"fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh\" (UID: \"8948768d-c93c-4a29-a93c-8c449a1980c3\") " pod="openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh"
Jan 22 12:11:06 crc kubenswrapper[4773]: I0122 12:11:06.508058 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8948768d-c93c-4a29-a93c-8c449a1980c3-bundle\") pod \"fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh\" (UID: \"8948768d-c93c-4a29-a93c-8c449a1980c3\") " pod="openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh"
Jan 22 12:11:06 crc kubenswrapper[4773]: I0122 12:11:06.609245 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8948768d-c93c-4a29-a93c-8c449a1980c3-util\") pod \"fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh\" (UID: \"8948768d-c93c-4a29-a93c-8c449a1980c3\") " pod="openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh"
Jan 22 12:11:06 crc kubenswrapper[4773]: I0122 12:11:06.609385 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9t7r\" (UniqueName: \"kubernetes.io/projected/8948768d-c93c-4a29-a93c-8c449a1980c3-kube-api-access-q9t7r\") pod \"fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh\" (UID: \"8948768d-c93c-4a29-a93c-8c449a1980c3\") " pod="openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh"
Jan 22 12:11:06 crc kubenswrapper[4773]: I0122 12:11:06.609434 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8948768d-c93c-4a29-a93c-8c449a1980c3-bundle\") pod \"fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh\" (UID: \"8948768d-c93c-4a29-a93c-8c449a1980c3\") " pod="openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh"
Jan 22 12:11:06 crc kubenswrapper[4773]: I0122 12:11:06.609991 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8948768d-c93c-4a29-a93c-8c449a1980c3-bundle\") pod \"fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh\" (UID: \"8948768d-c93c-4a29-a93c-8c449a1980c3\") " pod="openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh"
Jan 22 12:11:06 crc kubenswrapper[4773]: I0122 12:11:06.610524 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8948768d-c93c-4a29-a93c-8c449a1980c3-util\") pod \"fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh\" (UID: \"8948768d-c93c-4a29-a93c-8c449a1980c3\") " pod="openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh"
Jan 22 12:11:06 crc kubenswrapper[4773]: I0122 12:11:06.629000 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9t7r\" (UniqueName: \"kubernetes.io/projected/8948768d-c93c-4a29-a93c-8c449a1980c3-kube-api-access-q9t7r\") pod \"fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh\" (UID: \"8948768d-c93c-4a29-a93c-8c449a1980c3\") " pod="openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh"
Jan 22 12:11:06 crc kubenswrapper[4773]: I0122 12:11:06.668537 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh"
Jan 22 12:11:07 crc kubenswrapper[4773]: I0122 12:11:07.052201 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh"]
Jan 22 12:11:07 crc kubenswrapper[4773]: I0122 12:11:07.338905 4773 generic.go:334] "Generic (PLEG): container finished" podID="8948768d-c93c-4a29-a93c-8c449a1980c3" containerID="38bc95ee056922e496c480ddb072861d652634e9517dd70a7fee318a983c6709" exitCode=0
Jan 22 12:11:07 crc kubenswrapper[4773]: I0122 12:11:07.338951 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh" event={"ID":"8948768d-c93c-4a29-a93c-8c449a1980c3","Type":"ContainerDied","Data":"38bc95ee056922e496c480ddb072861d652634e9517dd70a7fee318a983c6709"}
Jan 22 12:11:07 crc kubenswrapper[4773]: I0122 12:11:07.338980 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh" event={"ID":"8948768d-c93c-4a29-a93c-8c449a1980c3","Type":"ContainerStarted","Data":"e50e919703db75541b13a48d71ad996328f36a22f1a2036a70714b5b6b89fb95"}
Jan 22 12:11:08 crc kubenswrapper[4773]: I0122 12:11:08.349285 4773 generic.go:334] "Generic (PLEG): container finished" podID="8948768d-c93c-4a29-a93c-8c449a1980c3" containerID="d9ed4c706a644a9080552007c7fd9ffe6e75c7cb1f3c3016873e9d253d499f0f" exitCode=0
Jan 22 12:11:08 crc kubenswrapper[4773]: I0122 12:11:08.349394 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh" event={"ID":"8948768d-c93c-4a29-a93c-8c449a1980c3","Type":"ContainerDied","Data":"d9ed4c706a644a9080552007c7fd9ffe6e75c7cb1f3c3016873e9d253d499f0f"}
Jan 22 12:11:09 crc kubenswrapper[4773]: I0122 12:11:09.362969 4773 generic.go:334] "Generic (PLEG): container finished" podID="8948768d-c93c-4a29-a93c-8c449a1980c3" containerID="8f8d737f42b2e706915169f3afef7b89edefe0081190afe4ffe43853f257c2cc" exitCode=0
Jan 22 12:11:09 crc kubenswrapper[4773]: I0122 12:11:09.363096 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh" event={"ID":"8948768d-c93c-4a29-a93c-8c449a1980c3","Type":"ContainerDied","Data":"8f8d737f42b2e706915169f3afef7b89edefe0081190afe4ffe43853f257c2cc"}
Need to start a new one" pod="openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh" Jan 22 12:11:10 crc kubenswrapper[4773]: I0122 12:11:10.763821 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9t7r\" (UniqueName: \"kubernetes.io/projected/8948768d-c93c-4a29-a93c-8c449a1980c3-kube-api-access-q9t7r\") pod \"8948768d-c93c-4a29-a93c-8c449a1980c3\" (UID: \"8948768d-c93c-4a29-a93c-8c449a1980c3\") " Jan 22 12:11:10 crc kubenswrapper[4773]: I0122 12:11:10.763883 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8948768d-c93c-4a29-a93c-8c449a1980c3-bundle\") pod \"8948768d-c93c-4a29-a93c-8c449a1980c3\" (UID: \"8948768d-c93c-4a29-a93c-8c449a1980c3\") " Jan 22 12:11:10 crc kubenswrapper[4773]: I0122 12:11:10.763966 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8948768d-c93c-4a29-a93c-8c449a1980c3-util\") pod \"8948768d-c93c-4a29-a93c-8c449a1980c3\" (UID: \"8948768d-c93c-4a29-a93c-8c449a1980c3\") " Jan 22 12:11:10 crc kubenswrapper[4773]: I0122 12:11:10.764929 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8948768d-c93c-4a29-a93c-8c449a1980c3-bundle" (OuterVolumeSpecName: "bundle") pod "8948768d-c93c-4a29-a93c-8c449a1980c3" (UID: "8948768d-c93c-4a29-a93c-8c449a1980c3"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:11:10 crc kubenswrapper[4773]: I0122 12:11:10.769879 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8948768d-c93c-4a29-a93c-8c449a1980c3-kube-api-access-q9t7r" (OuterVolumeSpecName: "kube-api-access-q9t7r") pod "8948768d-c93c-4a29-a93c-8c449a1980c3" (UID: "8948768d-c93c-4a29-a93c-8c449a1980c3"). InnerVolumeSpecName "kube-api-access-q9t7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:11:10 crc kubenswrapper[4773]: I0122 12:11:10.778588 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8948768d-c93c-4a29-a93c-8c449a1980c3-util" (OuterVolumeSpecName: "util") pod "8948768d-c93c-4a29-a93c-8c449a1980c3" (UID: "8948768d-c93c-4a29-a93c-8c449a1980c3"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:11:10 crc kubenswrapper[4773]: I0122 12:11:10.865450 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9t7r\" (UniqueName: \"kubernetes.io/projected/8948768d-c93c-4a29-a93c-8c449a1980c3-kube-api-access-q9t7r\") on node \"crc\" DevicePath \"\"" Jan 22 12:11:10 crc kubenswrapper[4773]: I0122 12:11:10.865500 4773 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8948768d-c93c-4a29-a93c-8c449a1980c3-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:11:10 crc kubenswrapper[4773]: I0122 12:11:10.865513 4773 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8948768d-c93c-4a29-a93c-8c449a1980c3-util\") on node \"crc\" DevicePath \"\"" Jan 22 12:11:11 crc kubenswrapper[4773]: I0122 12:11:11.380085 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh" event={"ID":"8948768d-c93c-4a29-a93c-8c449a1980c3","Type":"ContainerDied","Data":"e50e919703db75541b13a48d71ad996328f36a22f1a2036a70714b5b6b89fb95"} Jan 22 12:11:11 crc kubenswrapper[4773]: I0122 12:11:11.380153 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e50e919703db75541b13a48d71ad996328f36a22f1a2036a70714b5b6b89fb95" Jan 22 12:11:11 crc kubenswrapper[4773]: I0122 12:11:11.380275 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh" Jan 22 12:11:19 crc kubenswrapper[4773]: I0122 12:11:19.190825 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-698d6bb84b-m5n97"] Jan 22 12:11:19 crc kubenswrapper[4773]: E0122 12:11:19.191513 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8948768d-c93c-4a29-a93c-8c449a1980c3" containerName="util" Jan 22 12:11:19 crc kubenswrapper[4773]: I0122 12:11:19.191525 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="8948768d-c93c-4a29-a93c-8c449a1980c3" containerName="util" Jan 22 12:11:19 crc kubenswrapper[4773]: E0122 12:11:19.191537 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8948768d-c93c-4a29-a93c-8c449a1980c3" containerName="extract" Jan 22 12:11:19 crc kubenswrapper[4773]: I0122 12:11:19.191543 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="8948768d-c93c-4a29-a93c-8c449a1980c3" containerName="extract" Jan 22 12:11:19 crc kubenswrapper[4773]: E0122 12:11:19.191551 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8948768d-c93c-4a29-a93c-8c449a1980c3" containerName="pull" Jan 22 12:11:19 crc kubenswrapper[4773]: I0122 12:11:19.191557 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="8948768d-c93c-4a29-a93c-8c449a1980c3" containerName="pull" Jan 22 12:11:19 crc kubenswrapper[4773]: I0122 12:11:19.191658 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="8948768d-c93c-4a29-a93c-8c449a1980c3" containerName="extract" Jan 22 12:11:19 crc kubenswrapper[4773]: I0122 12:11:19.192042 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-698d6bb84b-m5n97" Jan 22 12:11:19 crc kubenswrapper[4773]: I0122 12:11:19.194394 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-7kc6m" Jan 22 12:11:19 crc kubenswrapper[4773]: I0122 12:11:19.236766 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-698d6bb84b-m5n97"] Jan 22 12:11:19 crc kubenswrapper[4773]: I0122 12:11:19.274350 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vn7ng\" (UniqueName: \"kubernetes.io/projected/5ddce1ac-9a5f-4096-b21f-77dc07b68c2d-kube-api-access-vn7ng\") pod \"openstack-operator-controller-init-698d6bb84b-m5n97\" (UID: \"5ddce1ac-9a5f-4096-b21f-77dc07b68c2d\") " pod="openstack-operators/openstack-operator-controller-init-698d6bb84b-m5n97" Jan 22 12:11:19 crc kubenswrapper[4773]: I0122 12:11:19.375634 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vn7ng\" (UniqueName: \"kubernetes.io/projected/5ddce1ac-9a5f-4096-b21f-77dc07b68c2d-kube-api-access-vn7ng\") pod \"openstack-operator-controller-init-698d6bb84b-m5n97\" (UID: \"5ddce1ac-9a5f-4096-b21f-77dc07b68c2d\") " pod="openstack-operators/openstack-operator-controller-init-698d6bb84b-m5n97" Jan 22 12:11:19 crc kubenswrapper[4773]: I0122 12:11:19.393214 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vn7ng\" (UniqueName: \"kubernetes.io/projected/5ddce1ac-9a5f-4096-b21f-77dc07b68c2d-kube-api-access-vn7ng\") pod \"openstack-operator-controller-init-698d6bb84b-m5n97\" (UID: \"5ddce1ac-9a5f-4096-b21f-77dc07b68c2d\") " pod="openstack-operators/openstack-operator-controller-init-698d6bb84b-m5n97" Jan 22 12:11:19 crc kubenswrapper[4773]: I0122 12:11:19.508713 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-698d6bb84b-m5n97" Jan 22 12:11:20 crc kubenswrapper[4773]: I0122 12:11:20.274555 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-698d6bb84b-m5n97"] Jan 22 12:11:20 crc kubenswrapper[4773]: I0122 12:11:20.437251 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-698d6bb84b-m5n97" event={"ID":"5ddce1ac-9a5f-4096-b21f-77dc07b68c2d","Type":"ContainerStarted","Data":"04592b5bb82321a4cb16f0e7bcf8f8b26b050f7d8df2ccfbbe5b9cb42ce05368"} Jan 22 12:11:25 crc kubenswrapper[4773]: I0122 12:11:25.502215 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-698d6bb84b-m5n97" event={"ID":"5ddce1ac-9a5f-4096-b21f-77dc07b68c2d","Type":"ContainerStarted","Data":"d29654f9a43df9fdcba43d871058d3638a3f9f59a03cb5ebef828288dfaa80de"} Jan 22 12:11:25 crc kubenswrapper[4773]: I0122 12:11:25.502767 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-698d6bb84b-m5n97" Jan 22 12:11:25 crc kubenswrapper[4773]: I0122 12:11:25.533106 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-698d6bb84b-m5n97" podStartSLOduration=2.461509977 podStartE2EDuration="6.533090994s" podCreationTimestamp="2026-01-22 12:11:19 +0000 UTC" firstStartedPulling="2026-01-22 12:11:20.294890642 +0000 UTC m=+987.873006467" lastFinishedPulling="2026-01-22 12:11:24.366471659 +0000 UTC m=+991.944587484" observedRunningTime="2026-01-22 12:11:25.529990276 +0000 UTC m=+993.108106091" watchObservedRunningTime="2026-01-22 12:11:25.533090994 +0000 UTC m=+993.111206809" Jan 22 12:11:29 crc kubenswrapper[4773]: I0122 12:11:29.510928 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-698d6bb84b-m5n97" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.204639 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-9nlp4"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.205991 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-9nlp4" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.209419 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-njb68" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.210150 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-69cf5d4557-k7bqw"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.210888 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-k7bqw" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.214413 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-bwdcm" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.226498 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-b45d7bf98-vkt5m"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.227229 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-vkt5m" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.231347 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjd9k\" (UniqueName: \"kubernetes.io/projected/73fae2b3-a45d-431f-9113-7f669d5eff6d-kube-api-access-cjd9k\") pod \"barbican-operator-controller-manager-59dd8b7cbf-9nlp4\" (UID: \"73fae2b3-a45d-431f-9113-7f669d5eff6d\") " pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-9nlp4" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.231392 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hw2v\" (UniqueName: \"kubernetes.io/projected/e0e2887e-f5a6-48e6-862c-593b909d5514-kube-api-access-9hw2v\") pod \"cinder-operator-controller-manager-69cf5d4557-k7bqw\" (UID: \"e0e2887e-f5a6-48e6-862c-593b909d5514\") " pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-k7bqw" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.231450 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4lhd\" (UniqueName: \"kubernetes.io/projected/4429281b-22db-4df9-8c28-bb30e527b1f6-kube-api-access-g4lhd\") pod \"designate-operator-controller-manager-b45d7bf98-vkt5m\" (UID: \"4429281b-22db-4df9-8c28-bb30e527b1f6\") " pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-vkt5m" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.233679 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-69cf5d4557-k7bqw"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.234879 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-2z669" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.265708 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-br2hg"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.266427 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-br2hg" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.271012 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-8rwcb" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.271173 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-78fdd796fd-lm8jc"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.271940 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-lm8jc" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.274532 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-9nlp4"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.275072 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-vs78s" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.280241 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ztsqs"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.282214 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ztsqs" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.286824 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-pnbwr" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.312385 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-78fdd796fd-lm8jc"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.326432 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-br2hg"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.349029 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjd9k\" (UniqueName: \"kubernetes.io/projected/73fae2b3-a45d-431f-9113-7f669d5eff6d-kube-api-access-cjd9k\") pod \"barbican-operator-controller-manager-59dd8b7cbf-9nlp4\" (UID: \"73fae2b3-a45d-431f-9113-7f669d5eff6d\") " pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-9nlp4" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.351744 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hw2v\" (UniqueName: \"kubernetes.io/projected/e0e2887e-f5a6-48e6-862c-593b909d5514-kube-api-access-9hw2v\") pod \"cinder-operator-controller-manager-69cf5d4557-k7bqw\" (UID: \"e0e2887e-f5a6-48e6-862c-593b909d5514\") " pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-k7bqw" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.354738 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4lhd\" (UniqueName: \"kubernetes.io/projected/4429281b-22db-4df9-8c28-bb30e527b1f6-kube-api-access-g4lhd\") pod \"designate-operator-controller-manager-b45d7bf98-vkt5m\" (UID: \"4429281b-22db-4df9-8c28-bb30e527b1f6\") " pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-vkt5m" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.355168 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-b45d7bf98-vkt5m"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.398624 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjd9k\" (UniqueName: \"kubernetes.io/projected/73fae2b3-a45d-431f-9113-7f669d5eff6d-kube-api-access-cjd9k\") pod \"barbican-operator-controller-manager-59dd8b7cbf-9nlp4\" (UID: \"73fae2b3-a45d-431f-9113-7f669d5eff6d\") " 
pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-9nlp4" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.400490 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hw2v\" (UniqueName: \"kubernetes.io/projected/e0e2887e-f5a6-48e6-862c-593b909d5514-kube-api-access-9hw2v\") pod \"cinder-operator-controller-manager-69cf5d4557-k7bqw\" (UID: \"e0e2887e-f5a6-48e6-862c-593b909d5514\") " pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-k7bqw" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.400553 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.401420 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4lhd\" (UniqueName: \"kubernetes.io/projected/4429281b-22db-4df9-8c28-bb30e527b1f6-kube-api-access-g4lhd\") pod \"designate-operator-controller-manager-b45d7bf98-vkt5m\" (UID: \"4429281b-22db-4df9-8c28-bb30e527b1f6\") " pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-vkt5m" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.401696 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.408656 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-scp2x" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.408784 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.420345 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.440117 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7mdlm"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.440978 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7mdlm" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.447727 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-d75qn" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.452343 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ztsqs"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.456980 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zczw8\" (UniqueName: \"kubernetes.io/projected/a1965d1a-cb7f-4da6-90ab-1f75449e3e97-kube-api-access-zczw8\") pod \"glance-operator-controller-manager-78fdd796fd-lm8jc\" (UID: \"a1965d1a-cb7f-4da6-90ab-1f75449e3e97\") " pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-lm8jc" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.457041 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwdqr\" (UniqueName: \"kubernetes.io/projected/54683bfd-5bbf-47c6-9f3f-5bc4583ef5f0-kube-api-access-nwdqr\") pod \"heat-operator-controller-manager-594c8c9d5d-br2hg\" (UID: \"54683bfd-5bbf-47c6-9f3f-5bc4583ef5f0\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-br2hg" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.457075 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rflxh\" (UniqueName: \"kubernetes.io/projected/9aeccfa1-86f0-47e3-96c7-e0d018d24537-kube-api-access-rflxh\") pod \"horizon-operator-controller-manager-77d5c5b54f-ztsqs\" (UID: \"9aeccfa1-86f0-47e3-96c7-e0d018d24537\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ztsqs" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.460783 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-b8b6d4659-x6894"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.461604 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-x6894" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.466688 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-6446d" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.475705 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-78c6999f6f-sd745"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.478055 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-sd745" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.480330 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-jbtlz" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.485114 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-b8b6d4659-x6894"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.495465 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7mdlm"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.527623 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-9nlp4" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.534347 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-78c6999f6f-sd745"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.540546 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-k7bqw" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.545982 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-c87fff755-rjsjw"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.547064 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-rjsjw" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.551720 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-vqk77" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.551800 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-c87fff755-rjsjw"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.554979 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5d8f59fb49-vlvhw"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.556706 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-vlvhw" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.558009 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zczw8\" (UniqueName: \"kubernetes.io/projected/a1965d1a-cb7f-4da6-90ab-1f75449e3e97-kube-api-access-zczw8\") pod \"glance-operator-controller-manager-78fdd796fd-lm8jc\" (UID: \"a1965d1a-cb7f-4da6-90ab-1f75449e3e97\") " pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-lm8jc" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.558071 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwdqr\" (UniqueName: \"kubernetes.io/projected/54683bfd-5bbf-47c6-9f3f-5bc4583ef5f0-kube-api-access-nwdqr\") pod \"heat-operator-controller-manager-594c8c9d5d-br2hg\" (UID: \"54683bfd-5bbf-47c6-9f3f-5bc4583ef5f0\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-br2hg" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.558095 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rflxh\" (UniqueName: \"kubernetes.io/projected/9aeccfa1-86f0-47e3-96c7-e0d018d24537-kube-api-access-rflxh\") pod \"horizon-operator-controller-manager-77d5c5b54f-ztsqs\" (UID: \"9aeccfa1-86f0-47e3-96c7-e0d018d24537\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ztsqs" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.558116 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w46sj\" (UniqueName: \"kubernetes.io/projected/b6ef28d3-92bf-43a7-a577-c4ac162ab48a-kube-api-access-w46sj\") pod \"ironic-operator-controller-manager-69d6c9f5b8-7mdlm\" (UID: \"b6ef28d3-92bf-43a7-a577-c4ac162ab48a\") " pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7mdlm" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.558144 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-dlzxj\" (UID: \"29637656-53e6-4957-88ea-2445b706ec08\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.558170 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txw75\" (UniqueName: \"kubernetes.io/projected/29637656-53e6-4957-88ea-2445b706ec08-kube-api-access-txw75\") pod \"infra-operator-controller-manager-54ccf4f85d-dlzxj\" (UID: \"29637656-53e6-4957-88ea-2445b706ec08\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.563883 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-xcmvn" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.579200 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-vkt5m" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.590215 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rflxh\" (UniqueName: \"kubernetes.io/projected/9aeccfa1-86f0-47e3-96c7-e0d018d24537-kube-api-access-rflxh\") pod \"horizon-operator-controller-manager-77d5c5b54f-ztsqs\" (UID: \"9aeccfa1-86f0-47e3-96c7-e0d018d24537\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ztsqs" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.592258 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5d8f59fb49-vlvhw"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.610673 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwdqr\" (UniqueName: \"kubernetes.io/projected/54683bfd-5bbf-47c6-9f3f-5bc4583ef5f0-kube-api-access-nwdqr\") pod \"heat-operator-controller-manager-594c8c9d5d-br2hg\" (UID: \"54683bfd-5bbf-47c6-9f3f-5bc4583ef5f0\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-br2hg" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.610936 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-6b8bc8d87d-t4ws7"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.611834 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zczw8\" (UniqueName: \"kubernetes.io/projected/a1965d1a-cb7f-4da6-90ab-1f75449e3e97-kube-api-access-zczw8\") pod \"glance-operator-controller-manager-78fdd796fd-lm8jc\" (UID: \"a1965d1a-cb7f-4da6-90ab-1f75449e3e97\") " pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-lm8jc" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.612003 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-t4ws7" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.624261 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-6b8bc8d87d-t4ws7"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.625007 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-tmjwq" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.627762 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ztsqs" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.642715 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7bd9774b6-qwf6d"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.644952 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-qwf6d" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.649326 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7bd9774b6-qwf6d"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.651758 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-q5cnw" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.664250 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zp4bv\" (UniqueName: \"kubernetes.io/projected/02d2e417-7591-4d07-850e-4c670b40d1ea-kube-api-access-zp4bv\") pod \"keystone-operator-controller-manager-b8b6d4659-x6894\" (UID: \"02d2e417-7591-4d07-850e-4c670b40d1ea\") " pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-x6894" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.664321 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vf5px\" (UniqueName: \"kubernetes.io/projected/6e27f8a3-a214-47ec-9027-1a503d588e59-kube-api-access-vf5px\") pod \"neutron-operator-controller-manager-5d8f59fb49-vlvhw\" (UID: \"6e27f8a3-a214-47ec-9027-1a503d588e59\") " pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-vlvhw" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.666533 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w46sj\" (UniqueName: \"kubernetes.io/projected/b6ef28d3-92bf-43a7-a577-c4ac162ab48a-kube-api-access-w46sj\") pod \"ironic-operator-controller-manager-69d6c9f5b8-7mdlm\" (UID: \"b6ef28d3-92bf-43a7-a577-c4ac162ab48a\") " pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7mdlm" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.666587 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gb9s9\" (UniqueName: \"kubernetes.io/projected/be3892a0-8b94-459b-9d05-9aae47107554-kube-api-access-gb9s9\") pod \"mariadb-operator-controller-manager-c87fff755-rjsjw\" (UID: \"be3892a0-8b94-459b-9d05-9aae47107554\") " pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-rjsjw" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.666613 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-dlzxj\" (UID: \"29637656-53e6-4957-88ea-2445b706ec08\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.666659 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txw75\" (UniqueName: \"kubernetes.io/projected/29637656-53e6-4957-88ea-2445b706ec08-kube-api-access-txw75\") pod \"infra-operator-controller-manager-54ccf4f85d-dlzxj\" (UID: \"29637656-53e6-4957-88ea-2445b706ec08\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.666710 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2wpc\" (UniqueName: 
\"kubernetes.io/projected/cfe59bee-7ac2-4f1f-ac59-ab2ae4b1c24e-kube-api-access-g2wpc\") pod \"manila-operator-controller-manager-78c6999f6f-sd745\" (UID: \"cfe59bee-7ac2-4f1f-ac59-ab2ae4b1c24e\") " pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-sd745" Jan 22 12:12:06 crc kubenswrapper[4773]: E0122 12:12:06.666837 4773 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 22 12:12:06 crc kubenswrapper[4773]: E0122 12:12:06.666904 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert podName:29637656-53e6-4957-88ea-2445b706ec08 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:07.166885019 +0000 UTC m=+1034.745000904 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert") pod "infra-operator-controller-manager-54ccf4f85d-dlzxj" (UID: "29637656-53e6-4957-88ea-2445b706ec08") : secret "infra-operator-webhook-server-cert" not found Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.689252 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w46sj\" (UniqueName: \"kubernetes.io/projected/b6ef28d3-92bf-43a7-a577-c4ac162ab48a-kube-api-access-w46sj\") pod \"ironic-operator-controller-manager-69d6c9f5b8-7mdlm\" (UID: \"b6ef28d3-92bf-43a7-a577-c4ac162ab48a\") " pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7mdlm" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.691266 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txw75\" (UniqueName: \"kubernetes.io/projected/29637656-53e6-4957-88ea-2445b706ec08-kube-api-access-txw75\") pod \"infra-operator-controller-manager-54ccf4f85d-dlzxj\" (UID: \"29637656-53e6-4957-88ea-2445b706ec08\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.700921 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-55db956ddc-lx75p"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.701881 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.702440 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.703001 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-lx75p" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.705172 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-55db956ddc-lx75p"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.709064 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-p7xxl" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.709183 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.709332 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-qcq9p" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.728567 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-5d646b7d76-h6ggd"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.729605 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-h6ggd" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.742410 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-547cbdb99f-msvwn"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.743892 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-msvwn" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.744948 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-7czw5" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.773384 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.774317 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2wpc\" (UniqueName: \"kubernetes.io/projected/cfe59bee-7ac2-4f1f-ac59-ab2ae4b1c24e-kube-api-access-g2wpc\") pod \"manila-operator-controller-manager-78c6999f6f-sd745\" (UID: \"cfe59bee-7ac2-4f1f-ac59-ab2ae4b1c24e\") " pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-sd745" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.774463 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52c22\" (UniqueName: \"kubernetes.io/projected/639db363-b628-4c24-be20-57a1bf05c986-kube-api-access-52c22\") pod \"nova-operator-controller-manager-6b8bc8d87d-t4ws7\" (UID: \"639db363-b628-4c24-be20-57a1bf05c986\") " pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-t4ws7" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.774573 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zp4bv\" (UniqueName: \"kubernetes.io/projected/02d2e417-7591-4d07-850e-4c670b40d1ea-kube-api-access-zp4bv\") pod \"keystone-operator-controller-manager-b8b6d4659-x6894\" (UID: \"02d2e417-7591-4d07-850e-4c670b40d1ea\") " pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-x6894" 
Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.774654 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zn62v\" (UniqueName: \"kubernetes.io/projected/85ad5e61-8c92-4856-a34c-7d02aadbbc43-kube-api-access-zn62v\") pod \"octavia-operator-controller-manager-7bd9774b6-qwf6d\" (UID: \"85ad5e61-8c92-4856-a34c-7d02aadbbc43\") " pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-qwf6d" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.774743 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vf5px\" (UniqueName: \"kubernetes.io/projected/6e27f8a3-a214-47ec-9027-1a503d588e59-kube-api-access-vf5px\") pod \"neutron-operator-controller-manager-5d8f59fb49-vlvhw\" (UID: \"6e27f8a3-a214-47ec-9027-1a503d588e59\") " pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-vlvhw" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.774842 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gb9s9\" (UniqueName: \"kubernetes.io/projected/be3892a0-8b94-459b-9d05-9aae47107554-kube-api-access-gb9s9\") pod \"mariadb-operator-controller-manager-c87fff755-rjsjw\" (UID: \"be3892a0-8b94-459b-9d05-9aae47107554\") " pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-rjsjw" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.813438 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7mdlm" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.891383 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-547cbdb99f-msvwn"] Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.893203 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zn62v\" (UniqueName: \"kubernetes.io/projected/85ad5e61-8c92-4856-a34c-7d02aadbbc43-kube-api-access-zn62v\") pod \"octavia-operator-controller-manager-7bd9774b6-qwf6d\" (UID: \"85ad5e61-8c92-4856-a34c-7d02aadbbc43\") " pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-qwf6d" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.893372 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2njq\" (UniqueName: \"kubernetes.io/projected/7e2cd62a-874a-4d1b-a706-439c0e7756c0-kube-api-access-g2njq\") pod \"swift-operator-controller-manager-547cbdb99f-msvwn\" (UID: \"7e2cd62a-874a-4d1b-a706-439c0e7756c0\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-msvwn" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.893461 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52c22\" (UniqueName: \"kubernetes.io/projected/639db363-b628-4c24-be20-57a1bf05c986-kube-api-access-52c22\") pod \"nova-operator-controller-manager-6b8bc8d87d-t4ws7\" (UID: \"639db363-b628-4c24-be20-57a1bf05c986\") " pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-t4ws7" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.983710 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-lm8jc" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.984206 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-br2hg" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.987938 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-47ts5" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.994448 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert\") pod \"openstack-baremetal-operator-controller-manager-7c9c58b55799pgj\" (UID: \"fd720cc2-9948-4a4d-951f-17a20558e0e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.994525 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9bwr\" (UniqueName: \"kubernetes.io/projected/fd720cc2-9948-4a4d-951f-17a20558e0e2-kube-api-access-b9bwr\") pod \"openstack-baremetal-operator-controller-manager-7c9c58b55799pgj\" (UID: \"fd720cc2-9948-4a4d-951f-17a20558e0e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.994608 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xsjs\" (UniqueName: \"kubernetes.io/projected/592864bf-34a6-4335-9425-72386e772818-kube-api-access-4xsjs\") pod \"placement-operator-controller-manager-5d646b7d76-h6ggd\" (UID: \"592864bf-34a6-4335-9425-72386e772818\") " pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-h6ggd" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.994638 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2njq\" (UniqueName: \"kubernetes.io/projected/7e2cd62a-874a-4d1b-a706-439c0e7756c0-kube-api-access-g2njq\") pod \"swift-operator-controller-manager-547cbdb99f-msvwn\" (UID: \"7e2cd62a-874a-4d1b-a706-439c0e7756c0\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-msvwn" Jan 22 12:12:06 crc kubenswrapper[4773]: I0122 12:12:06.994684 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpkhm\" (UniqueName: \"kubernetes.io/projected/813b6b20-e15b-4da5-959a-d719f973a4e5-kube-api-access-tpkhm\") pod \"ovn-operator-controller-manager-55db956ddc-lx75p\" (UID: \"813b6b20-e15b-4da5-959a-d719f973a4e5\") " pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-lx75p" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.011761 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2wpc\" (UniqueName: \"kubernetes.io/projected/cfe59bee-7ac2-4f1f-ac59-ab2ae4b1c24e-kube-api-access-g2wpc\") pod \"manila-operator-controller-manager-78c6999f6f-sd745\" (UID: \"cfe59bee-7ac2-4f1f-ac59-ab2ae4b1c24e\") " pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-sd745" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.021398 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52c22\" (UniqueName: \"kubernetes.io/projected/639db363-b628-4c24-be20-57a1bf05c986-kube-api-access-52c22\") pod \"nova-operator-controller-manager-6b8bc8d87d-t4ws7\" (UID: \"639db363-b628-4c24-be20-57a1bf05c986\") " 
pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-t4ws7" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.022850 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vf5px\" (UniqueName: \"kubernetes.io/projected/6e27f8a3-a214-47ec-9027-1a503d588e59-kube-api-access-vf5px\") pod \"neutron-operator-controller-manager-5d8f59fb49-vlvhw\" (UID: \"6e27f8a3-a214-47ec-9027-1a503d588e59\") " pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-vlvhw" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.058034 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zp4bv\" (UniqueName: \"kubernetes.io/projected/02d2e417-7591-4d07-850e-4c670b40d1ea-kube-api-access-zp4bv\") pod \"keystone-operator-controller-manager-b8b6d4659-x6894\" (UID: \"02d2e417-7591-4d07-850e-4c670b40d1ea\") " pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-x6894" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.217725 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gb9s9\" (UniqueName: \"kubernetes.io/projected/be3892a0-8b94-459b-9d05-9aae47107554-kube-api-access-gb9s9\") pod \"mariadb-operator-controller-manager-c87fff755-rjsjw\" (UID: \"be3892a0-8b94-459b-9d05-9aae47107554\") " pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-rjsjw" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.220224 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-dlzxj\" (UID: \"29637656-53e6-4957-88ea-2445b706ec08\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.220294 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xsjs\" (UniqueName: \"kubernetes.io/projected/592864bf-34a6-4335-9425-72386e772818-kube-api-access-4xsjs\") pod \"placement-operator-controller-manager-5d646b7d76-h6ggd\" (UID: \"592864bf-34a6-4335-9425-72386e772818\") " pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-h6ggd" Jan 22 12:12:07 crc kubenswrapper[4773]: E0122 12:12:07.221086 4773 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 22 12:12:07 crc kubenswrapper[4773]: E0122 12:12:07.221314 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert podName:29637656-53e6-4957-88ea-2445b706ec08 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:08.221264983 +0000 UTC m=+1035.799380808 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert") pod "infra-operator-controller-manager-54ccf4f85d-dlzxj" (UID: "29637656-53e6-4957-88ea-2445b706ec08") : secret "infra-operator-webhook-server-cert" not found Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.221698 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zn62v\" (UniqueName: \"kubernetes.io/projected/85ad5e61-8c92-4856-a34c-7d02aadbbc43-kube-api-access-zn62v\") pod \"octavia-operator-controller-manager-7bd9774b6-qwf6d\" (UID: \"85ad5e61-8c92-4856-a34c-7d02aadbbc43\") " pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-qwf6d" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.249475 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2njq\" (UniqueName: \"kubernetes.io/projected/7e2cd62a-874a-4d1b-a706-439c0e7756c0-kube-api-access-g2njq\") pod \"swift-operator-controller-manager-547cbdb99f-msvwn\" (UID: \"7e2cd62a-874a-4d1b-a706-439c0e7756c0\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-msvwn" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.274364 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-rjsjw" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.278319 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-vlvhw" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.295591 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-t4ws7" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.317481 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-qwf6d" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.337351 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5d646b7d76-h6ggd"] Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.363200 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-msvwn" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.363756 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xsjs\" (UniqueName: \"kubernetes.io/projected/592864bf-34a6-4335-9425-72386e772818-kube-api-access-4xsjs\") pod \"placement-operator-controller-manager-5d646b7d76-h6ggd\" (UID: \"592864bf-34a6-4335-9425-72386e772818\") " pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-h6ggd" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.363861 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-85cd9769bb-rx8n5"] Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.637251 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-sd745" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.638907 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2njq\" (UniqueName: \"kubernetes.io/projected/7e2cd62a-874a-4d1b-a706-439c0e7756c0-kube-api-access-g2njq\") pod \"swift-operator-controller-manager-547cbdb99f-msvwn\" (UID: \"7e2cd62a-874a-4d1b-a706-439c0e7756c0\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-msvwn" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.639573 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2njq\" (UniqueName: \"kubernetes.io/projected/7e2cd62a-874a-4d1b-a706-439c0e7756c0-kube-api-access-g2njq\") pod \"swift-operator-controller-manager-547cbdb99f-msvwn\" (UID: \"7e2cd62a-874a-4d1b-a706-439c0e7756c0\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-msvwn" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.641205 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-h6ggd" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.896939 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-x6894" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.897660 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpkhm\" (UniqueName: \"kubernetes.io/projected/813b6b20-e15b-4da5-959a-d719f973a4e5-kube-api-access-tpkhm\") pod \"ovn-operator-controller-manager-55db956ddc-lx75p\" (UID: \"813b6b20-e15b-4da5-959a-d719f973a4e5\") " pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-lx75p" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.897707 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert\") pod \"openstack-baremetal-operator-controller-manager-7c9c58b55799pgj\" (UID: \"fd720cc2-9948-4a4d-951f-17a20558e0e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.897766 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9bwr\" (UniqueName: \"kubernetes.io/projected/fd720cc2-9948-4a4d-951f-17a20558e0e2-kube-api-access-b9bwr\") pod \"openstack-baremetal-operator-controller-manager-7c9c58b55799pgj\" (UID: \"fd720cc2-9948-4a4d-951f-17a20558e0e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" Jan 22 12:12:07 crc kubenswrapper[4773]: E0122 12:12:07.898500 4773 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 12:12:07 crc kubenswrapper[4773]: E0122 12:12:07.898557 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert podName:fd720cc2-9948-4a4d-951f-17a20558e0e2 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:08.398541084 +0000 UTC m=+1035.976656909 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert") pod "openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" (UID: "fd720cc2-9948-4a4d-951f-17a20558e0e2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.905499 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-rx8n5" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.917183 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-85cd9769bb-rx8n5"] Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.930615 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-tv2cp" Jan 22 12:12:07 crc kubenswrapper[4773]: I0122 12:12:07.950359 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9bwr\" (UniqueName: \"kubernetes.io/projected/fd720cc2-9948-4a4d-951f-17a20558e0e2-kube-api-access-b9bwr\") pod \"openstack-baremetal-operator-controller-manager-7c9c58b55799pgj\" (UID: \"fd720cc2-9948-4a4d-951f-17a20558e0e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.050418 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-69797bbcbd-9gcmw"] Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.053767 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-69797bbcbd-9gcmw"] Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.053911 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-9gcmw" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.056142 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk798\" (UniqueName: \"kubernetes.io/projected/d4fe0850-2785-4433-8e0a-28efdea91b64-kube-api-access-gk798\") pod \"telemetry-operator-controller-manager-85cd9769bb-rx8n5\" (UID: \"d4fe0850-2785-4433-8e0a-28efdea91b64\") " pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-rx8n5" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.083427 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5ffb9c6597-ptq9w"] Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.085603 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5ffb9c6597-ptq9w"] Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.085735 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-ptq9w" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.112765 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-k7bf9" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.112846 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-qx2wk" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.113816 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpkhm\" (UniqueName: \"kubernetes.io/projected/813b6b20-e15b-4da5-959a-d719f973a4e5-kube-api-access-tpkhm\") pod \"ovn-operator-controller-manager-55db956ddc-lx75p\" (UID: \"813b6b20-e15b-4da5-959a-d719f973a4e5\") " pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-lx75p" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.154977 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n"] Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.155882 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n"] Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.155960 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.168813 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvbmt\" (UniqueName: \"kubernetes.io/projected/4947f3f4-af75-45ed-9481-f4c8f3e525d8-kube-api-access-xvbmt\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.168881 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glhdq\" (UniqueName: \"kubernetes.io/projected/e817d93c-c5c3-44ed-92aa-e761dda7eaa9-kube-api-access-glhdq\") pod \"test-operator-controller-manager-69797bbcbd-9gcmw\" (UID: \"e817d93c-c5c3-44ed-92aa-e761dda7eaa9\") " pod="openstack-operators/test-operator-controller-manager-69797bbcbd-9gcmw" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.168912 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk798\" (UniqueName: \"kubernetes.io/projected/d4fe0850-2785-4433-8e0a-28efdea91b64-kube-api-access-gk798\") pod \"telemetry-operator-controller-manager-85cd9769bb-rx8n5\" (UID: \"d4fe0850-2785-4433-8e0a-28efdea91b64\") " pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-rx8n5" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.168956 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6kkw\" (UniqueName: \"kubernetes.io/projected/9731c1da-ba60-4e4a-af76-4e870c0f6e35-kube-api-access-c6kkw\") pod \"watcher-operator-controller-manager-5ffb9c6597-ptq9w\" (UID: \"9731c1da-ba60-4e4a-af76-4e870c0f6e35\") " pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-ptq9w" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.168988 4773 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.169006 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.179914 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.180238 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-lsdh8" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.180563 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.220602 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-lx75p" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.270155 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glhdq\" (UniqueName: \"kubernetes.io/projected/e817d93c-c5c3-44ed-92aa-e761dda7eaa9-kube-api-access-glhdq\") pod \"test-operator-controller-manager-69797bbcbd-9gcmw\" (UID: \"e817d93c-c5c3-44ed-92aa-e761dda7eaa9\") " pod="openstack-operators/test-operator-controller-manager-69797bbcbd-9gcmw" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.270366 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6kkw\" (UniqueName: \"kubernetes.io/projected/9731c1da-ba60-4e4a-af76-4e870c0f6e35-kube-api-access-c6kkw\") pod \"watcher-operator-controller-manager-5ffb9c6597-ptq9w\" (UID: \"9731c1da-ba60-4e4a-af76-4e870c0f6e35\") " pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-ptq9w" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.270594 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.270615 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.270639 4773 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-dlzxj\" (UID: \"29637656-53e6-4957-88ea-2445b706ec08\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.270692 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvbmt\" (UniqueName: \"kubernetes.io/projected/4947f3f4-af75-45ed-9481-f4c8f3e525d8-kube-api-access-xvbmt\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:08 crc kubenswrapper[4773]: E0122 12:12:08.271208 4773 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 22 12:12:08 crc kubenswrapper[4773]: E0122 12:12:08.271244 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs podName:4947f3f4-af75-45ed-9481-f4c8f3e525d8 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:08.771231397 +0000 UTC m=+1036.349347222 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs") pod "openstack-operator-controller-manager-788c8b99b5-fh84n" (UID: "4947f3f4-af75-45ed-9481-f4c8f3e525d8") : secret "metrics-server-cert" not found Jan 22 12:12:08 crc kubenswrapper[4773]: E0122 12:12:08.271384 4773 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 22 12:12:08 crc kubenswrapper[4773]: E0122 12:12:08.271407 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs podName:4947f3f4-af75-45ed-9481-f4c8f3e525d8 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:08.771400122 +0000 UTC m=+1036.349515947 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs") pod "openstack-operator-controller-manager-788c8b99b5-fh84n" (UID: "4947f3f4-af75-45ed-9481-f4c8f3e525d8") : secret "webhook-server-cert" not found Jan 22 12:12:08 crc kubenswrapper[4773]: E0122 12:12:08.271440 4773 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 22 12:12:08 crc kubenswrapper[4773]: E0122 12:12:08.271456 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert podName:29637656-53e6-4957-88ea-2445b706ec08 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:10.271450964 +0000 UTC m=+1037.849566789 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert") pod "infra-operator-controller-manager-54ccf4f85d-dlzxj" (UID: "29637656-53e6-4957-88ea-2445b706ec08") : secret "infra-operator-webhook-server-cert" not found Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.287384 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk798\" (UniqueName: \"kubernetes.io/projected/d4fe0850-2785-4433-8e0a-28efdea91b64-kube-api-access-gk798\") pod \"telemetry-operator-controller-manager-85cd9769bb-rx8n5\" (UID: \"d4fe0850-2785-4433-8e0a-28efdea91b64\") " pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-rx8n5" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.310260 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glhdq\" (UniqueName: \"kubernetes.io/projected/e817d93c-c5c3-44ed-92aa-e761dda7eaa9-kube-api-access-glhdq\") pod \"test-operator-controller-manager-69797bbcbd-9gcmw\" (UID: \"e817d93c-c5c3-44ed-92aa-e761dda7eaa9\") " pod="openstack-operators/test-operator-controller-manager-69797bbcbd-9gcmw" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.310333 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6kkw\" (UniqueName: \"kubernetes.io/projected/9731c1da-ba60-4e4a-af76-4e870c0f6e35-kube-api-access-c6kkw\") pod \"watcher-operator-controller-manager-5ffb9c6597-ptq9w\" (UID: \"9731c1da-ba60-4e4a-af76-4e870c0f6e35\") " pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-ptq9w" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.653878 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-rx8n5" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.658713 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-9gcmw" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.659547 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert\") pod \"openstack-baremetal-operator-controller-manager-7c9c58b55799pgj\" (UID: \"fd720cc2-9948-4a4d-951f-17a20558e0e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" Jan 22 12:12:08 crc kubenswrapper[4773]: E0122 12:12:08.659753 4773 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 12:12:08 crc kubenswrapper[4773]: E0122 12:12:08.659802 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert podName:fd720cc2-9948-4a4d-951f-17a20558e0e2 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:09.65978725 +0000 UTC m=+1037.237903075 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert") pod "openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" (UID: "fd720cc2-9948-4a4d-951f-17a20558e0e2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.697608 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvbmt\" (UniqueName: \"kubernetes.io/projected/4947f3f4-af75-45ed-9481-f4c8f3e525d8-kube-api-access-xvbmt\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.709958 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgnch"] Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.710937 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgnch" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.718214 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-ptq9w" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.737872 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-8g4bm" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.750243 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgnch"] Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.761583 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9n79x\" (UniqueName: \"kubernetes.io/projected/7da5c99e-8a28-4671-b7fb-43ec8b4d6faf-kube-api-access-9n79x\") pod \"rabbitmq-cluster-operator-manager-668c99d594-qgnch\" (UID: \"7da5c99e-8a28-4671-b7fb-43ec8b4d6faf\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgnch" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.778088 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-b45d7bf98-vkt5m"] Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.864256 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.864345 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.864424 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9n79x\" 
(UniqueName: \"kubernetes.io/projected/7da5c99e-8a28-4671-b7fb-43ec8b4d6faf-kube-api-access-9n79x\") pod \"rabbitmq-cluster-operator-manager-668c99d594-qgnch\" (UID: \"7da5c99e-8a28-4671-b7fb-43ec8b4d6faf\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgnch" Jan 22 12:12:08 crc kubenswrapper[4773]: E0122 12:12:08.864960 4773 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 22 12:12:08 crc kubenswrapper[4773]: E0122 12:12:08.865054 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs podName:4947f3f4-af75-45ed-9481-f4c8f3e525d8 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:09.865028317 +0000 UTC m=+1037.443144132 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs") pod "openstack-operator-controller-manager-788c8b99b5-fh84n" (UID: "4947f3f4-af75-45ed-9481-f4c8f3e525d8") : secret "metrics-server-cert" not found Jan 22 12:12:08 crc kubenswrapper[4773]: E0122 12:12:08.865093 4773 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 22 12:12:08 crc kubenswrapper[4773]: E0122 12:12:08.865162 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs podName:4947f3f4-af75-45ed-9481-f4c8f3e525d8 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:09.86513909 +0000 UTC m=+1037.443254915 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs") pod "openstack-operator-controller-manager-788c8b99b5-fh84n" (UID: "4947f3f4-af75-45ed-9481-f4c8f3e525d8") : secret "webhook-server-cert" not found Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.981582 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9n79x\" (UniqueName: \"kubernetes.io/projected/7da5c99e-8a28-4671-b7fb-43ec8b4d6faf-kube-api-access-9n79x\") pod \"rabbitmq-cluster-operator-manager-668c99d594-qgnch\" (UID: \"7da5c99e-8a28-4671-b7fb-43ec8b4d6faf\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgnch" Jan 22 12:12:08 crc kubenswrapper[4773]: I0122 12:12:08.985312 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgnch" Jan 22 12:12:09 crc kubenswrapper[4773]: W0122 12:12:09.131822 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4429281b_22db_4df9_8c28_bb30e527b1f6.slice/crio-1ecb7b2d875f7244256c389609d994810a8c32a3582acf2be089ba6ffd631f64 WatchSource:0}: Error finding container 1ecb7b2d875f7244256c389609d994810a8c32a3582acf2be089ba6ffd631f64: Status 404 returned error can't find the container with id 1ecb7b2d875f7244256c389609d994810a8c32a3582acf2be089ba6ffd631f64 Jan 22 12:12:09 crc kubenswrapper[4773]: I0122 12:12:09.140628 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ztsqs"] Jan 22 12:12:09 crc kubenswrapper[4773]: I0122 12:12:09.171363 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-69cf5d4557-k7bqw"] Jan 22 12:12:09 crc kubenswrapper[4773]: I0122 12:12:09.255077 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-9nlp4"] Jan 22 12:12:09 crc kubenswrapper[4773]: I0122 12:12:09.262602 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-78fdd796fd-lm8jc"] Jan 22 12:12:09 crc kubenswrapper[4773]: I0122 12:12:09.718996 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert\") pod \"openstack-baremetal-operator-controller-manager-7c9c58b55799pgj\" (UID: \"fd720cc2-9948-4a4d-951f-17a20558e0e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" Jan 22 12:12:09 crc kubenswrapper[4773]: E0122 12:12:09.719339 4773 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 12:12:09 crc kubenswrapper[4773]: E0122 12:12:09.719409 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert podName:fd720cc2-9948-4a4d-951f-17a20558e0e2 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:11.719388266 +0000 UTC m=+1039.297504091 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert") pod "openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" (UID: "fd720cc2-9948-4a4d-951f-17a20558e0e2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 12:12:09 crc kubenswrapper[4773]: I0122 12:12:09.922162 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:09 crc kubenswrapper[4773]: I0122 12:12:09.922645 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:09 crc kubenswrapper[4773]: E0122 12:12:09.922694 4773 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 22 12:12:09 crc kubenswrapper[4773]: E0122 12:12:09.922790 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs podName:4947f3f4-af75-45ed-9481-f4c8f3e525d8 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:11.92276344 +0000 UTC m=+1039.500879265 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs") pod "openstack-operator-controller-manager-788c8b99b5-fh84n" (UID: "4947f3f4-af75-45ed-9481-f4c8f3e525d8") : secret "metrics-server-cert" not found Jan 22 12:12:09 crc kubenswrapper[4773]: E0122 12:12:09.923367 4773 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 22 12:12:09 crc kubenswrapper[4773]: E0122 12:12:09.923469 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs podName:4947f3f4-af75-45ed-9481-f4c8f3e525d8 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:11.923446859 +0000 UTC m=+1039.501562764 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs") pod "openstack-operator-controller-manager-788c8b99b5-fh84n" (UID: "4947f3f4-af75-45ed-9481-f4c8f3e525d8") : secret "webhook-server-cert" not found Jan 22 12:12:09 crc kubenswrapper[4773]: I0122 12:12:09.964697 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-vkt5m" event={"ID":"4429281b-22db-4df9-8c28-bb30e527b1f6","Type":"ContainerStarted","Data":"1ecb7b2d875f7244256c389609d994810a8c32a3582acf2be089ba6ffd631f64"} Jan 22 12:12:09 crc kubenswrapper[4773]: I0122 12:12:09.966733 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-9nlp4" event={"ID":"73fae2b3-a45d-431f-9113-7f669d5eff6d","Type":"ContainerStarted","Data":"22a6813ef3687f34ade7aa5c72a29b71370d9e66909fd03f4757cc2ff7c495bb"} Jan 22 12:12:09 crc kubenswrapper[4773]: I0122 12:12:09.969235 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-k7bqw" event={"ID":"e0e2887e-f5a6-48e6-862c-593b909d5514","Type":"ContainerStarted","Data":"07b29aa5c7a11179c85d0c7f25e0af7f08236e5f5fe5115acdd1c883d2dfb5b5"} Jan 22 12:12:09 crc kubenswrapper[4773]: I0122 12:12:09.976377 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ztsqs" event={"ID":"9aeccfa1-86f0-47e3-96c7-e0d018d24537","Type":"ContainerStarted","Data":"53be1b699ddecfaceacf821f565e410a5ad514256719575fb6dde9aeab66781c"} Jan 22 12:12:09 crc kubenswrapper[4773]: I0122 12:12:09.977463 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-lm8jc" event={"ID":"a1965d1a-cb7f-4da6-90ab-1f75449e3e97","Type":"ContainerStarted","Data":"571f34b499eebce1125c0e220449b60bec37636c8a694c5bf31b5f551a55b2eb"} Jan 22 12:12:10 crc kubenswrapper[4773]: I0122 12:12:10.630857 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-dlzxj\" (UID: \"29637656-53e6-4957-88ea-2445b706ec08\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj" Jan 22 12:12:10 crc kubenswrapper[4773]: E0122 12:12:10.630979 4773 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 22 12:12:10 crc kubenswrapper[4773]: E0122 12:12:10.631031 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert podName:29637656-53e6-4957-88ea-2445b706ec08 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:14.631010116 +0000 UTC m=+1042.209125941 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert") pod "infra-operator-controller-manager-54ccf4f85d-dlzxj" (UID: "29637656-53e6-4957-88ea-2445b706ec08") : secret "infra-operator-webhook-server-cert" not found Jan 22 12:12:10 crc kubenswrapper[4773]: I0122 12:12:10.743121 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-c87fff755-rjsjw"] Jan 22 12:12:10 crc kubenswrapper[4773]: I0122 12:12:10.790060 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7mdlm"] Jan 22 12:12:10 crc kubenswrapper[4773]: W0122 12:12:10.868627 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbe3892a0_8b94_459b_9d05_9aae47107554.slice/crio-4edf29ac5188fe08553060745d160d9f51c07d0fa413670e6352f9aa9f178cf0 WatchSource:0}: Error finding container 4edf29ac5188fe08553060745d160d9f51c07d0fa413670e6352f9aa9f178cf0: Status 404 returned error can't find the container with id 4edf29ac5188fe08553060745d160d9f51c07d0fa413670e6352f9aa9f178cf0 Jan 22 12:12:10 crc kubenswrapper[4773]: I0122 12:12:10.991349 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-85cd9769bb-rx8n5"] Jan 22 12:12:11 crc kubenswrapper[4773]: I0122 12:12:11.005991 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-6b8bc8d87d-t4ws7"] Jan 22 12:12:11 crc kubenswrapper[4773]: I0122 12:12:11.019732 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-rjsjw" event={"ID":"be3892a0-8b94-459b-9d05-9aae47107554","Type":"ContainerStarted","Data":"4edf29ac5188fe08553060745d160d9f51c07d0fa413670e6352f9aa9f178cf0"} Jan 22 12:12:11 crc kubenswrapper[4773]: I0122 12:12:11.021044 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7mdlm" event={"ID":"b6ef28d3-92bf-43a7-a577-c4ac162ab48a","Type":"ContainerStarted","Data":"3c3cf5303dce455521414d91adc44dcca6824bf8fa0d0a099fa180de7f4caa9f"} Jan 22 12:12:11 crc kubenswrapper[4773]: W0122 12:12:11.028878 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcfe59bee_7ac2_4f1f_ac59_ab2ae4b1c24e.slice/crio-8289061ffe07ea8f83ab74a82b43b6c5af510a230150ed024e97d32925fd8545 WatchSource:0}: Error finding container 8289061ffe07ea8f83ab74a82b43b6c5af510a230150ed024e97d32925fd8545: Status 404 returned error can't find the container with id 8289061ffe07ea8f83ab74a82b43b6c5af510a230150ed024e97d32925fd8545 Jan 22 12:12:11 crc kubenswrapper[4773]: I0122 12:12:11.054467 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-55db956ddc-lx75p"] Jan 22 12:12:11 crc kubenswrapper[4773]: I0122 12:12:11.062198 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-78c6999f6f-sd745"] Jan 22 12:12:11 crc kubenswrapper[4773]: W0122 12:12:11.066937 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode817d93c_c5c3_44ed_92aa_e761dda7eaa9.slice/crio-3b1cc38df82a495907a568072f11006c7271f9eca5b29a6b1e1ac055a30043dc 
WatchSource:0}: Error finding container 3b1cc38df82a495907a568072f11006c7271f9eca5b29a6b1e1ac055a30043dc: Status 404 returned error can't find the container with id 3b1cc38df82a495907a568072f11006c7271f9eca5b29a6b1e1ac055a30043dc Jan 22 12:12:11 crc kubenswrapper[4773]: I0122 12:12:11.071747 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5d8f59fb49-vlvhw"] Jan 22 12:12:11 crc kubenswrapper[4773]: W0122 12:12:11.079600 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9731c1da_ba60_4e4a_af76_4e870c0f6e35.slice/crio-6d1761deb426ed493a45ccd92886bcc38c4857669e3030de1962364c5820910b WatchSource:0}: Error finding container 6d1761deb426ed493a45ccd92886bcc38c4857669e3030de1962364c5820910b: Status 404 returned error can't find the container with id 6d1761deb426ed493a45ccd92886bcc38c4857669e3030de1962364c5820910b Jan 22 12:12:11 crc kubenswrapper[4773]: I0122 12:12:11.081391 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-547cbdb99f-msvwn"] Jan 22 12:12:11 crc kubenswrapper[4773]: I0122 12:12:11.087995 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgnch"] Jan 22 12:12:11 crc kubenswrapper[4773]: W0122 12:12:11.088628 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd4fe0850_2785_4433_8e0a_28efdea91b64.slice/crio-22b82815d449b60ffe5442a5dcd3c52e121ce41b6d6975581936d1982b762184 WatchSource:0}: Error finding container 22b82815d449b60ffe5442a5dcd3c52e121ce41b6d6975581936d1982b762184: Status 404 returned error can't find the container with id 22b82815d449b60ffe5442a5dcd3c52e121ce41b6d6975581936d1982b762184 Jan 22 12:12:11 crc kubenswrapper[4773]: W0122 12:12:11.093474 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod592864bf_34a6_4335_9425_72386e772818.slice/crio-93a4fd16b7bae3f0f2fa3bb78106a85a0524d55f9f26413798bb2cb13c24898a WatchSource:0}: Error finding container 93a4fd16b7bae3f0f2fa3bb78106a85a0524d55f9f26413798bb2cb13c24898a: Status 404 returned error can't find the container with id 93a4fd16b7bae3f0f2fa3bb78106a85a0524d55f9f26413798bb2cb13c24898a Jan 22 12:12:11 crc kubenswrapper[4773]: E0122 12:12:11.098663 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:65cfe5b9d5b0571aaf8ff9840b12cc56e90ca4cef162dd260c3a9fa2b52c6dd0,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4xsjs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-5d646b7d76-h6ggd_openstack-operators(592864bf-34a6-4335-9425-72386e772818): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 22 12:12:11 crc kubenswrapper[4773]: E0122 12:12:11.100071 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-h6ggd" podUID="592864bf-34a6-4335-9425-72386e772818" Jan 22 12:12:11 crc kubenswrapper[4773]: I0122 12:12:11.106642 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-69797bbcbd-9gcmw"] Jan 22 12:12:11 crc kubenswrapper[4773]: E0122 12:12:11.109425 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:2f9a2f064448faebbae58f52d564dc0e8e39bed0fc12bd6b9fe925e42f1b5492,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nwdqr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-594c8c9d5d-br2hg_openstack-operators(54683bfd-5bbf-47c6-9f3f-5bc4583ef5f0): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 22 12:12:11 crc kubenswrapper[4773]: E0122 12:12:11.110573 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-br2hg" podUID="54683bfd-5bbf-47c6-9f3f-5bc4583ef5f0" Jan 22 12:12:11 crc kubenswrapper[4773]: E0122 12:12:11.118470 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:a8fc8f9d445b1232f446119015b226008b07c6a259f5bebc1fcbb39ec310afe5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zn62v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-7bd9774b6-qwf6d_openstack-operators(85ad5e61-8c92-4856-a34c-7d02aadbbc43): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 22 12:12:11 crc kubenswrapper[4773]: E0122 12:12:11.119964 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-qwf6d" podUID="85ad5e61-8c92-4856-a34c-7d02aadbbc43" Jan 22 12:12:11 crc kubenswrapper[4773]: W0122 12:12:11.120695 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod02d2e417_7591_4d07_850e_4c670b40d1ea.slice/crio-b581035e9c6456a7b8e7d06bf87a59da2d689832da8dc6c938a809e4d9e395bb WatchSource:0}: Error finding container b581035e9c6456a7b8e7d06bf87a59da2d689832da8dc6c938a809e4d9e395bb: Status 404 returned error can't find the container with id b581035e9c6456a7b8e7d06bf87a59da2d689832da8dc6c938a809e4d9e395bb Jan 22 12:12:11 crc kubenswrapper[4773]: I0122 12:12:11.125191 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5d646b7d76-h6ggd"] Jan 22 12:12:11 crc kubenswrapper[4773]: I0122 12:12:11.141422 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5ffb9c6597-ptq9w"] Jan 22 12:12:11 crc kubenswrapper[4773]: E0122 12:12:11.152027 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zp4bv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-b8b6d4659-x6894_openstack-operators(02d2e417-7591-4d07-850e-4c670b40d1ea): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 22 12:12:11 crc kubenswrapper[4773]: E0122 12:12:11.153306 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-x6894" podUID="02d2e417-7591-4d07-850e-4c670b40d1ea" Jan 22 12:12:11 crc kubenswrapper[4773]: I0122 12:12:11.172546 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-br2hg"] Jan 22 12:12:11 crc kubenswrapper[4773]: I0122 12:12:11.175816 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-b8b6d4659-x6894"] Jan 22 12:12:11 crc kubenswrapper[4773]: I0122 12:12:11.194837 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7bd9774b6-qwf6d"] Jan 22 12:12:11 crc kubenswrapper[4773]: I0122 12:12:11.766228 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert\") pod \"openstack-baremetal-operator-controller-manager-7c9c58b55799pgj\" (UID: \"fd720cc2-9948-4a4d-951f-17a20558e0e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" Jan 22 12:12:11 crc kubenswrapper[4773]: E0122 12:12:11.766917 4773 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 12:12:11 crc kubenswrapper[4773]: E0122 12:12:11.767002 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert podName:fd720cc2-9948-4a4d-951f-17a20558e0e2 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:15.766982016 +0000 UTC m=+1043.345097841 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert") pod "openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" (UID: "fd720cc2-9948-4a4d-951f-17a20558e0e2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 12:12:11 crc kubenswrapper[4773]: I0122 12:12:11.969656 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:11 crc kubenswrapper[4773]: I0122 12:12:11.969724 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:11 crc kubenswrapper[4773]: E0122 12:12:11.969794 4773 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 22 12:12:11 crc kubenswrapper[4773]: E0122 12:12:11.969849 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs podName:4947f3f4-af75-45ed-9481-f4c8f3e525d8 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:15.96983291 +0000 UTC m=+1043.547948735 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs") pod "openstack-operator-controller-manager-788c8b99b5-fh84n" (UID: "4947f3f4-af75-45ed-9481-f4c8f3e525d8") : secret "metrics-server-cert" not found Jan 22 12:12:11 crc kubenswrapper[4773]: E0122 12:12:11.969959 4773 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 22 12:12:11 crc kubenswrapper[4773]: E0122 12:12:11.969988 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs podName:4947f3f4-af75-45ed-9481-f4c8f3e525d8 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:15.969980154 +0000 UTC m=+1043.548095979 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs") pod "openstack-operator-controller-manager-788c8b99b5-fh84n" (UID: "4947f3f4-af75-45ed-9481-f4c8f3e525d8") : secret "webhook-server-cert" not found Jan 22 12:12:12 crc kubenswrapper[4773]: I0122 12:12:12.034275 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-qwf6d" event={"ID":"85ad5e61-8c92-4856-a34c-7d02aadbbc43","Type":"ContainerStarted","Data":"e29ecec3b3070b723687475a87df4c44b1306e02a459bd033481bce06f88d00f"} Jan 22 12:12:12 crc kubenswrapper[4773]: E0122 12:12:12.039329 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:a8fc8f9d445b1232f446119015b226008b07c6a259f5bebc1fcbb39ec310afe5\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-qwf6d" podUID="85ad5e61-8c92-4856-a34c-7d02aadbbc43" Jan 22 12:12:12 crc kubenswrapper[4773]: I0122 12:12:12.043097 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-rx8n5" event={"ID":"d4fe0850-2785-4433-8e0a-28efdea91b64","Type":"ContainerStarted","Data":"22b82815d449b60ffe5442a5dcd3c52e121ce41b6d6975581936d1982b762184"} Jan 22 12:12:12 crc kubenswrapper[4773]: I0122 12:12:12.044325 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-br2hg" event={"ID":"54683bfd-5bbf-47c6-9f3f-5bc4583ef5f0","Type":"ContainerStarted","Data":"875f065c8d44cb4b45956784e27f0fc3a8e3feb980e4012ea9e81d7868d9a3db"} Jan 22 12:12:12 crc kubenswrapper[4773]: E0122 12:12:12.048487 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:2f9a2f064448faebbae58f52d564dc0e8e39bed0fc12bd6b9fe925e42f1b5492\\\"\"" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-br2hg" podUID="54683bfd-5bbf-47c6-9f3f-5bc4583ef5f0" Jan 22 12:12:12 crc kubenswrapper[4773]: I0122 12:12:12.048652 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-x6894" event={"ID":"02d2e417-7591-4d07-850e-4c670b40d1ea","Type":"ContainerStarted","Data":"b581035e9c6456a7b8e7d06bf87a59da2d689832da8dc6c938a809e4d9e395bb"} Jan 22 12:12:12 crc kubenswrapper[4773]: E0122 12:12:12.054519 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-x6894" podUID="02d2e417-7591-4d07-850e-4c670b40d1ea" Jan 22 12:12:12 crc kubenswrapper[4773]: I0122 12:12:12.055470 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgnch" event={"ID":"7da5c99e-8a28-4671-b7fb-43ec8b4d6faf","Type":"ContainerStarted","Data":"aab4ab9c5862b27dd5a8d5b8c8002be3baae361db2c30a2db88dc1b46e84e78b"} Jan 22 12:12:12 crc kubenswrapper[4773]: I0122 12:12:12.060598 4773 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-h6ggd" event={"ID":"592864bf-34a6-4335-9425-72386e772818","Type":"ContainerStarted","Data":"93a4fd16b7bae3f0f2fa3bb78106a85a0524d55f9f26413798bb2cb13c24898a"} Jan 22 12:12:12 crc kubenswrapper[4773]: E0122 12:12:12.063807 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:65cfe5b9d5b0571aaf8ff9840b12cc56e90ca4cef162dd260c3a9fa2b52c6dd0\\\"\"" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-h6ggd" podUID="592864bf-34a6-4335-9425-72386e772818" Jan 22 12:12:12 crc kubenswrapper[4773]: I0122 12:12:12.065470 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-9gcmw" event={"ID":"e817d93c-c5c3-44ed-92aa-e761dda7eaa9","Type":"ContainerStarted","Data":"3b1cc38df82a495907a568072f11006c7271f9eca5b29a6b1e1ac055a30043dc"} Jan 22 12:12:12 crc kubenswrapper[4773]: I0122 12:12:12.071027 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-lx75p" event={"ID":"813b6b20-e15b-4da5-959a-d719f973a4e5","Type":"ContainerStarted","Data":"3f8587f8bbe6a2b214298127cfda867bb9b3d828c0d1c90139bbf2342caee41d"} Jan 22 12:12:12 crc kubenswrapper[4773]: I0122 12:12:12.077415 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-sd745" event={"ID":"cfe59bee-7ac2-4f1f-ac59-ab2ae4b1c24e","Type":"ContainerStarted","Data":"8289061ffe07ea8f83ab74a82b43b6c5af510a230150ed024e97d32925fd8545"} Jan 22 12:12:12 crc kubenswrapper[4773]: I0122 12:12:12.085775 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-ptq9w" event={"ID":"9731c1da-ba60-4e4a-af76-4e870c0f6e35","Type":"ContainerStarted","Data":"6d1761deb426ed493a45ccd92886bcc38c4857669e3030de1962364c5820910b"} Jan 22 12:12:12 crc kubenswrapper[4773]: I0122 12:12:12.104577 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-msvwn" event={"ID":"7e2cd62a-874a-4d1b-a706-439c0e7756c0","Type":"ContainerStarted","Data":"90fa7b0d3d01ca231fbd0f53dbe57398108707b0c8e64d3a6559285a353dcbce"} Jan 22 12:12:12 crc kubenswrapper[4773]: I0122 12:12:12.114743 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-t4ws7" event={"ID":"639db363-b628-4c24-be20-57a1bf05c986","Type":"ContainerStarted","Data":"7916c0d83e886f0fa87407cbcadecfa9b80d2b454258c6221f11b29a5d205ea2"} Jan 22 12:12:12 crc kubenswrapper[4773]: I0122 12:12:12.116689 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-vlvhw" event={"ID":"6e27f8a3-a214-47ec-9027-1a503d588e59","Type":"ContainerStarted","Data":"ce77acd4346c0c87483399ed8a9caf3b30a0660341b9f3fe759ba409d5f64228"} Jan 22 12:12:13 crc kubenswrapper[4773]: E0122 12:12:13.228628 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349\\\"\"" 
pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-x6894" podUID="02d2e417-7591-4d07-850e-4c670b40d1ea" Jan 22 12:12:13 crc kubenswrapper[4773]: E0122 12:12:13.229387 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:65cfe5b9d5b0571aaf8ff9840b12cc56e90ca4cef162dd260c3a9fa2b52c6dd0\\\"\"" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-h6ggd" podUID="592864bf-34a6-4335-9425-72386e772818" Jan 22 12:12:13 crc kubenswrapper[4773]: E0122 12:12:13.229448 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:a8fc8f9d445b1232f446119015b226008b07c6a259f5bebc1fcbb39ec310afe5\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-qwf6d" podUID="85ad5e61-8c92-4856-a34c-7d02aadbbc43" Jan 22 12:12:13 crc kubenswrapper[4773]: E0122 12:12:13.230358 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:2f9a2f064448faebbae58f52d564dc0e8e39bed0fc12bd6b9fe925e42f1b5492\\\"\"" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-br2hg" podUID="54683bfd-5bbf-47c6-9f3f-5bc4583ef5f0" Jan 22 12:12:14 crc kubenswrapper[4773]: I0122 12:12:14.710412 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-dlzxj\" (UID: \"29637656-53e6-4957-88ea-2445b706ec08\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj" Jan 22 12:12:14 crc kubenswrapper[4773]: E0122 12:12:14.711149 4773 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 22 12:12:14 crc kubenswrapper[4773]: E0122 12:12:14.711193 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert podName:29637656-53e6-4957-88ea-2445b706ec08 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:22.71117933 +0000 UTC m=+1050.289295155 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert") pod "infra-operator-controller-manager-54ccf4f85d-dlzxj" (UID: "29637656-53e6-4957-88ea-2445b706ec08") : secret "infra-operator-webhook-server-cert" not found Jan 22 12:12:15 crc kubenswrapper[4773]: I0122 12:12:15.846594 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert\") pod \"openstack-baremetal-operator-controller-manager-7c9c58b55799pgj\" (UID: \"fd720cc2-9948-4a4d-951f-17a20558e0e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" Jan 22 12:12:15 crc kubenswrapper[4773]: E0122 12:12:15.846928 4773 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 12:12:15 crc kubenswrapper[4773]: E0122 12:12:15.848791 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert podName:fd720cc2-9948-4a4d-951f-17a20558e0e2 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:23.848760953 +0000 UTC m=+1051.426876778 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert") pod "openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" (UID: "fd720cc2-9948-4a4d-951f-17a20558e0e2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 12:12:16 crc kubenswrapper[4773]: I0122 12:12:16.051034 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:16 crc kubenswrapper[4773]: I0122 12:12:16.051102 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:16 crc kubenswrapper[4773]: E0122 12:12:16.051262 4773 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 22 12:12:16 crc kubenswrapper[4773]: E0122 12:12:16.051307 4773 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 22 12:12:16 crc kubenswrapper[4773]: E0122 12:12:16.051370 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs podName:4947f3f4-af75-45ed-9481-f4c8f3e525d8 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:24.051347249 +0000 UTC m=+1051.629463084 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs") pod "openstack-operator-controller-manager-788c8b99b5-fh84n" (UID: "4947f3f4-af75-45ed-9481-f4c8f3e525d8") : secret "metrics-server-cert" not found Jan 22 12:12:16 crc kubenswrapper[4773]: E0122 12:12:16.051390 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs podName:4947f3f4-af75-45ed-9481-f4c8f3e525d8 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:24.05138219 +0000 UTC m=+1051.629498125 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs") pod "openstack-operator-controller-manager-788c8b99b5-fh84n" (UID: "4947f3f4-af75-45ed-9481-f4c8f3e525d8") : secret "webhook-server-cert" not found Jan 22 12:12:22 crc kubenswrapper[4773]: I0122 12:12:22.732188 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-dlzxj\" (UID: \"29637656-53e6-4957-88ea-2445b706ec08\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj" Jan 22 12:12:22 crc kubenswrapper[4773]: E0122 12:12:22.732372 4773 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 22 12:12:22 crc kubenswrapper[4773]: E0122 12:12:22.733429 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert podName:29637656-53e6-4957-88ea-2445b706ec08 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:38.733410056 +0000 UTC m=+1066.311525931 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert") pod "infra-operator-controller-manager-54ccf4f85d-dlzxj" (UID: "29637656-53e6-4957-88ea-2445b706ec08") : secret "infra-operator-webhook-server-cert" not found Jan 22 12:12:23 crc kubenswrapper[4773]: I0122 12:12:23.850782 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert\") pod \"openstack-baremetal-operator-controller-manager-7c9c58b55799pgj\" (UID: \"fd720cc2-9948-4a4d-951f-17a20558e0e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" Jan 22 12:12:23 crc kubenswrapper[4773]: E0122 12:12:23.850926 4773 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 12:12:23 crc kubenswrapper[4773]: E0122 12:12:23.851105 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert podName:fd720cc2-9948-4a4d-951f-17a20558e0e2 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:39.851053209 +0000 UTC m=+1067.429169034 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert") pod "openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" (UID: "fd720cc2-9948-4a4d-951f-17a20558e0e2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 22 12:12:23 crc kubenswrapper[4773]: E0122 12:12:23.863785 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:9caae9b3ee328df678baa26454e45e47693acdadb27f9c635680597aaec43337" Jan 22 12:12:23 crc kubenswrapper[4773]: E0122 12:12:23.864028 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:9caae9b3ee328df678baa26454e45e47693acdadb27f9c635680597aaec43337,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zczw8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-78fdd796fd-lm8jc_openstack-operators(a1965d1a-cb7f-4da6-90ab-1f75449e3e97): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:12:23 crc kubenswrapper[4773]: E0122 12:12:23.865200 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-lm8jc" podUID="a1965d1a-cb7f-4da6-90ab-1f75449e3e97" Jan 22 12:12:24 crc kubenswrapper[4773]: I0122 12:12:24.054737 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:24 crc kubenswrapper[4773]: I0122 12:12:24.054786 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:24 crc kubenswrapper[4773]: E0122 12:12:24.054954 4773 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 22 12:12:24 crc kubenswrapper[4773]: E0122 12:12:24.055034 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs podName:4947f3f4-af75-45ed-9481-f4c8f3e525d8 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:40.055016255 +0000 UTC m=+1067.633132080 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs") pod "openstack-operator-controller-manager-788c8b99b5-fh84n" (UID: "4947f3f4-af75-45ed-9481-f4c8f3e525d8") : secret "metrics-server-cert" not found Jan 22 12:12:24 crc kubenswrapper[4773]: E0122 12:12:24.054965 4773 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 22 12:12:24 crc kubenswrapper[4773]: E0122 12:12:24.055405 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs podName:4947f3f4-af75-45ed-9481-f4c8f3e525d8 nodeName:}" failed. No retries permitted until 2026-01-22 12:12:40.055391216 +0000 UTC m=+1067.633507041 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs") pod "openstack-operator-controller-manager-788c8b99b5-fh84n" (UID: "4947f3f4-af75-45ed-9481-f4c8f3e525d8") : secret "webhook-server-cert" not found Jan 22 12:12:24 crc kubenswrapper[4773]: E0122 12:12:24.478149 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:9caae9b3ee328df678baa26454e45e47693acdadb27f9c635680597aaec43337\\\"\"" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-lm8jc" podUID="a1965d1a-cb7f-4da6-90ab-1f75449e3e97" Jan 22 12:12:27 crc kubenswrapper[4773]: E0122 12:12:27.065867 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:e950ac2df7be78ae0cbcf62fe12ee7a06b628f1903da6fcb741609e857eb1a7f" Jan 22 12:12:27 crc kubenswrapper[4773]: E0122 12:12:27.066363 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:e950ac2df7be78ae0cbcf62fe12ee7a06b628f1903da6fcb741609e857eb1a7f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9hw2v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
cinder-operator-controller-manager-69cf5d4557-k7bqw_openstack-operators(e0e2887e-f5a6-48e6-862c-593b909d5514): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:12:27 crc kubenswrapper[4773]: E0122 12:12:27.067620 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-k7bqw" podUID="e0e2887e-f5a6-48e6-862c-593b909d5514" Jan 22 12:12:27 crc kubenswrapper[4773]: E0122 12:12:27.499589 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/cinder-operator@sha256:e950ac2df7be78ae0cbcf62fe12ee7a06b628f1903da6fcb741609e857eb1a7f\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-k7bqw" podUID="e0e2887e-f5a6-48e6-862c-593b909d5514" Jan 22 12:12:27 crc kubenswrapper[4773]: E0122 12:12:27.679869 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:c8dde42dafd41026ed2e4cfc26efc0fff63c4ba9d31326ae7dc644ccceaafa9d" Jan 22 12:12:27 crc kubenswrapper[4773]: E0122 12:12:27.680057 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:c8dde42dafd41026ed2e4cfc26efc0fff63c4ba9d31326ae7dc644ccceaafa9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-glhdq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-69797bbcbd-9gcmw_openstack-operators(e817d93c-c5c3-44ed-92aa-e761dda7eaa9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:12:27 crc kubenswrapper[4773]: E0122 12:12:27.681372 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-9gcmw" podUID="e817d93c-c5c3-44ed-92aa-e761dda7eaa9" Jan 22 12:12:28 crc kubenswrapper[4773]: E0122 12:12:28.507084 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:c8dde42dafd41026ed2e4cfc26efc0fff63c4ba9d31326ae7dc644ccceaafa9d\\\"\"" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-9gcmw" podUID="e817d93c-c5c3-44ed-92aa-e761dda7eaa9" Jan 22 12:12:29 crc kubenswrapper[4773]: E0122 12:12:29.476854 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:8bee4480babd6fd8f686e0ba52a304acb6ffb90f09c7c57e7f5df5f7658836d8" Jan 22 12:12:29 crc kubenswrapper[4773]: E0122 12:12:29.477576 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:8bee4480babd6fd8f686e0ba52a304acb6ffb90f09c7c57e7f5df5f7658836d8,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-g2wpc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-78c6999f6f-sd745_openstack-operators(cfe59bee-7ac2-4f1f-ac59-ab2ae4b1c24e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:12:29 crc kubenswrapper[4773]: E0122 12:12:29.478831 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-sd745" podUID="cfe59bee-7ac2-4f1f-ac59-ab2ae4b1c24e" Jan 22 12:12:29 crc kubenswrapper[4773]: E0122 12:12:29.513861 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:8bee4480babd6fd8f686e0ba52a304acb6ffb90f09c7c57e7f5df5f7658836d8\\\"\"" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-sd745" podUID="cfe59bee-7ac2-4f1f-ac59-ab2ae4b1c24e" Jan 22 12:12:30 crc kubenswrapper[4773]: E0122 12:12:30.240811 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:ff0b6c27e2d96afccd73fbbb5b5297a3f60c7f4f1dfd2a877152466697018d71" Jan 22 12:12:30 crc kubenswrapper[4773]: E0122 12:12:30.241000 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:ff0b6c27e2d96afccd73fbbb5b5297a3f60c7f4f1dfd2a877152466697018d71,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gb9s9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-c87fff755-rjsjw_openstack-operators(be3892a0-8b94-459b-9d05-9aae47107554): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:12:30 crc kubenswrapper[4773]: E0122 12:12:30.242200 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-rjsjw" podUID="be3892a0-8b94-459b-9d05-9aae47107554" Jan 22 12:12:30 crc kubenswrapper[4773]: E0122 12:12:30.519590 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:ff0b6c27e2d96afccd73fbbb5b5297a3f60c7f4f1dfd2a877152466697018d71\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-rjsjw" podUID="be3892a0-8b94-459b-9d05-9aae47107554" Jan 22 12:12:30 crc kubenswrapper[4773]: E0122 12:12:30.800948 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922" Jan 22 12:12:30 crc kubenswrapper[4773]: E0122 12:12:30.801143 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-g2njq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-547cbdb99f-msvwn_openstack-operators(7e2cd62a-874a-4d1b-a706-439c0e7756c0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:12:30 crc kubenswrapper[4773]: E0122 12:12:30.802818 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-msvwn" podUID="7e2cd62a-874a-4d1b-a706-439c0e7756c0" Jan 22 12:12:31 crc kubenswrapper[4773]: E0122 12:12:31.527223 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922\\\"\"" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-msvwn" podUID="7e2cd62a-874a-4d1b-a706-439c0e7756c0" Jan 22 12:12:34 crc kubenswrapper[4773]: E0122 12:12:34.269169 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:2d6d13b3c28e45c6bec980b8808dda8da4723ae87e66d04f53d52c3b3c51612b" Jan 22 12:12:34 crc kubenswrapper[4773]: E0122 12:12:34.269677 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:2d6d13b3c28e45c6bec980b8808dda8da4723ae87e66d04f53d52c3b3c51612b,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-c6kkw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-5ffb9c6597-ptq9w_openstack-operators(9731c1da-ba60-4e4a-af76-4e870c0f6e35): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:12:34 crc kubenswrapper[4773]: E0122 12:12:34.270866 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-ptq9w" podUID="9731c1da-ba60-4e4a-af76-4e870c0f6e35" Jan 22 12:12:34 crc kubenswrapper[4773]: E0122 12:12:34.541328 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:2d6d13b3c28e45c6bec980b8808dda8da4723ae87e66d04f53d52c3b3c51612b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-ptq9w" podUID="9731c1da-ba60-4e4a-af76-4e870c0f6e35" Jan 22 12:12:36 crc kubenswrapper[4773]: E0122 12:12:36.582045 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Jan 22 12:12:36 crc kubenswrapper[4773]: E0122 12:12:36.583010 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9n79x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-qgnch_openstack-operators(7da5c99e-8a28-4671-b7fb-43ec8b4d6faf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:12:36 crc kubenswrapper[4773]: E0122 12:12:36.584258 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgnch" podUID="7da5c99e-8a28-4671-b7fb-43ec8b4d6faf" Jan 22 12:12:37 crc kubenswrapper[4773]: E0122 12:12:37.113552 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:4e995cfa360a9d595a01b9c0541ab934692f2374203cb5738127dd784f793831" Jan 22 12:12:37 crc kubenswrapper[4773]: E0122 12:12:37.114515 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:4e995cfa360a9d595a01b9c0541ab934692f2374203cb5738127dd784f793831,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-52c22,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-6b8bc8d87d-t4ws7_openstack-operators(639db363-b628-4c24-be20-57a1bf05c986): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:12:37 crc kubenswrapper[4773]: E0122 12:12:37.115799 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-t4ws7" podUID="639db363-b628-4c24-be20-57a1bf05c986" Jan 22 12:12:37 crc kubenswrapper[4773]: E0122 12:12:37.561808 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgnch" podUID="7da5c99e-8a28-4671-b7fb-43ec8b4d6faf" Jan 22 12:12:37 crc kubenswrapper[4773]: E0122 12:12:37.563364 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:4e995cfa360a9d595a01b9c0541ab934692f2374203cb5738127dd784f793831\\\"\"" pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-t4ws7" 
podUID="639db363-b628-4c24-be20-57a1bf05c986" Jan 22 12:12:38 crc kubenswrapper[4773]: I0122 12:12:38.778597 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-dlzxj\" (UID: \"29637656-53e6-4957-88ea-2445b706ec08\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj" Jan 22 12:12:38 crc kubenswrapper[4773]: I0122 12:12:38.784137 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/29637656-53e6-4957-88ea-2445b706ec08-cert\") pod \"infra-operator-controller-manager-54ccf4f85d-dlzxj\" (UID: \"29637656-53e6-4957-88ea-2445b706ec08\") " pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj" Jan 22 12:12:38 crc kubenswrapper[4773]: I0122 12:12:38.855163 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj" Jan 22 12:12:39 crc kubenswrapper[4773]: I0122 12:12:39.658884 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 12:12:39 crc kubenswrapper[4773]: I0122 12:12:39.895750 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert\") pod \"openstack-baremetal-operator-controller-manager-7c9c58b55799pgj\" (UID: \"fd720cc2-9948-4a4d-951f-17a20558e0e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" Jan 22 12:12:39 crc kubenswrapper[4773]: I0122 12:12:39.901761 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fd720cc2-9948-4a4d-951f-17a20558e0e2-cert\") pod \"openstack-baremetal-operator-controller-manager-7c9c58b55799pgj\" (UID: \"fd720cc2-9948-4a4d-951f-17a20558e0e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" Jan 22 12:12:40 crc kubenswrapper[4773]: I0122 12:12:40.002058 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" Jan 22 12:12:40 crc kubenswrapper[4773]: I0122 12:12:40.099317 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:40 crc kubenswrapper[4773]: I0122 12:12:40.099367 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:40 crc kubenswrapper[4773]: I0122 12:12:40.104558 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-metrics-certs\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:40 crc kubenswrapper[4773]: I0122 12:12:40.106375 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4947f3f4-af75-45ed-9481-f4c8f3e525d8-webhook-certs\") pod \"openstack-operator-controller-manager-788c8b99b5-fh84n\" (UID: \"4947f3f4-af75-45ed-9481-f4c8f3e525d8\") " pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:40 crc kubenswrapper[4773]: I0122 12:12:40.206382 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:46 crc kubenswrapper[4773]: I0122 12:12:46.492614 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n"] Jan 22 12:12:46 crc kubenswrapper[4773]: W0122 12:12:46.515330 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4947f3f4_af75_45ed_9481_f4c8f3e525d8.slice/crio-33b20033a6245feeeaa1a759f38a8e6f6fefb13f6ad2d1a9af6e86106415cc5f WatchSource:0}: Error finding container 33b20033a6245feeeaa1a759f38a8e6f6fefb13f6ad2d1a9af6e86106415cc5f: Status 404 returned error can't find the container with id 33b20033a6245feeeaa1a759f38a8e6f6fefb13f6ad2d1a9af6e86106415cc5f Jan 22 12:12:46 crc kubenswrapper[4773]: I0122 12:12:46.554215 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj"] Jan 22 12:12:46 crc kubenswrapper[4773]: I0122 12:12:46.625488 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" event={"ID":"fd720cc2-9948-4a4d-951f-17a20558e0e2","Type":"ContainerStarted","Data":"fe45f4b5658d975eea522007221385cecba07db75b3c1494695710b7ead02c50"} Jan 22 12:12:46 crc kubenswrapper[4773]: I0122 12:12:46.627005 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" event={"ID":"4947f3f4-af75-45ed-9481-f4c8f3e525d8","Type":"ContainerStarted","Data":"33b20033a6245feeeaa1a759f38a8e6f6fefb13f6ad2d1a9af6e86106415cc5f"} Jan 22 12:12:46 crc kubenswrapper[4773]: I0122 12:12:46.637732 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj"] Jan 22 12:12:46 crc kubenswrapper[4773]: W0122 12:12:46.639047 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod29637656_53e6_4957_88ea_2445b706ec08.slice/crio-ebc852ba4bcc399f90507c0036bed27bd731e37e54b07fdfd8f28ad10972c9ed WatchSource:0}: Error finding container ebc852ba4bcc399f90507c0036bed27bd731e37e54b07fdfd8f28ad10972c9ed: Status 404 returned error can't find the container with id ebc852ba4bcc399f90507c0036bed27bd731e37e54b07fdfd8f28ad10972c9ed Jan 22 12:12:47 crc kubenswrapper[4773]: I0122 12:12:47.641718 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj" event={"ID":"29637656-53e6-4957-88ea-2445b706ec08","Type":"ContainerStarted","Data":"ebc852ba4bcc399f90507c0036bed27bd731e37e54b07fdfd8f28ad10972c9ed"} Jan 22 12:12:47 crc kubenswrapper[4773]: E0122 12:12:47.833703 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349" Jan 22 12:12:47 crc kubenswrapper[4773]: E0122 12:12:47.833849 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zp4bv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-b8b6d4659-x6894_openstack-operators(02d2e417-7591-4d07-850e-4c670b40d1ea): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:12:47 crc kubenswrapper[4773]: E0122 12:12:47.835581 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-x6894" podUID="02d2e417-7591-4d07-850e-4c670b40d1ea" Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.747511 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-rx8n5" event={"ID":"d4fe0850-2785-4433-8e0a-28efdea91b64","Type":"ContainerStarted","Data":"ba69b193e0a1cc79c2030d2429def64103cd56c2c9bddadab4f384732e7483cc"} Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.749524 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-rx8n5" Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.751889 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ztsqs" event={"ID":"9aeccfa1-86f0-47e3-96c7-e0d018d24537","Type":"ContainerStarted","Data":"f2bc2d3a0bdc5dd56fc7e84346f5c652dd6665ae8ae78c354c2d0de78c5fc4ec"} Jan 22 
12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.752846 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ztsqs"
Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.755141 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-vkt5m" event={"ID":"4429281b-22db-4df9-8c28-bb30e527b1f6","Type":"ContainerStarted","Data":"64c08c42f046aa6dcca461f19dca58c06d0535b7cf093293ba6a3747ab3b0cfb"}
Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.755868 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-vkt5m"
Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.758663 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-qwf6d" event={"ID":"85ad5e61-8c92-4856-a34c-7d02aadbbc43","Type":"ContainerStarted","Data":"e73ec0d1bd2796a3077594a03255dbb18b45864895dd4d56508cd780094cd1af"}
Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.759493 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-qwf6d"
Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.761947 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-br2hg" event={"ID":"54683bfd-5bbf-47c6-9f3f-5bc4583ef5f0","Type":"ContainerStarted","Data":"20ecfef70c905a73bfa12e3334aaecd904f5216b01af07b0952f35c77aec9a54"}
Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.762664 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-br2hg"
Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.764958 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-lx75p" event={"ID":"813b6b20-e15b-4da5-959a-d719f973a4e5","Type":"ContainerStarted","Data":"e64e568c216da4d461f39b43f7e6576955c6b8ca8cd36d01dc49e4b0db36c40a"}
Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.765635 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-lx75p"
Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.768340 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-vlvhw" event={"ID":"6e27f8a3-a214-47ec-9027-1a503d588e59","Type":"ContainerStarted","Data":"58bf1a518b3ea68a0eb7e2e3c31a7404513267b1f1a49038af704f313a1f4b49"}
Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.769069 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-vlvhw"
Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.771315 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-9nlp4" event={"ID":"73fae2b3-a45d-431f-9113-7f669d5eff6d","Type":"ContainerStarted","Data":"038040c016e548e94081453d3dd9e2e5c82ba62cbb39b7c1eb35ee8abb81e817"}
Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.772056 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-9nlp4"
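
The ErrImagePull entries earlier in this window all carry the same gRPC status from the CRI runtime: code = Canceled, desc = "copying config: context canceled". The pull RPC was cancelled mid-transfer, the kubelet records ErrImagePull for that sync, and later syncs report ImagePullBackOff for the same image until a retry completes; the ContainerStarted events above and below show each back-off eventually resolving. A minimal sketch (assuming the google.golang.org/grpc module; this is illustrative, not kubelet source) of classifying such a pull error as retryable versus likely permanent:

```go
// Sketch: distinguish the transient "context canceled" pull failure seen in
// this log from failures that retrying cannot fix.
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

func classifyPullError(err error) string {
	st, ok := status.FromError(err)
	if !ok {
		return "non-gRPC error: " + err.Error()
	}
	switch st.Code() {
	case codes.Canceled, codes.DeadlineExceeded:
		// The pull's context was cancelled or timed out mid-copy; a retry
		// with back-off (ImagePullBackOff) is the expected recovery path.
		return "transient: retry with backoff"
	case codes.NotFound, codes.Unauthenticated, codes.PermissionDenied:
		return "likely permanent: check image reference and pull secrets"
	default:
		return "other: " + st.Code().String()
	}
}

func main() {
	// The exact status reported by the PullImage entries above.
	err := status.Error(codes.Canceled, "copying config: context canceled")
	fmt.Println(classifyPullError(err)) // transient: retry with backoff
}
```

Treating Canceled and DeadlineExceeded as transient matches what this log shows: the kubelet keeps backing off and retrying rather than failing the pods, and every affected operator container starts within the next minute.
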
Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.789933 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7mdlm" event={"ID":"b6ef28d3-92bf-43a7-a577-c4ac162ab48a","Type":"ContainerStarted","Data":"8fdf6e8db6e87beb57b94687a56f014d9877622488075b42594e8726b4104cd5"} Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.791150 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7mdlm" Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.794817 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-h6ggd" event={"ID":"592864bf-34a6-4335-9425-72386e772818","Type":"ContainerStarted","Data":"df48dc1a543df8333b2b59494d3ec5aaee3951763da93928fb804edc08e9cfee"} Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.795409 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-h6ggd" Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.828160 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" event={"ID":"4947f3f4-af75-45ed-9481-f4c8f3e525d8","Type":"ContainerStarted","Data":"25ffd870d5c5e1e083ad10434c6bea42d925503afe169c57e3f86b7cd9f98f6d"} Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.829064 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.880754 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-rx8n5" podStartSLOduration=17.414837498 podStartE2EDuration="42.880737639s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:11.09454116 +0000 UTC m=+1038.672656985" lastFinishedPulling="2026-01-22 12:12:36.560441301 +0000 UTC m=+1064.138557126" observedRunningTime="2026-01-22 12:12:48.857889455 +0000 UTC m=+1076.436005280" watchObservedRunningTime="2026-01-22 12:12:48.880737639 +0000 UTC m=+1076.458853464" Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.909606 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-vkt5m" podStartSLOduration=15.508025736 podStartE2EDuration="42.909582387s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:09.157668685 +0000 UTC m=+1036.735784520" lastFinishedPulling="2026-01-22 12:12:36.559225356 +0000 UTC m=+1064.137341171" observedRunningTime="2026-01-22 12:12:48.896566079 +0000 UTC m=+1076.474681914" watchObservedRunningTime="2026-01-22 12:12:48.909582387 +0000 UTC m=+1076.487698212" Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.909842 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-vlvhw" podStartSLOduration=17.417628959 podStartE2EDuration="42.909836794s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:11.067144195 +0000 UTC m=+1038.645260020" lastFinishedPulling="2026-01-22 12:12:36.55935203 +0000 UTC m=+1064.137467855" observedRunningTime="2026-01-22 
12:12:48.880399459 +0000 UTC m=+1076.458515284" watchObservedRunningTime="2026-01-22 12:12:48.909836794 +0000 UTC m=+1076.487952619" Jan 22 12:12:48 crc kubenswrapper[4773]: I0122 12:12:48.932339 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-qwf6d" podStartSLOduration=8.027874222 podStartE2EDuration="42.932320068s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:11.118318583 +0000 UTC m=+1038.696434408" lastFinishedPulling="2026-01-22 12:12:46.022764419 +0000 UTC m=+1073.600880254" observedRunningTime="2026-01-22 12:12:48.92723893 +0000 UTC m=+1076.505354755" watchObservedRunningTime="2026-01-22 12:12:48.932320068 +0000 UTC m=+1076.510435893" Jan 22 12:12:49 crc kubenswrapper[4773]: I0122 12:12:49.181121 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-lx75p" podStartSLOduration=17.691931566 podStartE2EDuration="43.181105675s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:11.070267733 +0000 UTC m=+1038.648383558" lastFinishedPulling="2026-01-22 12:12:36.559441842 +0000 UTC m=+1064.137557667" observedRunningTime="2026-01-22 12:12:49.178470039 +0000 UTC m=+1076.756585854" watchObservedRunningTime="2026-01-22 12:12:49.181105675 +0000 UTC m=+1076.759221500" Jan 22 12:12:49 crc kubenswrapper[4773]: I0122 12:12:49.210177 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-br2hg" podStartSLOduration=17.760050006 podStartE2EDuration="43.210153299s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:11.109246406 +0000 UTC m=+1038.687362231" lastFinishedPulling="2026-01-22 12:12:36.559349699 +0000 UTC m=+1064.137465524" observedRunningTime="2026-01-22 12:12:49.208361187 +0000 UTC m=+1076.786477012" watchObservedRunningTime="2026-01-22 12:12:49.210153299 +0000 UTC m=+1076.788269124" Jan 22 12:12:49 crc kubenswrapper[4773]: I0122 12:12:49.237185 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-9nlp4" podStartSLOduration=16.098754298 podStartE2EDuration="43.237155224s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:09.42082357 +0000 UTC m=+1036.998939395" lastFinishedPulling="2026-01-22 12:12:36.559224496 +0000 UTC m=+1064.137340321" observedRunningTime="2026-01-22 12:12:49.23291092 +0000 UTC m=+1076.811026745" watchObservedRunningTime="2026-01-22 12:12:49.237155224 +0000 UTC m=+1076.815271049" Jan 22 12:12:49 crc kubenswrapper[4773]: I0122 12:12:49.270460 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ztsqs" podStartSLOduration=16.116062713 podStartE2EDuration="43.270443911s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:09.404972232 +0000 UTC m=+1036.983088057" lastFinishedPulling="2026-01-22 12:12:36.55935343 +0000 UTC m=+1064.137469255" observedRunningTime="2026-01-22 12:12:49.258518645 +0000 UTC m=+1076.836634460" watchObservedRunningTime="2026-01-22 12:12:49.270443911 +0000 UTC m=+1076.848559736" Jan 22 12:12:49 crc kubenswrapper[4773]: I0122 12:12:49.289163 4773 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7mdlm" podStartSLOduration=17.60610142 podStartE2EDuration="43.289144674s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:10.8764585 +0000 UTC m=+1038.454574325" lastFinishedPulling="2026-01-22 12:12:36.559501744 +0000 UTC m=+1064.137617579" observedRunningTime="2026-01-22 12:12:49.287044543 +0000 UTC m=+1076.865160378" watchObservedRunningTime="2026-01-22 12:12:49.289144674 +0000 UTC m=+1076.867260499" Jan 22 12:12:49 crc kubenswrapper[4773]: I0122 12:12:49.446523 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-h6ggd" podStartSLOduration=8.51058253 podStartE2EDuration="43.446503306s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:11.098529323 +0000 UTC m=+1038.676645148" lastFinishedPulling="2026-01-22 12:12:46.034450099 +0000 UTC m=+1073.612565924" observedRunningTime="2026-01-22 12:12:49.442975114 +0000 UTC m=+1077.021090939" watchObservedRunningTime="2026-01-22 12:12:49.446503306 +0000 UTC m=+1077.024619131" Jan 22 12:12:49 crc kubenswrapper[4773]: I0122 12:12:49.497067 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n" podStartSLOduration=42.497049365 podStartE2EDuration="42.497049365s" podCreationTimestamp="2026-01-22 12:12:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:12:49.492658177 +0000 UTC m=+1077.070774012" watchObservedRunningTime="2026-01-22 12:12:49.497049365 +0000 UTC m=+1077.075165190" Jan 22 12:12:49 crc kubenswrapper[4773]: I0122 12:12:49.842167 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-msvwn" event={"ID":"7e2cd62a-874a-4d1b-a706-439c0e7756c0","Type":"ContainerStarted","Data":"0390c6be6037d7b1109970505d0938fb4eb702ef974156b58983525257dba5a8"} Jan 22 12:12:49 crc kubenswrapper[4773]: I0122 12:12:49.842391 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-msvwn" Jan 22 12:12:49 crc kubenswrapper[4773]: I0122 12:12:49.845476 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-rjsjw" event={"ID":"be3892a0-8b94-459b-9d05-9aae47107554","Type":"ContainerStarted","Data":"58d32c63977ceaf9b7d6d39ad7533f0f0d46106a5145084d2682dac9e8b1a0c5"} Jan 22 12:12:49 crc kubenswrapper[4773]: I0122 12:12:49.845686 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-rjsjw" Jan 22 12:12:49 crc kubenswrapper[4773]: I0122 12:12:49.854240 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-k7bqw" event={"ID":"e0e2887e-f5a6-48e6-862c-593b909d5514","Type":"ContainerStarted","Data":"1bf2be4329f1dd596b720609f3c705e48128a5d3401ccbdda5571cfac1b44987"} Jan 22 12:12:49 crc kubenswrapper[4773]: I0122 12:12:49.854521 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-k7bqw" Jan 22 12:12:49 crc kubenswrapper[4773]: I0122 12:12:49.878071 4773 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-9gcmw" event={"ID":"e817d93c-c5c3-44ed-92aa-e761dda7eaa9","Type":"ContainerStarted","Data":"0884ed760467f0ad5318aa9a45bbd5c282e4fff5f84a882b169c9afd7f1066be"} Jan 22 12:12:49 crc kubenswrapper[4773]: I0122 12:12:49.879224 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-9gcmw" Jan 22 12:12:49 crc kubenswrapper[4773]: I0122 12:12:49.881159 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-msvwn" podStartSLOduration=6.889421382 podStartE2EDuration="43.881142015s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:11.038741741 +0000 UTC m=+1038.616857566" lastFinishedPulling="2026-01-22 12:12:48.030462374 +0000 UTC m=+1075.608578199" observedRunningTime="2026-01-22 12:12:49.877555191 +0000 UTC m=+1077.455671036" watchObservedRunningTime="2026-01-22 12:12:49.881142015 +0000 UTC m=+1077.459257840" Jan 22 12:12:49 crc kubenswrapper[4773]: I0122 12:12:49.892703 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-lm8jc" event={"ID":"a1965d1a-cb7f-4da6-90ab-1f75449e3e97","Type":"ContainerStarted","Data":"9f426302ee04a0a0fa6f6957f178505d9852e519f8e61d5abb09c11815d67990"} Jan 22 12:12:49 crc kubenswrapper[4773]: I0122 12:12:49.893959 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-lm8jc" Jan 22 12:12:50 crc kubenswrapper[4773]: I0122 12:12:50.087066 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-k7bqw" podStartSLOduration=5.221273899 podStartE2EDuration="44.087046678s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:09.41162247 +0000 UTC m=+1036.989738295" lastFinishedPulling="2026-01-22 12:12:48.277395249 +0000 UTC m=+1075.855511074" observedRunningTime="2026-01-22 12:12:49.905322327 +0000 UTC m=+1077.483438162" watchObservedRunningTime="2026-01-22 12:12:50.087046678 +0000 UTC m=+1077.665162513" Jan 22 12:12:50 crc kubenswrapper[4773]: I0122 12:12:50.236961 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-sd745" event={"ID":"cfe59bee-7ac2-4f1f-ac59-ab2ae4b1c24e","Type":"ContainerStarted","Data":"e27d48f7cd0333fe9a955b059d5e0751a17445d72509240c20795a1d76bf6d2f"} Jan 22 12:12:50 crc kubenswrapper[4773]: I0122 12:12:50.237431 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-sd745" Jan 22 12:12:50 crc kubenswrapper[4773]: I0122 12:12:50.246437 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-rjsjw" podStartSLOduration=6.890227939 podStartE2EDuration="44.246413708s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:10.916993327 +0000 UTC m=+1038.495109142" lastFinishedPulling="2026-01-22 12:12:48.273179086 +0000 UTC m=+1075.851294911" observedRunningTime="2026-01-22 12:12:50.084612507 +0000 UTC m=+1077.662728332" watchObservedRunningTime="2026-01-22 12:12:50.246413708 
+0000 UTC m=+1077.824529533"
Jan 22 12:12:50 crc kubenswrapper[4773]: I0122 12:12:50.250682 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-ptq9w" event={"ID":"9731c1da-ba60-4e4a-af76-4e870c0f6e35","Type":"ContainerStarted","Data":"06d82e02f72359e488826c1ed7c03ea3bc473dcaece74503473e634fa15269c1"}
Jan 22 12:12:50 crc kubenswrapper[4773]: I0122 12:12:50.266959 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-lm8jc" podStartSLOduration=5.414686732 podStartE2EDuration="44.266938725s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:09.42081588 +0000 UTC m=+1036.998931705" lastFinishedPulling="2026-01-22 12:12:48.273067873 +0000 UTC m=+1075.851183698" observedRunningTime="2026-01-22 12:12:50.23857039 +0000 UTC m=+1077.816686225" watchObservedRunningTime="2026-01-22 12:12:50.266938725 +0000 UTC m=+1077.845054550"
Jan 22 12:12:50 crc kubenswrapper[4773]: I0122 12:12:50.590509 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-9gcmw" podStartSLOduration=7.634375223 podStartE2EDuration="44.590493126s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:11.071771206 +0000 UTC m=+1038.649887031" lastFinishedPulling="2026-01-22 12:12:48.027889109 +0000 UTC m=+1075.606004934" observedRunningTime="2026-01-22 12:12:50.587657483 +0000 UTC m=+1078.165773298" watchObservedRunningTime="2026-01-22 12:12:50.590493126 +0000 UTC m=+1078.168608951"
Jan 22 12:12:50 crc kubenswrapper[4773]: I0122 12:12:50.764893 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-sd745" podStartSLOduration=7.774068736 podStartE2EDuration="44.764871652s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:11.038374211 +0000 UTC m=+1038.616490036" lastFinishedPulling="2026-01-22 12:12:48.029177127 +0000 UTC m=+1075.607292952" observedRunningTime="2026-01-22 12:12:50.758525798 +0000 UTC m=+1078.336641623" watchObservedRunningTime="2026-01-22 12:12:50.764871652 +0000 UTC m=+1078.342987467"
Jan 22 12:12:50 crc kubenswrapper[4773]: I0122 12:12:50.785032 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-ptq9w" podStartSLOduration=6.84792095 podStartE2EDuration="43.785017047s" podCreationTimestamp="2026-01-22 12:12:07 +0000 UTC" firstStartedPulling="2026-01-22 12:12:11.093174481 +0000 UTC m=+1038.671290306" lastFinishedPulling="2026-01-22 12:12:48.030270578 +0000 UTC m=+1075.608386403" observedRunningTime="2026-01-22 12:12:50.781496455 +0000 UTC m=+1078.359612300" watchObservedRunningTime="2026-01-22 12:12:50.785017047 +0000 UTC m=+1078.363132872"
Jan 22 12:12:52 crc kubenswrapper[4773]: I0122 12:12:52.436830 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-t4ws7" event={"ID":"639db363-b628-4c24-be20-57a1bf05c986","Type":"ContainerStarted","Data":"9bc6ba2058c411fb1bfbe02bb3ac57502e1f2eaec6d5a6f8fd8182013069c2e5"}
Jan 22 12:12:52 crc kubenswrapper[4773]: I0122 12:12:52.437347 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-t4ws7"
Jan 22 12:12:52 crc kubenswrapper[4773]: I0122 12:12:52.590892 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-t4ws7" podStartSLOduration=5.94414885 podStartE2EDuration="46.590870887s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:11.035919121 +0000 UTC m=+1038.614034946" lastFinishedPulling="2026-01-22 12:12:51.682641158 +0000 UTC m=+1079.260756983" observedRunningTime="2026-01-22 12:12:52.590377862 +0000 UTC m=+1080.168493697" watchObservedRunningTime="2026-01-22 12:12:52.590870887 +0000 UTC m=+1080.168986712"
Jan 22 12:12:53 crc kubenswrapper[4773]: I0122 12:12:53.489965 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgnch" event={"ID":"7da5c99e-8a28-4671-b7fb-43ec8b4d6faf","Type":"ContainerStarted","Data":"5681c1ed1dceef78dd4b17d1a073e0536a85dfb52bcd59bfd78998981099b9b5"}
Jan 22 12:12:53 crc kubenswrapper[4773]: I0122 12:12:53.514212 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qgnch" podStartSLOduration=4.920993192 podStartE2EDuration="45.514193294s" podCreationTimestamp="2026-01-22 12:12:08 +0000 UTC" firstStartedPulling="2026-01-22 12:12:11.087735797 +0000 UTC m=+1038.665851622" lastFinishedPulling="2026-01-22 12:12:51.680935899 +0000 UTC m=+1079.259051724" observedRunningTime="2026-01-22 12:12:53.512622279 +0000 UTC m=+1081.090738104" watchObservedRunningTime="2026-01-22 12:12:53.514193294 +0000 UTC m=+1081.092309119"
Jan 22 12:12:56 crc kubenswrapper[4773]: I0122 12:12:56.543824 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-69cf5d4557-k7bqw"
Jan 22 12:12:56 crc kubenswrapper[4773]: I0122 12:12:56.660442 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-59dd8b7cbf-9nlp4"
Jan 22 12:12:56 crc kubenswrapper[4773]: I0122 12:12:56.693648 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-vkt5m"
Jan 22 12:12:56 crc kubenswrapper[4773]: I0122 12:12:56.693715 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-ztsqs"
Jan 22 12:12:56 crc kubenswrapper[4773]: I0122 12:12:56.815880 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-69d6c9f5b8-7mdlm"
Jan 22 12:12:56 crc kubenswrapper[4773]: I0122 12:12:56.987402 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-br2hg"
Jan 22 12:12:56 crc kubenswrapper[4773]: I0122 12:12:56.988138 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-lm8jc"
Jan 22 12:12:57 crc kubenswrapper[4773]: I0122 12:12:57.281319 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5d8f59fb49-vlvhw"
Jan 22 12:12:57 crc kubenswrapper[4773]: I0122 12:12:57.282132 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-rjsjw"
Jan 22 12:12:57 crc kubenswrapper[4773]: I0122 12:12:57.299355 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-6b8bc8d87d-t4ws7"
Jan 22 12:12:57 crc kubenswrapper[4773]: I0122 12:12:57.320392 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-7bd9774b6-qwf6d"
Jan 22 12:12:57 crc kubenswrapper[4773]: I0122 12:12:57.539150 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-msvwn"
Jan 22 12:12:57 crc kubenswrapper[4773]: I0122 12:12:57.641732 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-sd745"
Jan 22 12:12:57 crc kubenswrapper[4773]: I0122 12:12:57.698422 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-5d646b7d76-h6ggd"
Jan 22 12:12:58 crc kubenswrapper[4773]: I0122 12:12:58.224435 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-lx75p"
Jan 22 12:12:58 crc kubenswrapper[4773]: I0122 12:12:58.667365 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-rx8n5"
Jan 22 12:12:58 crc kubenswrapper[4773]: I0122 12:12:58.667834 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-9gcmw"
Jan 22 12:12:58 crc kubenswrapper[4773]: I0122 12:12:58.720086 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-ptq9w"
Jan 22 12:12:58 crc kubenswrapper[4773]: I0122 12:12:58.721894 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-5ffb9c6597-ptq9w"
Jan 22 12:12:58 crc kubenswrapper[4773]: I0122 12:12:58.880664 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" event={"ID":"fd720cc2-9948-4a4d-951f-17a20558e0e2","Type":"ContainerStarted","Data":"6ee7cfe36090ce89332aeac045d4b74cb81d65434cb6776739316dced3d4f32b"}
Jan 22 12:12:58 crc kubenswrapper[4773]: I0122 12:12:58.880761 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj"
Jan 22 12:12:58 crc kubenswrapper[4773]: I0122 12:12:58.882475 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj" event={"ID":"29637656-53e6-4957-88ea-2445b706ec08","Type":"ContainerStarted","Data":"537a01f6088a574b468048fb5b42fddf36c4db8f8e064c31967569a5a6c743af"}
Jan 22 12:12:58 crc kubenswrapper[4773]: I0122 12:12:58.882636 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj"
Jan 22 12:12:58 crc kubenswrapper[4773]: I0122 12:12:58.909791 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj" podStartSLOduration=41.00198597 podStartE2EDuration="52.909772253s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:46.562273525 +0000 UTC m=+1074.140389350" lastFinishedPulling="2026-01-22 12:12:58.470059808 +0000 UTC m=+1086.048175633" observedRunningTime="2026-01-22 12:12:58.906989013 +0000 UTC m=+1086.485104848" watchObservedRunningTime="2026-01-22 12:12:58.909772253 +0000 UTC m=+1086.487888078"
Jan 22 12:12:58 crc kubenswrapper[4773]: I0122 12:12:58.930480 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj" podStartSLOduration=41.100812092 podStartE2EDuration="52.930457044s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:46.641745954 +0000 UTC m=+1074.219861769" lastFinishedPulling="2026-01-22 12:12:58.471390896 +0000 UTC m=+1086.049506721" observedRunningTime="2026-01-22 12:12:58.928654062 +0000 UTC m=+1086.506769897" watchObservedRunningTime="2026-01-22 12:12:58.930457044 +0000 UTC m=+1086.508572869"
Jan 22 12:12:59 crc kubenswrapper[4773]: E0122 12:12:59.659884 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-x6894" podUID="02d2e417-7591-4d07-850e-4c670b40d1ea"
Jan 22 12:13:00 crc kubenswrapper[4773]: I0122 12:13:00.212085 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-788c8b99b5-fh84n"
Jan 22 12:13:04 crc kubenswrapper[4773]: I0122 12:13:04.150883 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 12:13:04 crc kubenswrapper[4773]: I0122 12:13:04.151249 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 12:13:08 crc kubenswrapper[4773]: I0122 12:13:08.862028 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-54ccf4f85d-dlzxj"
Jan 22 12:13:10 crc kubenswrapper[4773]: I0122 12:13:10.007451 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c9c58b55799pgj"
Jan 22 12:13:15 crc kubenswrapper[4773]: I0122 12:13:15.019458 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-x6894" event={"ID":"02d2e417-7591-4d07-850e-4c670b40d1ea","Type":"ContainerStarted","Data":"6b901e0413e6cfe6e752cd89b768dae1f3fc455409da0fe1d2889b5a17229e7e"}
Jan 22 12:13:15 crc kubenswrapper[4773]: I0122 12:13:15.020193 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-x6894"
Jan 22 12:13:15 crc kubenswrapper[4773]: I0122 12:13:15.038991 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-x6894" podStartSLOduration=5.4495308 podStartE2EDuration="1m9.038971396s" podCreationTimestamp="2026-01-22 12:12:06 +0000 UTC" firstStartedPulling="2026-01-22 12:12:11.151858661 +0000 UTC m=+1038.729974486" lastFinishedPulling="2026-01-22 12:13:14.741299247 +0000 UTC m=+1102.319415082" observedRunningTime="2026-01-22 12:13:15.036264208 +0000 UTC m=+1102.614380033" watchObservedRunningTime="2026-01-22 12:13:15.038971396 +0000 UTC m=+1102.617087221"
Jan 22 12:13:27 crc kubenswrapper[4773]: I0122 12:13:27.901427 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-x6894"
Jan 22 12:13:34 crc kubenswrapper[4773]: I0122 12:13:34.074261 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 12:13:34 crc kubenswrapper[4773]: I0122 12:13:34.075322 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.208985 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-gj65w"]
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.210740 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-gj65w"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.213841 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-x7l79"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.214093 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.214305 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.214447 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.232154 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-gj65w"]
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.303460 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-2zr2l"]
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.305173 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-2zr2l"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.308037 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.315594 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-2zr2l"]
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.323000 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srjjr\" (UniqueName: \"kubernetes.io/projected/1c3338cd-4c30-4c83-bc6a-1cb155a3eefc-kube-api-access-srjjr\") pod \"dnsmasq-dns-84bb9d8bd9-gj65w\" (UID: \"1c3338cd-4c30-4c83-bc6a-1cb155a3eefc\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-gj65w"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.323114 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c3338cd-4c30-4c83-bc6a-1cb155a3eefc-config\") pod \"dnsmasq-dns-84bb9d8bd9-gj65w\" (UID: \"1c3338cd-4c30-4c83-bc6a-1cb155a3eefc\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-gj65w"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.424174 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srjjr\" (UniqueName: \"kubernetes.io/projected/1c3338cd-4c30-4c83-bc6a-1cb155a3eefc-kube-api-access-srjjr\") pod \"dnsmasq-dns-84bb9d8bd9-gj65w\" (UID: \"1c3338cd-4c30-4c83-bc6a-1cb155a3eefc\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-gj65w"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.424233 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9a0b915-d77d-4124-98b0-30b003c0889d-config\") pod \"dnsmasq-dns-5f854695bc-2zr2l\" (UID: \"c9a0b915-d77d-4124-98b0-30b003c0889d\") " pod="openstack/dnsmasq-dns-5f854695bc-2zr2l"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.424261 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcz9z\" (UniqueName: \"kubernetes.io/projected/c9a0b915-d77d-4124-98b0-30b003c0889d-kube-api-access-mcz9z\") pod \"dnsmasq-dns-5f854695bc-2zr2l\" (UID: \"c9a0b915-d77d-4124-98b0-30b003c0889d\") " pod="openstack/dnsmasq-dns-5f854695bc-2zr2l"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.424434 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c3338cd-4c30-4c83-bc6a-1cb155a3eefc-config\") pod \"dnsmasq-dns-84bb9d8bd9-gj65w\" (UID: \"1c3338cd-4c30-4c83-bc6a-1cb155a3eefc\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-gj65w"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.424485 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c9a0b915-d77d-4124-98b0-30b003c0889d-dns-svc\") pod \"dnsmasq-dns-5f854695bc-2zr2l\" (UID: \"c9a0b915-d77d-4124-98b0-30b003c0889d\") " pod="openstack/dnsmasq-dns-5f854695bc-2zr2l"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.425280 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c3338cd-4c30-4c83-bc6a-1cb155a3eefc-config\") pod \"dnsmasq-dns-84bb9d8bd9-gj65w\" (UID: \"1c3338cd-4c30-4c83-bc6a-1cb155a3eefc\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-gj65w"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.442968 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srjjr\" (UniqueName: \"kubernetes.io/projected/1c3338cd-4c30-4c83-bc6a-1cb155a3eefc-kube-api-access-srjjr\") pod \"dnsmasq-dns-84bb9d8bd9-gj65w\" (UID: \"1c3338cd-4c30-4c83-bc6a-1cb155a3eefc\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-gj65w"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.557501 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-gj65w"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.557593 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9a0b915-d77d-4124-98b0-30b003c0889d-config\") pod \"dnsmasq-dns-5f854695bc-2zr2l\" (UID: \"c9a0b915-d77d-4124-98b0-30b003c0889d\") " pod="openstack/dnsmasq-dns-5f854695bc-2zr2l"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.557645 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcz9z\" (UniqueName: \"kubernetes.io/projected/c9a0b915-d77d-4124-98b0-30b003c0889d-kube-api-access-mcz9z\") pod \"dnsmasq-dns-5f854695bc-2zr2l\" (UID: \"c9a0b915-d77d-4124-98b0-30b003c0889d\") " pod="openstack/dnsmasq-dns-5f854695bc-2zr2l"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.557717 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c9a0b915-d77d-4124-98b0-30b003c0889d-dns-svc\") pod \"dnsmasq-dns-5f854695bc-2zr2l\" (UID: \"c9a0b915-d77d-4124-98b0-30b003c0889d\") " pod="openstack/dnsmasq-dns-5f854695bc-2zr2l"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.559025 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c9a0b915-d77d-4124-98b0-30b003c0889d-dns-svc\") pod \"dnsmasq-dns-5f854695bc-2zr2l\" (UID: \"c9a0b915-d77d-4124-98b0-30b003c0889d\") " pod="openstack/dnsmasq-dns-5f854695bc-2zr2l"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.559321 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9a0b915-d77d-4124-98b0-30b003c0889d-config\") pod \"dnsmasq-dns-5f854695bc-2zr2l\" (UID: \"c9a0b915-d77d-4124-98b0-30b003c0889d\") " pod="openstack/dnsmasq-dns-5f854695bc-2zr2l"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.578610 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcz9z\" (UniqueName: \"kubernetes.io/projected/c9a0b915-d77d-4124-98b0-30b003c0889d-kube-api-access-mcz9z\") pod \"dnsmasq-dns-5f854695bc-2zr2l\" (UID: \"c9a0b915-d77d-4124-98b0-30b003c0889d\") " pod="openstack/dnsmasq-dns-5f854695bc-2zr2l"
Jan 22 12:13:44 crc kubenswrapper[4773]: I0122 12:13:44.629931 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-2zr2l"
Jan 22 12:13:45 crc kubenswrapper[4773]: I0122 12:13:45.461601 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-gj65w"]
Jan 22 12:13:45 crc kubenswrapper[4773]: I0122 12:13:45.555251 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-2zr2l"]
Jan 22 12:13:45 crc kubenswrapper[4773]: W0122 12:13:45.557159 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc9a0b915_d77d_4124_98b0_30b003c0889d.slice/crio-bce71c91100efa5896b4da8815d1e34fad54b051d68421e97be1ac9099902e46 WatchSource:0}: Error finding container bce71c91100efa5896b4da8815d1e34fad54b051d68421e97be1ac9099902e46: Status 404 returned error can't find the container with id bce71c91100efa5896b4da8815d1e34fad54b051d68421e97be1ac9099902e46
Jan 22 12:13:46 crc kubenswrapper[4773]: I0122 12:13:46.241181 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f854695bc-2zr2l" event={"ID":"c9a0b915-d77d-4124-98b0-30b003c0889d","Type":"ContainerStarted","Data":"bce71c91100efa5896b4da8815d1e34fad54b051d68421e97be1ac9099902e46"}
Jan 22 12:13:46 crc kubenswrapper[4773]: I0122 12:13:46.242278 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9d8bd9-gj65w" event={"ID":"1c3338cd-4c30-4c83-bc6a-1cb155a3eefc","Type":"ContainerStarted","Data":"c179e292cfbdfc1da7644e11100ec840da420454c0b637fbb7f61e535eadc360"}
Jan 22 12:13:46 crc kubenswrapper[4773]: I0122 12:13:46.713950 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-2zr2l"]
Jan 22 12:13:46 crc kubenswrapper[4773]: I0122 12:13:46.755682 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-9q826"]
Jan 22 12:13:46 crc kubenswrapper[4773]: I0122 12:13:46.756913 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-9q826"
Jan 22 12:13:46 crc kubenswrapper[4773]: I0122 12:13:46.767200 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-9q826"]
Jan 22 12:13:46 crc kubenswrapper[4773]: I0122 12:13:46.892012 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-669bd\" (UniqueName: \"kubernetes.io/projected/60557b36-ab96-422e-8097-b5fe3a1cf1cf-kube-api-access-669bd\") pod \"dnsmasq-dns-744ffd65bc-9q826\" (UID: \"60557b36-ab96-422e-8097-b5fe3a1cf1cf\") " pod="openstack/dnsmasq-dns-744ffd65bc-9q826"
Jan 22 12:13:46 crc kubenswrapper[4773]: I0122 12:13:46.892101 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60557b36-ab96-422e-8097-b5fe3a1cf1cf-config\") pod \"dnsmasq-dns-744ffd65bc-9q826\" (UID: \"60557b36-ab96-422e-8097-b5fe3a1cf1cf\") " pod="openstack/dnsmasq-dns-744ffd65bc-9q826"
Jan 22 12:13:46 crc kubenswrapper[4773]: I0122 12:13:46.892324 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60557b36-ab96-422e-8097-b5fe3a1cf1cf-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-9q826\" (UID: \"60557b36-ab96-422e-8097-b5fe3a1cf1cf\") " pod="openstack/dnsmasq-dns-744ffd65bc-9q826"
Jan 22 12:13:46 crc kubenswrapper[4773]: I0122 12:13:46.994386 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-669bd\" (UniqueName: \"kubernetes.io/projected/60557b36-ab96-422e-8097-b5fe3a1cf1cf-kube-api-access-669bd\") pod \"dnsmasq-dns-744ffd65bc-9q826\" (UID: \"60557b36-ab96-422e-8097-b5fe3a1cf1cf\") " pod="openstack/dnsmasq-dns-744ffd65bc-9q826"
Jan 22 12:13:46 crc kubenswrapper[4773]: I0122 12:13:46.994496 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60557b36-ab96-422e-8097-b5fe3a1cf1cf-config\") pod \"dnsmasq-dns-744ffd65bc-9q826\" (UID: \"60557b36-ab96-422e-8097-b5fe3a1cf1cf\") " pod="openstack/dnsmasq-dns-744ffd65bc-9q826"
Jan 22 12:13:46 crc kubenswrapper[4773]: I0122 12:13:46.994541 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60557b36-ab96-422e-8097-b5fe3a1cf1cf-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-9q826\" (UID: \"60557b36-ab96-422e-8097-b5fe3a1cf1cf\") " pod="openstack/dnsmasq-dns-744ffd65bc-9q826"
Jan 22 12:13:46 crc kubenswrapper[4773]: I0122 12:13:46.995936 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60557b36-ab96-422e-8097-b5fe3a1cf1cf-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-9q826\" (UID: \"60557b36-ab96-422e-8097-b5fe3a1cf1cf\") " pod="openstack/dnsmasq-dns-744ffd65bc-9q826"
Jan 22 12:13:46 crc kubenswrapper[4773]: I0122 12:13:46.996604 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60557b36-ab96-422e-8097-b5fe3a1cf1cf-config\") pod \"dnsmasq-dns-744ffd65bc-9q826\" (UID: \"60557b36-ab96-422e-8097-b5fe3a1cf1cf\") " pod="openstack/dnsmasq-dns-744ffd65bc-9q826"
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.047726 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-669bd\" (UniqueName: \"kubernetes.io/projected/60557b36-ab96-422e-8097-b5fe3a1cf1cf-kube-api-access-669bd\") pod \"dnsmasq-dns-744ffd65bc-9q826\" (UID: \"60557b36-ab96-422e-8097-b5fe3a1cf1cf\") " pod="openstack/dnsmasq-dns-744ffd65bc-9q826"
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.088563 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-9q826"
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.391411 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-gj65w"]
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.405493 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-dr29n"]
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.406958 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-dr29n"
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.437105 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-dr29n"]
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.538993 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea5454f0-b36b-438b-92dd-25652ac641ae-config\") pod \"dnsmasq-dns-95f5f6995-dr29n\" (UID: \"ea5454f0-b36b-438b-92dd-25652ac641ae\") " pod="openstack/dnsmasq-dns-95f5f6995-dr29n"
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.539086 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkkhv\" (UniqueName: \"kubernetes.io/projected/ea5454f0-b36b-438b-92dd-25652ac641ae-kube-api-access-wkkhv\") pod \"dnsmasq-dns-95f5f6995-dr29n\" (UID: \"ea5454f0-b36b-438b-92dd-25652ac641ae\") " pod="openstack/dnsmasq-dns-95f5f6995-dr29n"
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.539142 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ea5454f0-b36b-438b-92dd-25652ac641ae-dns-svc\") pod \"dnsmasq-dns-95f5f6995-dr29n\" (UID: \"ea5454f0-b36b-438b-92dd-25652ac641ae\") " pod="openstack/dnsmasq-dns-95f5f6995-dr29n"
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.640842 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea5454f0-b36b-438b-92dd-25652ac641ae-config\") pod \"dnsmasq-dns-95f5f6995-dr29n\" (UID: \"ea5454f0-b36b-438b-92dd-25652ac641ae\") " pod="openstack/dnsmasq-dns-95f5f6995-dr29n"
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.640946 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkkhv\" (UniqueName: \"kubernetes.io/projected/ea5454f0-b36b-438b-92dd-25652ac641ae-kube-api-access-wkkhv\") pod \"dnsmasq-dns-95f5f6995-dr29n\" (UID: \"ea5454f0-b36b-438b-92dd-25652ac641ae\") " pod="openstack/dnsmasq-dns-95f5f6995-dr29n"
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.640989 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ea5454f0-b36b-438b-92dd-25652ac641ae-dns-svc\") pod \"dnsmasq-dns-95f5f6995-dr29n\" (UID: \"ea5454f0-b36b-438b-92dd-25652ac641ae\") " pod="openstack/dnsmasq-dns-95f5f6995-dr29n"
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.642449 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ea5454f0-b36b-438b-92dd-25652ac641ae-dns-svc\") pod \"dnsmasq-dns-95f5f6995-dr29n\" (UID: \"ea5454f0-b36b-438b-92dd-25652ac641ae\") " pod="openstack/dnsmasq-dns-95f5f6995-dr29n"
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.642889 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea5454f0-b36b-438b-92dd-25652ac641ae-config\") pod \"dnsmasq-dns-95f5f6995-dr29n\" (UID: \"ea5454f0-b36b-438b-92dd-25652ac641ae\") " pod="openstack/dnsmasq-dns-95f5f6995-dr29n"
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.764073 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkkhv\" (UniqueName: \"kubernetes.io/projected/ea5454f0-b36b-438b-92dd-25652ac641ae-kube-api-access-wkkhv\") pod \"dnsmasq-dns-95f5f6995-dr29n\" (UID: \"ea5454f0-b36b-438b-92dd-25652ac641ae\") " pod="openstack/dnsmasq-dns-95f5f6995-dr29n"
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.985243 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.986709 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.990772 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.990780 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Jan 22 12:13:47 crc kubenswrapper[4773]: I0122 12:13:47.990923 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:47.998096 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-v74vp"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:47.998334 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:47.998385 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:47.998341 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.004470 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.031149 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-dr29n"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.146422 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.146459 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-config-data\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.146485 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.146521 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.146540 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.146561 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.146583 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.146606 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2t2j2\" (UniqueName: \"kubernetes.io/projected/a688a76d-2498-4542-8285-709caf211e8a-kube-api-access-2t2j2\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.146627 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.146642 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a688a76d-2498-4542-8285-709caf211e8a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.146663 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a688a76d-2498-4542-8285-709caf211e8a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.247780 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2t2j2\" (UniqueName: \"kubernetes.io/projected/a688a76d-2498-4542-8285-709caf211e8a-kube-api-access-2t2j2\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.247861 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.247902 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a688a76d-2498-4542-8285-709caf211e8a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.247949 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a688a76d-2498-4542-8285-709caf211e8a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.248033 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.248084 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-config-data\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.248131 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.248204 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.248244 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.248306 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.251066 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.252006 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.252800 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.254202 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-config-data\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.255202 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.257734 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.260149 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.263502 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a688a76d-2498-4542-8285-709caf211e8a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.270967 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a688a76d-2498-4542-8285-709caf211e8a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.285543 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2t2j2\" (UniqueName: \"kubernetes.io/projected/a688a76d-2498-4542-8285-709caf211e8a-kube-api-access-2t2j2\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.336003 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-9q826"]
Jan 22 12:13:48 crc kubenswrapper[4773]: W0122 12:13:48.352431 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod60557b36_ab96_422e_8097_b5fe3a1cf1cf.slice/crio-9c5a35bc0674b6fa452a91aeaf956f416d9a4fb8fe8a8bc1da996b4d3b13b8b2 WatchSource:0}: Error finding container 9c5a35bc0674b6fa452a91aeaf956f416d9a4fb8fe8a8bc1da996b4d3b13b8b2: Status 404 returned error can't find the container with id 9c5a35bc0674b6fa452a91aeaf956f416d9a4fb8fe8a8bc1da996b4d3b13b8b2
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.353038 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.353532 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.373707 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.518323 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.529204 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.533414 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.534070 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-m42kk"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.534207 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.534527 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.535559 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.535600 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.535796 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.537512 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.611947 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.664957 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ccjbz\" (UniqueName: \"kubernetes.io/projected/a4c14d2f-5507-4d08-be37-55d77b5491a3-kube-api-access-ccjbz\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.665018 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a4c14d2f-5507-4d08-be37-55d77b5491a3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.665103 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a4c14d2f-5507-4d08-be37-55d77b5491a3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.665142 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.665176 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.665207 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.665247 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.665271 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.665362 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.665390 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.665424 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.766624 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.766690 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.766713 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.766744 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.766763 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.766789 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.766906 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ccjbz\" (UniqueName: \"kubernetes.io/projected/a4c14d2f-5507-4d08-be37-55d77b5491a3-kube-api-access-ccjbz\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.766967 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a4c14d2f-5507-4d08-be37-55d77b5491a3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.767020 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a4c14d2f-5507-4d08-be37-55d77b5491a3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.767054 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.767103 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.767683 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.767932 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.769980 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.772279 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:48 crc kubenswrapper[4773]: I0122 12:13:48.775050 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:48.962150 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ccjbz\" (UniqueName: \"kubernetes.io/projected/a4c14d2f-5507-4d08-be37-55d77b5491a3-kube-api-access-ccjbz\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:48.962354 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.261303 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.261450 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.272805 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a4c14d2f-5507-4d08-be37-55d77b5491a3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.281110 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a4c14d2f-5507-4d08-be37-55d77b5491a3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.287679 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") " pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.299543 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-dr29n"]
Jan 22 12:13:49 crc kubenswrapper[4773]: W0122 12:13:49.320150 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea5454f0_b36b_438b_92dd_25652ac641ae.slice/crio-966e47f323fa2ca2c712b6a082cc99738ffdbf7528a457538e5379c00e50b4c7 WatchSource:0}: Error finding container 966e47f323fa2ca2c712b6a082cc99738ffdbf7528a457538e5379c00e50b4c7: Status 404 returned error can't find the container with id 966e47f323fa2ca2c712b6a082cc99738ffdbf7528a457538e5379c00e50b4c7
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.414531 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-dr29n" event={"ID":"ea5454f0-b36b-438b-92dd-25652ac641ae","Type":"ContainerStarted","Data":"966e47f323fa2ca2c712b6a082cc99738ffdbf7528a457538e5379c00e50b4c7"}
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.424088 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-9q826" event={"ID":"60557b36-ab96-422e-8097-b5fe3a1cf1cf","Type":"ContainerStarted","Data":"9c5a35bc0674b6fa452a91aeaf956f416d9a4fb8fe8a8bc1da996b4d3b13b8b2"}
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.610971 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.730357 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"]
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.732011 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.735164 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.735707 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.736324 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-thsh4"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.738927 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.744996 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.800819 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.917549 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9gvm\" (UniqueName: \"kubernetes.io/projected/b9643af6-36f5-46b0-9cca-b9fe67a689dd-kube-api-access-t9gvm\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.917609 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9643af6-36f5-46b0-9cca-b9fe67a689dd-operator-scripts\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.917664 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b9643af6-36f5-46b0-9cca-b9fe67a689dd-config-data-generated\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.917884 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b9643af6-36f5-46b0-9cca-b9fe67a689dd-kolla-config\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.917961 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.918030 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9643af6-36f5-46b0-9cca-b9fe67a689dd-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0"
Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.918078 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b9643af6-36f5-46b0-9cca-b9fe67a689dd-config-data-default\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0" Jan 22 12:13:49 crc kubenswrapper[4773]: I0122 12:13:49.918189 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9643af6-36f5-46b0-9cca-b9fe67a689dd-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0" Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.186247 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9643af6-36f5-46b0-9cca-b9fe67a689dd-operator-scripts\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0" Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.186304 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9gvm\" (UniqueName: \"kubernetes.io/projected/b9643af6-36f5-46b0-9cca-b9fe67a689dd-kube-api-access-t9gvm\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0" Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.186352 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b9643af6-36f5-46b0-9cca-b9fe67a689dd-config-data-generated\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0" Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.186393 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b9643af6-36f5-46b0-9cca-b9fe67a689dd-kolla-config\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0" Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.186429 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0" Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.186473 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9643af6-36f5-46b0-9cca-b9fe67a689dd-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0" Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.186518 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b9643af6-36f5-46b0-9cca-b9fe67a689dd-config-data-default\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0" Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.186539 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9643af6-36f5-46b0-9cca-b9fe67a689dd-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: 
\"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0" Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.197406 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9643af6-36f5-46b0-9cca-b9fe67a689dd-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0" Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.199927 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9643af6-36f5-46b0-9cca-b9fe67a689dd-operator-scripts\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0" Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.200252 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-galera-0" Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.201792 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b9643af6-36f5-46b0-9cca-b9fe67a689dd-kolla-config\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0" Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.202408 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b9643af6-36f5-46b0-9cca-b9fe67a689dd-config-data-generated\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0" Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.203158 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b9643af6-36f5-46b0-9cca-b9fe67a689dd-config-data-default\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0" Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.211383 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9643af6-36f5-46b0-9cca-b9fe67a689dd-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0" Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.226242 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9gvm\" (UniqueName: \"kubernetes.io/projected/b9643af6-36f5-46b0-9cca-b9fe67a689dd-kube-api-access-t9gvm\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0" Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.262195 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.277930 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") " pod="openstack/openstack-galera-0" Jan 22 12:13:50 crc 
kubenswrapper[4773]: I0122 12:13:50.386727 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.507061 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a688a76d-2498-4542-8285-709caf211e8a","Type":"ContainerStarted","Data":"610009ff10946940eec1d7088721421a698d3ef78d097981f89cdd05c6290df5"} Jan 22 12:13:50 crc kubenswrapper[4773]: I0122 12:13:50.810666 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 22 12:13:51 crc kubenswrapper[4773]: I0122 12:13:51.627422 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a4c14d2f-5507-4d08-be37-55d77b5491a3","Type":"ContainerStarted","Data":"a98c9817cf5114a40d609ed1a6434be5a0aa603ad2e880ea2d486b0b022c5803"} Jan 22 12:13:51 crc kubenswrapper[4773]: I0122 12:13:51.766054 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 22 12:13:51 crc kubenswrapper[4773]: I0122 12:13:51.768439 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:51 crc kubenswrapper[4773]: I0122 12:13:51.779310 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 22 12:13:51 crc kubenswrapper[4773]: I0122 12:13:51.782934 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Jan 22 12:13:51 crc kubenswrapper[4773]: I0122 12:13:51.783183 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Jan 22 12:13:51 crc kubenswrapper[4773]: I0122 12:13:51.784033 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-wlmdx" Jan 22 12:13:51 crc kubenswrapper[4773]: I0122 12:13:51.784192 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Jan 22 12:13:51 crc kubenswrapper[4773]: I0122 12:13:51.807796 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 22 12:13:51 crc kubenswrapper[4773]: I0122 12:13:51.936910 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/96efe0ff-5c9e-465c-8e86-80035697b7d0-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:51 crc kubenswrapper[4773]: I0122 12:13:51.937181 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25ztp\" (UniqueName: \"kubernetes.io/projected/96efe0ff-5c9e-465c-8e86-80035697b7d0-kube-api-access-25ztp\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:51 crc kubenswrapper[4773]: I0122 12:13:51.937261 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:51 crc kubenswrapper[4773]: I0122 12:13:51.937406 4773 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/96efe0ff-5c9e-465c-8e86-80035697b7d0-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:51 crc kubenswrapper[4773]: I0122 12:13:51.937441 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/96efe0ff-5c9e-465c-8e86-80035697b7d0-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:51 crc kubenswrapper[4773]: I0122 12:13:51.937593 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96efe0ff-5c9e-465c-8e86-80035697b7d0-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:51 crc kubenswrapper[4773]: I0122 12:13:51.937703 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/96efe0ff-5c9e-465c-8e86-80035697b7d0-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:51 crc kubenswrapper[4773]: I0122 12:13:51.937737 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96efe0ff-5c9e-465c-8e86-80035697b7d0-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.040556 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/96efe0ff-5c9e-465c-8e86-80035697b7d0-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.040618 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96efe0ff-5c9e-465c-8e86-80035697b7d0-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.040665 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/96efe0ff-5c9e-465c-8e86-80035697b7d0-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.040744 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25ztp\" (UniqueName: \"kubernetes.io/projected/96efe0ff-5c9e-465c-8e86-80035697b7d0-kube-api-access-25ztp\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 
12:13:52.040784 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.040835 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/96efe0ff-5c9e-465c-8e86-80035697b7d0-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.040869 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/96efe0ff-5c9e-465c-8e86-80035697b7d0-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.040960 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96efe0ff-5c9e-465c-8e86-80035697b7d0-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.080208 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/96efe0ff-5c9e-465c-8e86-80035697b7d0-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.082611 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/96efe0ff-5c9e-465c-8e86-80035697b7d0-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.083274 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.084499 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96efe0ff-5c9e-465c-8e86-80035697b7d0-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.099114 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/96efe0ff-5c9e-465c-8e86-80035697b7d0-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.212539 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Jan 22 12:13:52 crc 
kubenswrapper[4773]: I0122 12:13:52.213396 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/96efe0ff-5c9e-465c-8e86-80035697b7d0-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.213822 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.217605 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.217891 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-tkpgs" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.222807 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25ztp\" (UniqueName: \"kubernetes.io/projected/96efe0ff-5c9e-465c-8e86-80035697b7d0-kube-api-access-25ztp\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.223739 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.223763 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.227884 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96efe0ff-5c9e-465c-8e86-80035697b7d0-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.236082 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.352587 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-config-data\") pod \"memcached-0\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " pod="openstack/memcached-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.352648 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-memcached-tls-certs\") pod \"memcached-0\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " pod="openstack/memcached-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.352678 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tszbj\" (UniqueName: \"kubernetes.io/projected/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-kube-api-access-tszbj\") pod \"memcached-0\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " pod="openstack/memcached-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.352762 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-kolla-config\") pod \"memcached-0\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " pod="openstack/memcached-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.352793 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-combined-ca-bundle\") pod \"memcached-0\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " pod="openstack/memcached-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.454383 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-kolla-config\") pod \"memcached-0\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " pod="openstack/memcached-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.454477 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-combined-ca-bundle\") pod \"memcached-0\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " pod="openstack/memcached-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.454572 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-config-data\") pod \"memcached-0\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " pod="openstack/memcached-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.454602 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-memcached-tls-certs\") pod \"memcached-0\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " pod="openstack/memcached-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.454635 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tszbj\" (UniqueName: \"kubernetes.io/projected/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-kube-api-access-tszbj\") pod \"memcached-0\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " pod="openstack/memcached-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.456030 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-kolla-config\") pod \"memcached-0\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " pod="openstack/memcached-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.457977 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-config-data\") pod \"memcached-0\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " pod="openstack/memcached-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.570531 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.676131 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"b9643af6-36f5-46b0-9cca-b9fe67a689dd","Type":"ContainerStarted","Data":"d724b280f8c5a9d70986a56a1fa43a9abbb4dd28840d585fa25ec19af314a400"} Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.833946 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-combined-ca-bundle\") pod \"memcached-0\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " pod="openstack/memcached-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.852203 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-memcached-tls-certs\") pod \"memcached-0\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " pod="openstack/memcached-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.872106 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tszbj\" (UniqueName: \"kubernetes.io/projected/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-kube-api-access-tszbj\") pod \"memcached-0\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " pod="openstack/memcached-0" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.918629 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-tkpgs" Jan 22 12:13:52 crc kubenswrapper[4773]: I0122 12:13:52.931056 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 22 12:13:54 crc kubenswrapper[4773]: I0122 12:13:54.382448 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Jan 22 12:13:54 crc kubenswrapper[4773]: I0122 12:13:54.384233 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 22 12:13:54 crc kubenswrapper[4773]: I0122 12:13:54.387518 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 22 12:13:54 crc kubenswrapper[4773]: I0122 12:13:54.388025 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-glk5c" Jan 22 12:13:54 crc kubenswrapper[4773]: I0122 12:13:54.394566 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 22 12:13:54 crc kubenswrapper[4773]: I0122 12:13:54.452751 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzdfr\" (UniqueName: \"kubernetes.io/projected/8ab88165-fbbf-403c-b1c2-ecb80db3e8d1-kube-api-access-vzdfr\") pod \"kube-state-metrics-0\" (UID: \"8ab88165-fbbf-403c-b1c2-ecb80db3e8d1\") " pod="openstack/kube-state-metrics-0" Jan 22 12:13:54 crc kubenswrapper[4773]: I0122 12:13:54.501423 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 22 12:13:54 crc kubenswrapper[4773]: I0122 12:13:54.556245 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzdfr\" (UniqueName: \"kubernetes.io/projected/8ab88165-fbbf-403c-b1c2-ecb80db3e8d1-kube-api-access-vzdfr\") pod \"kube-state-metrics-0\" (UID: \"8ab88165-fbbf-403c-b1c2-ecb80db3e8d1\") " pod="openstack/kube-state-metrics-0" Jan 22 12:13:54 crc kubenswrapper[4773]: I0122 12:13:54.589515 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzdfr\" (UniqueName: \"kubernetes.io/projected/8ab88165-fbbf-403c-b1c2-ecb80db3e8d1-kube-api-access-vzdfr\") pod \"kube-state-metrics-0\" (UID: \"8ab88165-fbbf-403c-b1c2-ecb80db3e8d1\") " pod="openstack/kube-state-metrics-0" Jan 22 12:13:54 crc kubenswrapper[4773]: I0122 12:13:54.840329 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 22 12:13:54 crc kubenswrapper[4773]: I0122 12:13:54.863682 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"96efe0ff-5c9e-465c-8e86-80035697b7d0","Type":"ContainerStarted","Data":"45996e5e28aa895dfe87cd250c2a4f1b1f42cc55d4a8859f7ff7a4470569a489"} Jan 22 12:13:54 crc kubenswrapper[4773]: I0122 12:13:54.887333 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"384ccba5-b841-48d7-bdb6-ad40c08d6c8f","Type":"ContainerStarted","Data":"bbd855a897f78e0df5ed5f44b7cc8cc23da99e1fc29e60782dab409a668ce5f8"} Jan 22 12:13:56 crc kubenswrapper[4773]: I0122 12:13:56.391656 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 22 12:13:56 crc kubenswrapper[4773]: W0122 12:13:56.395403 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ab88165_fbbf_403c_b1c2_ecb80db3e8d1.slice/crio-f9a751b1105a66273c07a4766f5f5965d779bee639ff5c2cdd843861e10f31a7 WatchSource:0}: Error finding container f9a751b1105a66273c07a4766f5f5965d779bee639ff5c2cdd843861e10f31a7: Status 404 returned error can't find the container with id f9a751b1105a66273c07a4766f5f5965d779bee639ff5c2cdd843861e10f31a7 Jan 22 12:13:56 crc kubenswrapper[4773]: I0122 12:13:56.995358 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8ab88165-fbbf-403c-b1c2-ecb80db3e8d1","Type":"ContainerStarted","Data":"f9a751b1105a66273c07a4766f5f5965d779bee639ff5c2cdd843861e10f31a7"} Jan 22 12:13:57 crc kubenswrapper[4773]: I0122 12:13:57.620799 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 22 12:13:57 crc kubenswrapper[4773]: I0122 12:13:57.636597 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 22 12:13:57 crc kubenswrapper[4773]: I0122 12:13:57.636905 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:57 crc kubenswrapper[4773]: I0122 12:13:57.641210 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Jan 22 12:13:57 crc kubenswrapper[4773]: I0122 12:13:57.641712 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Jan 22 12:13:57 crc kubenswrapper[4773]: I0122 12:13:57.641877 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Jan 22 12:13:57 crc kubenswrapper[4773]: I0122 12:13:57.642000 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Jan 22 12:13:57 crc kubenswrapper[4773]: I0122 12:13:57.642989 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-sgt58" Jan 22 12:13:57 crc kubenswrapper[4773]: I0122 12:13:57.801671 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6lml\" (UniqueName: \"kubernetes.io/projected/acfbd9c7-d136-4478-a181-7e9fb3033557-kube-api-access-h6lml\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:57 crc kubenswrapper[4773]: I0122 12:13:57.802025 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/acfbd9c7-d136-4478-a181-7e9fb3033557-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:57 crc kubenswrapper[4773]: I0122 12:13:57.802098 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acfbd9c7-d136-4478-a181-7e9fb3033557-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:57 crc kubenswrapper[4773]: I0122 12:13:57.802150 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:57 crc kubenswrapper[4773]: I0122 12:13:57.802212 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acfbd9c7-d136-4478-a181-7e9fb3033557-config\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:57 crc kubenswrapper[4773]: I0122 12:13:57.802342 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acfbd9c7-d136-4478-a181-7e9fb3033557-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:57 crc kubenswrapper[4773]: I0122 12:13:57.802376 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/acfbd9c7-d136-4478-a181-7e9fb3033557-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " 
pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:57 crc kubenswrapper[4773]: I0122 12:13:57.802497 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/acfbd9c7-d136-4478-a181-7e9fb3033557-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.021053 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acfbd9c7-d136-4478-a181-7e9fb3033557-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.021121 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/acfbd9c7-d136-4478-a181-7e9fb3033557-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.021177 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/acfbd9c7-d136-4478-a181-7e9fb3033557-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.021202 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/acfbd9c7-d136-4478-a181-7e9fb3033557-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.021222 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6lml\" (UniqueName: \"kubernetes.io/projected/acfbd9c7-d136-4478-a181-7e9fb3033557-kube-api-access-h6lml\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.021265 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acfbd9c7-d136-4478-a181-7e9fb3033557-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.021312 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.021360 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acfbd9c7-d136-4478-a181-7e9fb3033557-config\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.022254 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/acfbd9c7-d136-4478-a181-7e9fb3033557-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.022480 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acfbd9c7-d136-4478-a181-7e9fb3033557-config\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.022692 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.025583 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acfbd9c7-d136-4478-a181-7e9fb3033557-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.082860 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/acfbd9c7-d136-4478-a181-7e9fb3033557-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.084171 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/acfbd9c7-d136-4478-a181-7e9fb3033557-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.098361 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acfbd9c7-d136-4478-a181-7e9fb3033557-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.111359 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-rp6dh"] Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.112777 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.116779 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.116984 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-79wn7" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.117185 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.121970 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-rp6dh"] Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.124339 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6lml\" (UniqueName: \"kubernetes.io/projected/acfbd9c7-d136-4478-a181-7e9fb3033557-kube-api-access-h6lml\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.133403 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") " pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.224867 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/757f37a0-3cc7-4d57-a956-83d236d6cebc-scripts\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.229585 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/757f37a0-3cc7-4d57-a956-83d236d6cebc-var-log-ovn\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.229660 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwv5w\" (UniqueName: \"kubernetes.io/projected/757f37a0-3cc7-4d57-a956-83d236d6cebc-kube-api-access-mwv5w\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.229685 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/757f37a0-3cc7-4d57-a956-83d236d6cebc-combined-ca-bundle\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.229707 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/757f37a0-3cc7-4d57-a956-83d236d6cebc-var-run-ovn\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.229728 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/757f37a0-3cc7-4d57-a956-83d236d6cebc-ovn-controller-tls-certs\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.229803 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/757f37a0-3cc7-4d57-a956-83d236d6cebc-var-run\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.266858 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-fj6wj"] Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.272497 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.288780 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-fj6wj"] Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.333689 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-var-lib\") pod \"ovn-controller-ovs-fj6wj\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.333803 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/757f37a0-3cc7-4d57-a956-83d236d6cebc-scripts\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.333857 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pg8p\" (UniqueName: \"kubernetes.io/projected/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-kube-api-access-4pg8p\") pod \"ovn-controller-ovs-fj6wj\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.333887 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-scripts\") pod \"ovn-controller-ovs-fj6wj\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.333911 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwv5w\" (UniqueName: \"kubernetes.io/projected/757f37a0-3cc7-4d57-a956-83d236d6cebc-kube-api-access-mwv5w\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.333933 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/757f37a0-3cc7-4d57-a956-83d236d6cebc-combined-ca-bundle\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.333952 4773 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/757f37a0-3cc7-4d57-a956-83d236d6cebc-var-run-ovn\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.333970 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-etc-ovs\") pod \"ovn-controller-ovs-fj6wj\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.333990 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-var-log\") pod \"ovn-controller-ovs-fj6wj\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.334031 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/757f37a0-3cc7-4d57-a956-83d236d6cebc-var-run\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.334068 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-var-run\") pod \"ovn-controller-ovs-fj6wj\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.334093 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/757f37a0-3cc7-4d57-a956-83d236d6cebc-var-log-ovn\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.334129 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/757f37a0-3cc7-4d57-a956-83d236d6cebc-ovn-controller-tls-certs\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.340126 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/757f37a0-3cc7-4d57-a956-83d236d6cebc-var-run\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.342952 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/757f37a0-3cc7-4d57-a956-83d236d6cebc-scripts\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.343370 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/757f37a0-3cc7-4d57-a956-83d236d6cebc-var-run-ovn\") pod \"ovn-controller-rp6dh\" (UID: 
\"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.343488 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/757f37a0-3cc7-4d57-a956-83d236d6cebc-var-log-ovn\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.357981 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwv5w\" (UniqueName: \"kubernetes.io/projected/757f37a0-3cc7-4d57-a956-83d236d6cebc-kube-api-access-mwv5w\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.359552 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/757f37a0-3cc7-4d57-a956-83d236d6cebc-combined-ca-bundle\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.360910 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/757f37a0-3cc7-4d57-a956-83d236d6cebc-ovn-controller-tls-certs\") pod \"ovn-controller-rp6dh\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.388013 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.435833 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-var-run\") pod \"ovn-controller-ovs-fj6wj\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.435922 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-var-lib\") pod \"ovn-controller-ovs-fj6wj\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.436007 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pg8p\" (UniqueName: \"kubernetes.io/projected/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-kube-api-access-4pg8p\") pod \"ovn-controller-ovs-fj6wj\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.436050 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-scripts\") pod \"ovn-controller-ovs-fj6wj\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.436085 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-etc-ovs\") pod \"ovn-controller-ovs-fj6wj\" (UID: 
\"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.436112 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-var-log\") pod \"ovn-controller-ovs-fj6wj\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.437475 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-var-log\") pod \"ovn-controller-ovs-fj6wj\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.437959 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-var-run\") pod \"ovn-controller-ovs-fj6wj\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.439126 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-var-lib\") pod \"ovn-controller-ovs-fj6wj\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.444767 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-scripts\") pod \"ovn-controller-ovs-fj6wj\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.447043 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-etc-ovs\") pod \"ovn-controller-ovs-fj6wj\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.474207 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pg8p\" (UniqueName: \"kubernetes.io/projected/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-kube-api-access-4pg8p\") pod \"ovn-controller-ovs-fj6wj\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.546474 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-rp6dh" Jan 22 12:13:58 crc kubenswrapper[4773]: I0122 12:13:58.617365 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.242766 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-rp6dh"] Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.438086 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 22 12:14:01 crc kubenswrapper[4773]: W0122 12:14:01.513872 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podacfbd9c7_d136_4478_a181_7e9fb3033557.slice/crio-fbfaf9c64b19faa0bea647961cb3d7945a89634b2e9731dd67e3bce7fea639f9 WatchSource:0}: Error finding container fbfaf9c64b19faa0bea647961cb3d7945a89634b2e9731dd67e3bce7fea639f9: Status 404 returned error can't find the container with id fbfaf9c64b19faa0bea647961cb3d7945a89634b2e9731dd67e3bce7fea639f9 Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.533948 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-fj6wj"] Jan 22 12:14:01 crc kubenswrapper[4773]: W0122 12:14:01.542558 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod757f37a0_3cc7_4d57_a956_83d236d6cebc.slice/crio-606566cb816ad78e3a1f2afdbbf4e015fcfdfbf9f931d601283018af434bb85b WatchSource:0}: Error finding container 606566cb816ad78e3a1f2afdbbf4e015fcfdfbf9f931d601283018af434bb85b: Status 404 returned error can't find the container with id 606566cb816ad78e3a1f2afdbbf4e015fcfdfbf9f931d601283018af434bb85b Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.671318 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-tjdqh"] Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.672455 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-tjdqh" Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.677916 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.687067 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-tjdqh"] Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.696318 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/efd60909-f66c-4dc2-948f-5f63c735ab6e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-tjdqh\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " pod="openstack/ovn-controller-metrics-tjdqh" Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.696357 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efd60909-f66c-4dc2-948f-5f63c735ab6e-combined-ca-bundle\") pod \"ovn-controller-metrics-tjdqh\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " pod="openstack/ovn-controller-metrics-tjdqh" Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.696386 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/efd60909-f66c-4dc2-948f-5f63c735ab6e-ovs-rundir\") pod \"ovn-controller-metrics-tjdqh\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " pod="openstack/ovn-controller-metrics-tjdqh" Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.697375 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/efd60909-f66c-4dc2-948f-5f63c735ab6e-ovn-rundir\") pod \"ovn-controller-metrics-tjdqh\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " pod="openstack/ovn-controller-metrics-tjdqh" Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.697448 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dklht\" (UniqueName: \"kubernetes.io/projected/efd60909-f66c-4dc2-948f-5f63c735ab6e-kube-api-access-dklht\") pod \"ovn-controller-metrics-tjdqh\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " pod="openstack/ovn-controller-metrics-tjdqh" Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.697593 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efd60909-f66c-4dc2-948f-5f63c735ab6e-config\") pod \"ovn-controller-metrics-tjdqh\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " pod="openstack/ovn-controller-metrics-tjdqh" Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.819021 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efd60909-f66c-4dc2-948f-5f63c735ab6e-config\") pod \"ovn-controller-metrics-tjdqh\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " pod="openstack/ovn-controller-metrics-tjdqh" Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.819334 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efd60909-f66c-4dc2-948f-5f63c735ab6e-combined-ca-bundle\") pod \"ovn-controller-metrics-tjdqh\" (UID: 
\"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " pod="openstack/ovn-controller-metrics-tjdqh" Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.819355 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/efd60909-f66c-4dc2-948f-5f63c735ab6e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-tjdqh\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " pod="openstack/ovn-controller-metrics-tjdqh" Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.819406 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/efd60909-f66c-4dc2-948f-5f63c735ab6e-ovs-rundir\") pod \"ovn-controller-metrics-tjdqh\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " pod="openstack/ovn-controller-metrics-tjdqh" Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.819430 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/efd60909-f66c-4dc2-948f-5f63c735ab6e-ovn-rundir\") pod \"ovn-controller-metrics-tjdqh\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " pod="openstack/ovn-controller-metrics-tjdqh" Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.819459 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dklht\" (UniqueName: \"kubernetes.io/projected/efd60909-f66c-4dc2-948f-5f63c735ab6e-kube-api-access-dklht\") pod \"ovn-controller-metrics-tjdqh\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " pod="openstack/ovn-controller-metrics-tjdqh" Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.820478 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efd60909-f66c-4dc2-948f-5f63c735ab6e-config\") pod \"ovn-controller-metrics-tjdqh\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " pod="openstack/ovn-controller-metrics-tjdqh" Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.830434 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efd60909-f66c-4dc2-948f-5f63c735ab6e-combined-ca-bundle\") pod \"ovn-controller-metrics-tjdqh\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " pod="openstack/ovn-controller-metrics-tjdqh" Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.843925 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dklht\" (UniqueName: \"kubernetes.io/projected/efd60909-f66c-4dc2-948f-5f63c735ab6e-kube-api-access-dklht\") pod \"ovn-controller-metrics-tjdqh\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " pod="openstack/ovn-controller-metrics-tjdqh" Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.844271 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/efd60909-f66c-4dc2-948f-5f63c735ab6e-ovs-rundir\") pod \"ovn-controller-metrics-tjdqh\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " pod="openstack/ovn-controller-metrics-tjdqh" Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.844352 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/efd60909-f66c-4dc2-948f-5f63c735ab6e-ovn-rundir\") pod \"ovn-controller-metrics-tjdqh\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " pod="openstack/ovn-controller-metrics-tjdqh" Jan 
Jan 22 12:14:01 crc kubenswrapper[4773]: I0122 12:14:01.862764 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/efd60909-f66c-4dc2-948f-5f63c735ab6e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-tjdqh\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " pod="openstack/ovn-controller-metrics-tjdqh"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.004826 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-tjdqh"
Jan 22 12:14:02 crc kubenswrapper[4773]: W0122 12:14:02.042363 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7865c5e1_9bcc_467f_8a58_1adfaf30ed28.slice/crio-0757ac3ab1be556d2afa95eeac8640d5f4faef141831a893d7b3a9d6b52979b7 WatchSource:0}: Error finding container 0757ac3ab1be556d2afa95eeac8640d5f4faef141831a893d7b3a9d6b52979b7: Status 404 returned error can't find the container with id 0757ac3ab1be556d2afa95eeac8640d5f4faef141831a893d7b3a9d6b52979b7
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.385624 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-9q826"]
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.410995 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-fj6wj" event={"ID":"7865c5e1-9bcc-467f-8a58-1adfaf30ed28","Type":"ContainerStarted","Data":"0757ac3ab1be556d2afa95eeac8640d5f4faef141831a893d7b3a9d6b52979b7"}
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.412498 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"acfbd9c7-d136-4478-a181-7e9fb3033557","Type":"ContainerStarted","Data":"fbfaf9c64b19faa0bea647961cb3d7945a89634b2e9731dd67e3bce7fea639f9"}
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.422483 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-rp6dh" event={"ID":"757f37a0-3cc7-4d57-a956-83d236d6cebc","Type":"ContainerStarted","Data":"606566cb816ad78e3a1f2afdbbf4e015fcfdfbf9f931d601283018af434bb85b"}
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.424212 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7878659675-jn77t"]
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.430691 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7878659675-jn77t"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.433394 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.441866 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7878659675-jn77t"]
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.601920 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cc692\" (UniqueName: \"kubernetes.io/projected/2c2b41e2-772f-49ac-aeef-c5037f50522e-kube-api-access-cc692\") pod \"dnsmasq-dns-7878659675-jn77t\" (UID: \"2c2b41e2-772f-49ac-aeef-c5037f50522e\") " pod="openstack/dnsmasq-dns-7878659675-jn77t"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.602096 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c2b41e2-772f-49ac-aeef-c5037f50522e-ovsdbserver-nb\") pod \"dnsmasq-dns-7878659675-jn77t\" (UID: \"2c2b41e2-772f-49ac-aeef-c5037f50522e\") " pod="openstack/dnsmasq-dns-7878659675-jn77t"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.602196 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c2b41e2-772f-49ac-aeef-c5037f50522e-config\") pod \"dnsmasq-dns-7878659675-jn77t\" (UID: \"2c2b41e2-772f-49ac-aeef-c5037f50522e\") " pod="openstack/dnsmasq-dns-7878659675-jn77t"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.602341 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c2b41e2-772f-49ac-aeef-c5037f50522e-dns-svc\") pod \"dnsmasq-dns-7878659675-jn77t\" (UID: \"2c2b41e2-772f-49ac-aeef-c5037f50522e\") " pod="openstack/dnsmasq-dns-7878659675-jn77t"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.704272 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cc692\" (UniqueName: \"kubernetes.io/projected/2c2b41e2-772f-49ac-aeef-c5037f50522e-kube-api-access-cc692\") pod \"dnsmasq-dns-7878659675-jn77t\" (UID: \"2c2b41e2-772f-49ac-aeef-c5037f50522e\") " pod="openstack/dnsmasq-dns-7878659675-jn77t"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.704392 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c2b41e2-772f-49ac-aeef-c5037f50522e-ovsdbserver-nb\") pod \"dnsmasq-dns-7878659675-jn77t\" (UID: \"2c2b41e2-772f-49ac-aeef-c5037f50522e\") " pod="openstack/dnsmasq-dns-7878659675-jn77t"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.704441 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c2b41e2-772f-49ac-aeef-c5037f50522e-config\") pod \"dnsmasq-dns-7878659675-jn77t\" (UID: \"2c2b41e2-772f-49ac-aeef-c5037f50522e\") " pod="openstack/dnsmasq-dns-7878659675-jn77t"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.704480 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c2b41e2-772f-49ac-aeef-c5037f50522e-dns-svc\") pod \"dnsmasq-dns-7878659675-jn77t\" (UID: \"2c2b41e2-772f-49ac-aeef-c5037f50522e\") " pod="openstack/dnsmasq-dns-7878659675-jn77t"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.705897 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c2b41e2-772f-49ac-aeef-c5037f50522e-dns-svc\") pod \"dnsmasq-dns-7878659675-jn77t\" (UID: \"2c2b41e2-772f-49ac-aeef-c5037f50522e\") " pod="openstack/dnsmasq-dns-7878659675-jn77t"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.706882 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c2b41e2-772f-49ac-aeef-c5037f50522e-ovsdbserver-nb\") pod \"dnsmasq-dns-7878659675-jn77t\" (UID: \"2c2b41e2-772f-49ac-aeef-c5037f50522e\") " pod="openstack/dnsmasq-dns-7878659675-jn77t"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.707682 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c2b41e2-772f-49ac-aeef-c5037f50522e-config\") pod \"dnsmasq-dns-7878659675-jn77t\" (UID: \"2c2b41e2-772f-49ac-aeef-c5037f50522e\") " pod="openstack/dnsmasq-dns-7878659675-jn77t"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.805400 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cc692\" (UniqueName: \"kubernetes.io/projected/2c2b41e2-772f-49ac-aeef-c5037f50522e-kube-api-access-cc692\") pod \"dnsmasq-dns-7878659675-jn77t\" (UID: \"2c2b41e2-772f-49ac-aeef-c5037f50522e\") " pod="openstack/dnsmasq-dns-7878659675-jn77t"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.826145 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.827911 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.830610 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.833888 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.834491 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-zw42j"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.834744 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.843299 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.907353 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78ngh\" (UniqueName: \"kubernetes.io/projected/37262aee-18bc-423e-9dac-272af09de237-kube-api-access-78ngh\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.907463 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37262aee-18bc-423e-9dac-272af09de237-config\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.907510 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/37262aee-18bc-423e-9dac-272af09de237-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.907562 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37262aee-18bc-423e-9dac-272af09de237-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.907733 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/37262aee-18bc-423e-9dac-272af09de237-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.909478 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/37262aee-18bc-423e-9dac-272af09de237-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.909590 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37262aee-18bc-423e-9dac-272af09de237-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:02 crc kubenswrapper[4773]: I0122 12:14:02.909620 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:03 crc kubenswrapper[4773]: I0122 12:14:03.022643 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37262aee-18bc-423e-9dac-272af09de237-config\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:03 crc kubenswrapper[4773]: I0122 12:14:03.022706 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/37262aee-18bc-423e-9dac-272af09de237-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:03 crc kubenswrapper[4773]: I0122 12:14:03.022748 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37262aee-18bc-423e-9dac-272af09de237-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:03 crc kubenswrapper[4773]: I0122 12:14:03.022798 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/37262aee-18bc-423e-9dac-272af09de237-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:03 crc kubenswrapper[4773]: I0122 12:14:03.022844 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/37262aee-18bc-423e-9dac-272af09de237-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:03 crc kubenswrapper[4773]: I0122 12:14:03.022875 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37262aee-18bc-423e-9dac-272af09de237-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:03 crc kubenswrapper[4773]: I0122 12:14:03.022903 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:03 crc kubenswrapper[4773]: I0122 12:14:03.022927 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78ngh\" (UniqueName: \"kubernetes.io/projected/37262aee-18bc-423e-9dac-272af09de237-kube-api-access-78ngh\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:03 crc kubenswrapper[4773]: I0122 12:14:03.023802 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/37262aee-18bc-423e-9dac-272af09de237-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:03 crc kubenswrapper[4773]: I0122 12:14:03.025042 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37262aee-18bc-423e-9dac-272af09de237-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:03 crc kubenswrapper[4773]: I0122 12:14:03.025366 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:03 crc kubenswrapper[4773]: I0122 12:14:03.031132 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/37262aee-18bc-423e-9dac-272af09de237-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:03 crc kubenswrapper[4773]: I0122 12:14:03.033474 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37262aee-18bc-423e-9dac-272af09de237-config\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0"
Need to start a new one" pod="openstack/dnsmasq-dns-7878659675-jn77t" Jan 22 12:14:03 crc kubenswrapper[4773]: I0122 12:14:03.127557 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78ngh\" (UniqueName: \"kubernetes.io/projected/37262aee-18bc-423e-9dac-272af09de237-kube-api-access-78ngh\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0" Jan 22 12:14:03 crc kubenswrapper[4773]: I0122 12:14:03.142579 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37262aee-18bc-423e-9dac-272af09de237-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0" Jan 22 12:14:03 crc kubenswrapper[4773]: I0122 12:14:03.150076 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/37262aee-18bc-423e-9dac-272af09de237-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0" Jan 22 12:14:03 crc kubenswrapper[4773]: I0122 12:14:03.161805 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " pod="openstack/ovsdbserver-sb-0" Jan 22 12:14:03 crc kubenswrapper[4773]: I0122 12:14:03.173695 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 22 12:14:04 crc kubenswrapper[4773]: I0122 12:14:04.176369 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:14:04 crc kubenswrapper[4773]: I0122 12:14:04.176445 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:14:04 crc kubenswrapper[4773]: I0122 12:14:04.176547 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 12:14:04 crc kubenswrapper[4773]: I0122 12:14:04.177395 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7e399a29260ad477e9daad28c8b04af4cd0be5f90f0da32bc9266d74b4dcc71d"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 12:14:04 crc kubenswrapper[4773]: I0122 12:14:04.177460 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://7e399a29260ad477e9daad28c8b04af4cd0be5f90f0da32bc9266d74b4dcc71d" gracePeriod=600 Jan 22 12:14:04 crc kubenswrapper[4773]: I0122 12:14:04.485408 4773 generic.go:334] "Generic 
Jan 22 12:14:04 crc kubenswrapper[4773]: I0122 12:14:04.485408 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="7e399a29260ad477e9daad28c8b04af4cd0be5f90f0da32bc9266d74b4dcc71d" exitCode=0
Jan 22 12:14:04 crc kubenswrapper[4773]: I0122 12:14:04.485506 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"7e399a29260ad477e9daad28c8b04af4cd0be5f90f0da32bc9266d74b4dcc71d"}
Jan 22 12:14:04 crc kubenswrapper[4773]: I0122 12:14:04.485618 4773 scope.go:117] "RemoveContainer" containerID="a884f8e7ecd2462a2aab65c47dccafce52794662c52b09332c27098b394743b2"
Jan 22 12:14:11 crc kubenswrapper[4773]: I0122 12:14:11.914233 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-tjdqh"]
Jan 22 12:14:19 crc kubenswrapper[4773]: E0122 12:14:19.645196 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13"
Jan 22 12:14:19 crc kubenswrapper[4773]: E0122 12:14:19.645923 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-25ztp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(96efe0ff-5c9e-465c-8e86-80035697b7d0): ErrImagePull: rpc error: code = Canceled desc = copying config: context
canceled" logger="UnhandledError" Jan 22 12:14:19 crc kubenswrapper[4773]: E0122 12:14:19.647101 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="96efe0ff-5c9e-465c-8e86-80035697b7d0" Jan 22 12:14:20 crc kubenswrapper[4773]: E0122 12:14:20.633471 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e733252aab7f4bc0efbdd712bcd88e44c5498bf1773dba843bc9dcfac324fe3d" Jan 22 12:14:20 crc kubenswrapper[4773]: E0122 12:14:20.633963 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e733252aab7f4bc0efbdd712bcd88e44c5498bf1773dba843bc9dcfac324fe3d,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2t2j2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
rabbitmq-server-0_openstack(a688a76d-2498-4542-8285-709caf211e8a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:14:20 crc kubenswrapper[4773]: E0122 12:14:20.635642 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="a688a76d-2498-4542-8285-709caf211e8a" Jan 22 12:14:20 crc kubenswrapper[4773]: E0122 12:14:20.828328 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="96efe0ff-5c9e-465c-8e86-80035697b7d0" Jan 22 12:14:20 crc kubenswrapper[4773]: E0122 12:14:20.828756 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e733252aab7f4bc0efbdd712bcd88e44c5498bf1773dba843bc9dcfac324fe3d\\\"\"" pod="openstack/rabbitmq-server-0" podUID="a688a76d-2498-4542-8285-709caf211e8a" Jan 22 12:14:21 crc kubenswrapper[4773]: E0122 12:14:21.471728 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached@sha256:e47191ba776414b781b3e27b856ab45a03b9480c7dc2b1addb939608794882dc" Jan 22 12:14:21 crc kubenswrapper[4773]: E0122 12:14:21.472018 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached@sha256:e47191ba776414b781b3e27b856ab45a03b9480c7dc2b1addb939608794882dc,Command:[/usr/bin/dumb-init -- 
/usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:nb8h664h5bhfch5bbh679h565h5c6h585h666h94h84h66dh5c4h5ch599h64bh556hf5h56bh549h598h684h55ch58ch7dh569h56dh76h58ch559h57dq,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tszbj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(384ccba5-b841-48d7-bdb6-ad40c08d6c8f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:14:21 crc kubenswrapper[4773]: E0122 12:14:21.473273 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="384ccba5-b841-48d7-bdb6-ad40c08d6c8f" Jan 22 12:14:21 crc kubenswrapper[4773]: E0122 12:14:21.498010 4773 log.go:32] 
"PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e733252aab7f4bc0efbdd712bcd88e44c5498bf1773dba843bc9dcfac324fe3d" Jan 22 12:14:21 crc kubenswrapper[4773]: E0122 12:14:21.498367 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e733252aab7f4bc0efbdd712bcd88e44c5498bf1773dba843bc9dcfac324fe3d,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ccjbz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(a4c14d2f-5507-4d08-be37-55d77b5491a3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:14:21 crc kubenswrapper[4773]: E0122 12:14:21.499784 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" 
podUID="a4c14d2f-5507-4d08-be37-55d77b5491a3" Jan 22 12:14:21 crc kubenswrapper[4773]: E0122 12:14:21.506397 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13" Jan 22 12:14:21 crc kubenswrapper[4773]: E0122 12:14:21.506584 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t9gvm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(b9643af6-36f5-46b0-9cca-b9fe67a689dd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:14:21 crc kubenswrapper[4773]: E0122 12:14:21.507750 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="b9643af6-36f5-46b0-9cca-b9fe67a689dd" Jan 22 12:14:21 crc kubenswrapper[4773]: I0122 12:14:21.720889 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7878659675-jn77t"] Jan 22 12:14:21 crc kubenswrapper[4773]: I0122 12:14:21.835267 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-tjdqh" 
event={"ID":"efd60909-f66c-4dc2-948f-5f63c735ab6e","Type":"ContainerStarted","Data":"4a771f88881f4b144c25b6c8db1b1748e494fc57b83b33e7fe93c75afa43c430"} Jan 22 12:14:21 crc kubenswrapper[4773]: E0122 12:14:21.839511 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13\\\"\"" pod="openstack/openstack-galera-0" podUID="b9643af6-36f5-46b0-9cca-b9fe67a689dd" Jan 22 12:14:21 crc kubenswrapper[4773]: E0122 12:14:21.839512 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached@sha256:e47191ba776414b781b3e27b856ab45a03b9480c7dc2b1addb939608794882dc\\\"\"" pod="openstack/memcached-0" podUID="384ccba5-b841-48d7-bdb6-ad40c08d6c8f" Jan 22 12:14:21 crc kubenswrapper[4773]: E0122 12:14:21.839574 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e733252aab7f4bc0efbdd712bcd88e44c5498bf1773dba843bc9dcfac324fe3d\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="a4c14d2f-5507-4d08-be37-55d77b5491a3" Jan 22 12:14:29 crc kubenswrapper[4773]: E0122 12:14:29.173523 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-base@sha256:df45459c449f64cc6471e98c0890ac00dcc77a940f85d4e7e9d9dd52990d65b3" Jan 22 12:14:29 crc kubenswrapper[4773]: E0122 12:14:29.174223 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:ovsdb-server-init,Image:quay.io/podified-antelope-centos9/openstack-ovn-base@sha256:df45459c449f64cc6471e98c0890ac00dcc77a940f85d4e7e9d9dd52990d65b3,Command:[/usr/local/bin/container-scripts/init-ovsdb-server.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5ddh5d4h97h56bh564h576hfbh669h575h4h648h695h647h656h57bh688hf5h5cch69h89h5c4h6ch54h6bh5f5h588h576h655h5bdhc8hf8h678q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-ovs,ReadOnly:false,MountPath:/etc/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-run,ReadOnly:false,MountPath:/var/run/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-log,ReadOnly:false,MountPath:/var/log/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-lib,ReadOnly:false,MountPath:/var/lib/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4pg8p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN SYS_ADMIN SYS_NICE],Drop:[],},Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-controller-ovs-fj6wj_openstack(7865c5e1-9bcc-467f-8a58-1adfaf30ed28): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:14:29 crc kubenswrapper[4773]: E0122 12:14:29.175581 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovn-controller-ovs-fj6wj" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" Jan 22 12:14:29 crc kubenswrapper[4773]: I0122 12:14:29.936155 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7878659675-jn77t" event={"ID":"2c2b41e2-772f-49ac-aeef-c5037f50522e","Type":"ContainerStarted","Data":"88c7175967b5ed62ed151fe599e6128504b08cfd9f3da806d9d59f2f8dfe6c0b"} Jan 22 12:14:29 crc kubenswrapper[4773]: E0122 12:14:29.937643 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-base@sha256:df45459c449f64cc6471e98c0890ac00dcc77a940f85d4e7e9d9dd52990d65b3\\\"\"" pod="openstack/ovn-controller-ovs-fj6wj" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" Jan 22 12:14:34 crc kubenswrapper[4773]: E0122 12:14:34.639021 4773 log.go:32] "PullImage from image service failed" err="rpc error: 
code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server@sha256:947c1bb9373b7d3f2acea104a5666e394c830111bf80d133f1fe7238e4d06f28" Jan 22 12:14:34 crc kubenswrapper[4773]: E0122 12:14:34.639784 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ovsdbserver-nb,Image:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server@sha256:947c1bb9373b7d3f2acea104a5666e394c830111bf80d133f1fe7238e4d06f28,Command:[/usr/bin/dumb-init],Args:[/usr/local/bin/container-scripts/setup.sh],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nb8h656h6dh699hf9h5fch5dch599hdbh65dh546h97h656hf8h654h7bh5ddh6h56h5dbh588h697hc7hbh4h668hd7h656h87h584h68bh78q,ValueFrom:nil,},EnvVar{Name:OVN_LOGDIR,Value:/tmp,ValueFrom:nil,},EnvVar{Name:OVN_RUNDIR,Value:/tmp,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovndbcluster-nb-etc-ovn,ReadOnly:false,MountPath:/etc/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdb-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h6lml,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof 
ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/cleanup.sh],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:20,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovsdbserver-nb-0_openstack(acfbd9c7-d136-4478-a181-7e9fb3033557): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:14:34 crc kubenswrapper[4773]: I0122 12:14:34.872680 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 22 12:14:35 crc kubenswrapper[4773]: E0122 12:14:35.123857 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-controller@sha256:fa24ce4aa285e3632c86a53e8d0385d4c788d049da42dd06570ad9d44aae00de" Jan 22 12:14:35 crc kubenswrapper[4773]: E0122 12:14:35.124080 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ovn-controller,Image:quay.io/podified-antelope-centos9/openstack-ovn-controller@sha256:fa24ce4aa285e3632c86a53e8d0385d4c788d049da42dd06570ad9d44aae00de,Command:[ovn-controller --pidfile unix:/run/openvswitch/db.sock --certificate=/etc/pki/tls/certs/ovndb.crt --private-key=/etc/pki/tls/private/ovndb.key 
--ca-cert=/etc/pki/tls/certs/ovndbca.crt],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5ddh5d4h97h56bh564h576hfbh669h575h4h648h695h647h656h57bh688hf5h5cch69h89h5c4h6ch54h6bh5f5h588h576h655h5bdhc8hf8h678q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:var-run,ReadOnly:false,MountPath:/var/run/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-run-ovn,ReadOnly:false,MountPath:/var/run/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-log-ovn,ReadOnly:false,MountPath:/var/log/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mwv5w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/ovn_controller_liveness.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:30,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/ovn_controller_readiness.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:30,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/share/ovn/scripts/ovn-ctl stop_controller],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN SYS_ADMIN SYS_NICE],Drop:[],},Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-controller-rp6dh_openstack(757f37a0-3cc7-4d57-a956-83d236d6cebc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:14:35 crc kubenswrapper[4773]: E0122 12:14:35.125308 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"ovn-controller\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovn-controller-rp6dh" podUID="757f37a0-3cc7-4d57-a956-83d236d6cebc" Jan 22 12:14:36 crc kubenswrapper[4773]: E0122 12:14:36.017906 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33" Jan 22 12:14:36 crc kubenswrapper[4773]: E0122 12:14:36.018529 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wkkhv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-95f5f6995-dr29n_openstack(ea5454f0-b36b-438b-92dd-25652ac641ae): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:14:36 crc kubenswrapper[4773]: E0122 12:14:36.020725 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-95f5f6995-dr29n" podUID="ea5454f0-b36b-438b-92dd-25652ac641ae" Jan 22 12:14:36 crc kubenswrapper[4773]: E0122 12:14:36.037632 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-controller\" with 
ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-controller@sha256:fa24ce4aa285e3632c86a53e8d0385d4c788d049da42dd06570ad9d44aae00de\\\"\"" pod="openstack/ovn-controller-rp6dh" podUID="757f37a0-3cc7-4d57-a956-83d236d6cebc" Jan 22 12:14:36 crc kubenswrapper[4773]: W0122 12:14:36.125030 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod37262aee_18bc_423e_9dac_272af09de237.slice/crio-c923c11d90d22498df1d6fddb79a76237239776b9fed2ad945a5da95d599b530 WatchSource:0}: Error finding container c923c11d90d22498df1d6fddb79a76237239776b9fed2ad945a5da95d599b530: Status 404 returned error can't find the container with id c923c11d90d22498df1d6fddb79a76237239776b9fed2ad945a5da95d599b530 Jan 22 12:14:36 crc kubenswrapper[4773]: E0122 12:14:36.172277 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33" Jan 22 12:14:36 crc kubenswrapper[4773]: E0122 12:14:36.172500 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-669bd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-744ffd65bc-9q826_openstack(60557b36-ab96-422e-8097-b5fe3a1cf1cf): ErrImagePull: rpc error: code = Canceled desc = 
copying config: context canceled" logger="UnhandledError" Jan 22 12:14:36 crc kubenswrapper[4773]: E0122 12:14:36.174158 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-744ffd65bc-9q826" podUID="60557b36-ab96-422e-8097-b5fe3a1cf1cf" Jan 22 12:14:36 crc kubenswrapper[4773]: E0122 12:14:36.211266 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33" Jan 22 12:14:36 crc kubenswrapper[4773]: E0122 12:14:36.211462 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-srjjr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-84bb9d8bd9-gj65w_openstack(1c3338cd-4c30-4c83-bc6a-1cb155a3eefc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:14:36 crc kubenswrapper[4773]: E0122 12:14:36.212729 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-84bb9d8bd9-gj65w" podUID="1c3338cd-4c30-4c83-bc6a-1cb155a3eefc" Jan 22 12:14:36 crc kubenswrapper[4773]: E0122 12:14:36.217894 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc 
= copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33" Jan 22 12:14:36 crc kubenswrapper[4773]: E0122 12:14:36.218024 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mcz9z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5f854695bc-2zr2l_openstack(c9a0b915-d77d-4124-98b0-30b003c0889d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 12:14:36 crc kubenswrapper[4773]: E0122 12:14:36.222590 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5f854695bc-2zr2l" podUID="c9a0b915-d77d-4124-98b0-30b003c0889d" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.033628 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"e8c9c494f3234f8ef97d1206589812616f50e7a5ed49d844fa29e97c95590447"} Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.037203 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" 
event={"ID":"37262aee-18bc-423e-9dac-272af09de237","Type":"ContainerStarted","Data":"c923c11d90d22498df1d6fddb79a76237239776b9fed2ad945a5da95d599b530"} Jan 22 12:14:37 crc kubenswrapper[4773]: E0122 12:14:37.039858 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33\\\"\"" pod="openstack/dnsmasq-dns-95f5f6995-dr29n" podUID="ea5454f0-b36b-438b-92dd-25652ac641ae" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.667111 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-9q826" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.674571 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-2zr2l" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.681245 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-gj65w" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.723998 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60557b36-ab96-422e-8097-b5fe3a1cf1cf-dns-svc\") pod \"60557b36-ab96-422e-8097-b5fe3a1cf1cf\" (UID: \"60557b36-ab96-422e-8097-b5fe3a1cf1cf\") " Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.724618 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60557b36-ab96-422e-8097-b5fe3a1cf1cf-config\") pod \"60557b36-ab96-422e-8097-b5fe3a1cf1cf\" (UID: \"60557b36-ab96-422e-8097-b5fe3a1cf1cf\") " Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.724916 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-669bd\" (UniqueName: \"kubernetes.io/projected/60557b36-ab96-422e-8097-b5fe3a1cf1cf-kube-api-access-669bd\") pod \"60557b36-ab96-422e-8097-b5fe3a1cf1cf\" (UID: \"60557b36-ab96-422e-8097-b5fe3a1cf1cf\") " Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.725357 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60557b36-ab96-422e-8097-b5fe3a1cf1cf-config" (OuterVolumeSpecName: "config") pod "60557b36-ab96-422e-8097-b5fe3a1cf1cf" (UID: "60557b36-ab96-422e-8097-b5fe3a1cf1cf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.725589 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60557b36-ab96-422e-8097-b5fe3a1cf1cf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "60557b36-ab96-422e-8097-b5fe3a1cf1cf" (UID: "60557b36-ab96-422e-8097-b5fe3a1cf1cf"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.726026 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60557b36-ab96-422e-8097-b5fe3a1cf1cf-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.726057 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60557b36-ab96-422e-8097-b5fe3a1cf1cf-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.738147 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60557b36-ab96-422e-8097-b5fe3a1cf1cf-kube-api-access-669bd" (OuterVolumeSpecName: "kube-api-access-669bd") pod "60557b36-ab96-422e-8097-b5fe3a1cf1cf" (UID: "60557b36-ab96-422e-8097-b5fe3a1cf1cf"). InnerVolumeSpecName "kube-api-access-669bd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.826900 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9a0b915-d77d-4124-98b0-30b003c0889d-config\") pod \"c9a0b915-d77d-4124-98b0-30b003c0889d\" (UID: \"c9a0b915-d77d-4124-98b0-30b003c0889d\") " Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.827211 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-srjjr\" (UniqueName: \"kubernetes.io/projected/1c3338cd-4c30-4c83-bc6a-1cb155a3eefc-kube-api-access-srjjr\") pod \"1c3338cd-4c30-4c83-bc6a-1cb155a3eefc\" (UID: \"1c3338cd-4c30-4c83-bc6a-1cb155a3eefc\") " Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.827258 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c3338cd-4c30-4c83-bc6a-1cb155a3eefc-config\") pod \"1c3338cd-4c30-4c83-bc6a-1cb155a3eefc\" (UID: \"1c3338cd-4c30-4c83-bc6a-1cb155a3eefc\") " Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.827278 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c9a0b915-d77d-4124-98b0-30b003c0889d-dns-svc\") pod \"c9a0b915-d77d-4124-98b0-30b003c0889d\" (UID: \"c9a0b915-d77d-4124-98b0-30b003c0889d\") " Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.827398 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mcz9z\" (UniqueName: \"kubernetes.io/projected/c9a0b915-d77d-4124-98b0-30b003c0889d-kube-api-access-mcz9z\") pod \"c9a0b915-d77d-4124-98b0-30b003c0889d\" (UID: \"c9a0b915-d77d-4124-98b0-30b003c0889d\") " Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.827743 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-669bd\" (UniqueName: \"kubernetes.io/projected/60557b36-ab96-422e-8097-b5fe3a1cf1cf-kube-api-access-669bd\") on node \"crc\" DevicePath \"\"" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.828124 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9a0b915-d77d-4124-98b0-30b003c0889d-config" (OuterVolumeSpecName: "config") pod "c9a0b915-d77d-4124-98b0-30b003c0889d" (UID: "c9a0b915-d77d-4124-98b0-30b003c0889d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.828564 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9a0b915-d77d-4124-98b0-30b003c0889d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c9a0b915-d77d-4124-98b0-30b003c0889d" (UID: "c9a0b915-d77d-4124-98b0-30b003c0889d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.828947 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c3338cd-4c30-4c83-bc6a-1cb155a3eefc-config" (OuterVolumeSpecName: "config") pod "1c3338cd-4c30-4c83-bc6a-1cb155a3eefc" (UID: "1c3338cd-4c30-4c83-bc6a-1cb155a3eefc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.843670 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9a0b915-d77d-4124-98b0-30b003c0889d-kube-api-access-mcz9z" (OuterVolumeSpecName: "kube-api-access-mcz9z") pod "c9a0b915-d77d-4124-98b0-30b003c0889d" (UID: "c9a0b915-d77d-4124-98b0-30b003c0889d"). InnerVolumeSpecName "kube-api-access-mcz9z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.844963 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c3338cd-4c30-4c83-bc6a-1cb155a3eefc-kube-api-access-srjjr" (OuterVolumeSpecName: "kube-api-access-srjjr") pod "1c3338cd-4c30-4c83-bc6a-1cb155a3eefc" (UID: "1c3338cd-4c30-4c83-bc6a-1cb155a3eefc"). InnerVolumeSpecName "kube-api-access-srjjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.929812 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mcz9z\" (UniqueName: \"kubernetes.io/projected/c9a0b915-d77d-4124-98b0-30b003c0889d-kube-api-access-mcz9z\") on node \"crc\" DevicePath \"\"" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.929852 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9a0b915-d77d-4124-98b0-30b003c0889d-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.929862 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-srjjr\" (UniqueName: \"kubernetes.io/projected/1c3338cd-4c30-4c83-bc6a-1cb155a3eefc-kube-api-access-srjjr\") on node \"crc\" DevicePath \"\"" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.929870 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c3338cd-4c30-4c83-bc6a-1cb155a3eefc-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:14:37 crc kubenswrapper[4773]: I0122 12:14:37.929906 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c9a0b915-d77d-4124-98b0-30b003c0889d-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 12:14:38 crc kubenswrapper[4773]: I0122 12:14:38.044500 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f854695bc-2zr2l" event={"ID":"c9a0b915-d77d-4124-98b0-30b003c0889d","Type":"ContainerDied","Data":"bce71c91100efa5896b4da8815d1e34fad54b051d68421e97be1ac9099902e46"} Jan 22 12:14:38 crc kubenswrapper[4773]: I0122 12:14:38.044529 4773 util.go:48] "No ready sandbox 
for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-2zr2l" Jan 22 12:14:38 crc kubenswrapper[4773]: I0122 12:14:38.045731 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-gj65w" Jan 22 12:14:38 crc kubenswrapper[4773]: I0122 12:14:38.045714 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9d8bd9-gj65w" event={"ID":"1c3338cd-4c30-4c83-bc6a-1cb155a3eefc","Type":"ContainerDied","Data":"c179e292cfbdfc1da7644e11100ec840da420454c0b637fbb7f61e535eadc360"} Jan 22 12:14:38 crc kubenswrapper[4773]: I0122 12:14:38.047338 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-9q826" event={"ID":"60557b36-ab96-422e-8097-b5fe3a1cf1cf","Type":"ContainerDied","Data":"9c5a35bc0674b6fa452a91aeaf956f416d9a4fb8fe8a8bc1da996b4d3b13b8b2"} Jan 22 12:14:38 crc kubenswrapper[4773]: I0122 12:14:38.047372 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-9q826" Jan 22 12:14:38 crc kubenswrapper[4773]: I0122 12:14:38.111760 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-2zr2l"] Jan 22 12:14:38 crc kubenswrapper[4773]: I0122 12:14:38.118473 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-2zr2l"] Jan 22 12:14:38 crc kubenswrapper[4773]: I0122 12:14:38.144578 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-gj65w"] Jan 22 12:14:38 crc kubenswrapper[4773]: I0122 12:14:38.151092 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-gj65w"] Jan 22 12:14:38 crc kubenswrapper[4773]: I0122 12:14:38.189742 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-9q826"] Jan 22 12:14:38 crc kubenswrapper[4773]: I0122 12:14:38.198380 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-9q826"] Jan 22 12:14:38 crc kubenswrapper[4773]: I0122 12:14:38.667131 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c3338cd-4c30-4c83-bc6a-1cb155a3eefc" path="/var/lib/kubelet/pods/1c3338cd-4c30-4c83-bc6a-1cb155a3eefc/volumes" Jan 22 12:14:38 crc kubenswrapper[4773]: I0122 12:14:38.668118 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60557b36-ab96-422e-8097-b5fe3a1cf1cf" path="/var/lib/kubelet/pods/60557b36-ab96-422e-8097-b5fe3a1cf1cf/volumes" Jan 22 12:14:38 crc kubenswrapper[4773]: I0122 12:14:38.668702 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9a0b915-d77d-4124-98b0-30b003c0889d" path="/var/lib/kubelet/pods/c9a0b915-d77d-4124-98b0-30b003c0889d/volumes" Jan 22 12:14:39 crc kubenswrapper[4773]: E0122 12:14:39.007671 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb" Jan 22 12:14:39 crc kubenswrapper[4773]: E0122 12:14:39.007730 4773 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb" Jan 22 12:14:39 crc kubenswrapper[4773]: E0122 12:14:39.007891 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb,Command:[],Args:[--resources=pods --namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vzdfr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(8ab88165-fbbf-403c-b1c2-ecb80db3e8d1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 12:14:39 crc kubenswrapper[4773]: E0122 12:14:39.010550 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/kube-state-metrics-0" podUID="8ab88165-fbbf-403c-b1c2-ecb80db3e8d1" Jan 22 12:14:39 crc kubenswrapper[4773]: E0122 12:14:39.073381 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb\\\"\"" pod="openstack/kube-state-metrics-0" podUID="8ab88165-fbbf-403c-b1c2-ecb80db3e8d1" Jan 22 12:14:39 crc kubenswrapper[4773]: E0122 12:14:39.650492 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-nb\" with ErrImagePull: 
\"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovsdbserver-nb-0" podUID="acfbd9c7-d136-4478-a181-7e9fb3033557" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.075706 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"b9643af6-36f5-46b0-9cca-b9fe67a689dd","Type":"ContainerStarted","Data":"dba0ccb61ced9efa67fe09f169cc91f5cfd0e2d646aa717d90a06e04a0c5a34d"} Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.076731 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-tjdqh" event={"ID":"efd60909-f66c-4dc2-948f-5f63c735ab6e","Type":"ContainerStarted","Data":"5115d35ee6bb43c24c3d875f3e5f3d449a9b5e2ac3662b5da588b01bf65d687a"} Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.078656 4773 generic.go:334] "Generic (PLEG): container finished" podID="2c2b41e2-772f-49ac-aeef-c5037f50522e" containerID="24e0dfadae7bd5769609d7d2cedf007f2a9d31095b70120efad1959d1e02e550" exitCode=0 Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.078707 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7878659675-jn77t" event={"ID":"2c2b41e2-772f-49ac-aeef-c5037f50522e","Type":"ContainerDied","Data":"24e0dfadae7bd5769609d7d2cedf007f2a9d31095b70120efad1959d1e02e550"} Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.080623 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"37262aee-18bc-423e-9dac-272af09de237","Type":"ContainerStarted","Data":"5fbb45accf2025632e101b486f97d89eaf10a6d83beaef12851e44a769abaae8"} Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.082930 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"acfbd9c7-d136-4478-a181-7e9fb3033557","Type":"ContainerStarted","Data":"8d2a6050e2267a0f976543f79d40ed2283eece122c1f12726a2431e1a337b88a"} Jan 22 12:14:40 crc kubenswrapper[4773]: E0122 12:14:40.084230 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-nb\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server@sha256:947c1bb9373b7d3f2acea104a5666e394c830111bf80d133f1fe7238e4d06f28\\\"\"" pod="openstack/ovsdbserver-nb-0" podUID="acfbd9c7-d136-4478-a181-7e9fb3033557" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.084348 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"384ccba5-b841-48d7-bdb6-ad40c08d6c8f","Type":"ContainerStarted","Data":"c6a6d375982d6ece17ea4a974ce653a46efdb3e163bbbca7a078792348a6a155"} Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.084592 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.086346 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"96efe0ff-5c9e-465c-8e86-80035697b7d0","Type":"ContainerStarted","Data":"6ceaffe269752f6c4d2b15c8cfc99f623be44b71b604ee5dd39c7d5d0c22911e"} Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.321052 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-tjdqh" podStartSLOduration=21.749117199 podStartE2EDuration="39.32103069s" podCreationTimestamp="2026-01-22 12:14:01 +0000 UTC" firstStartedPulling="2026-01-22 12:14:21.476097514 +0000 UTC m=+1169.054213339" 
lastFinishedPulling="2026-01-22 12:14:39.048011015 +0000 UTC m=+1186.626126830" observedRunningTime="2026-01-22 12:14:40.312307718 +0000 UTC m=+1187.890423543" watchObservedRunningTime="2026-01-22 12:14:40.32103069 +0000 UTC m=+1187.899146525" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.346197 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=4.416246956 podStartE2EDuration="48.346171473s" podCreationTimestamp="2026-01-22 12:13:52 +0000 UTC" firstStartedPulling="2026-01-22 12:13:54.522306462 +0000 UTC m=+1142.100422287" lastFinishedPulling="2026-01-22 12:14:38.452230978 +0000 UTC m=+1186.030346804" observedRunningTime="2026-01-22 12:14:40.33739713 +0000 UTC m=+1187.915512975" watchObservedRunningTime="2026-01-22 12:14:40.346171473 +0000 UTC m=+1187.924287318" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.745469 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-dr29n"] Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.798521 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-qbbx4"] Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.805084 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.811564 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.818751 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-qbbx4"] Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.892237 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-ovsdbserver-sb\") pod \"dnsmasq-dns-586b989cdc-qbbx4\" (UID: \"c60c2e8e-872f-448a-a799-6c49732c906d\") " pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.892310 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-dns-svc\") pod \"dnsmasq-dns-586b989cdc-qbbx4\" (UID: \"c60c2e8e-872f-448a-a799-6c49732c906d\") " pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.892338 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-ovsdbserver-nb\") pod \"dnsmasq-dns-586b989cdc-qbbx4\" (UID: \"c60c2e8e-872f-448a-a799-6c49732c906d\") " pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.892474 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrjmj\" (UniqueName: \"kubernetes.io/projected/c60c2e8e-872f-448a-a799-6c49732c906d-kube-api-access-wrjmj\") pod \"dnsmasq-dns-586b989cdc-qbbx4\" (UID: \"c60c2e8e-872f-448a-a799-6c49732c906d\") " pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.892519 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-config\") pod \"dnsmasq-dns-586b989cdc-qbbx4\" (UID: \"c60c2e8e-872f-448a-a799-6c49732c906d\") " pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.994538 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrjmj\" (UniqueName: \"kubernetes.io/projected/c60c2e8e-872f-448a-a799-6c49732c906d-kube-api-access-wrjmj\") pod \"dnsmasq-dns-586b989cdc-qbbx4\" (UID: \"c60c2e8e-872f-448a-a799-6c49732c906d\") " pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.994594 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-config\") pod \"dnsmasq-dns-586b989cdc-qbbx4\" (UID: \"c60c2e8e-872f-448a-a799-6c49732c906d\") " pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.994660 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-ovsdbserver-sb\") pod \"dnsmasq-dns-586b989cdc-qbbx4\" (UID: \"c60c2e8e-872f-448a-a799-6c49732c906d\") " pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.994681 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-dns-svc\") pod \"dnsmasq-dns-586b989cdc-qbbx4\" (UID: \"c60c2e8e-872f-448a-a799-6c49732c906d\") " pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.994697 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-ovsdbserver-nb\") pod \"dnsmasq-dns-586b989cdc-qbbx4\" (UID: \"c60c2e8e-872f-448a-a799-6c49732c906d\") " pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.995882 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-ovsdbserver-nb\") pod \"dnsmasq-dns-586b989cdc-qbbx4\" (UID: \"c60c2e8e-872f-448a-a799-6c49732c906d\") " pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.996343 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-ovsdbserver-sb\") pod \"dnsmasq-dns-586b989cdc-qbbx4\" (UID: \"c60c2e8e-872f-448a-a799-6c49732c906d\") " pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.996618 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-dns-svc\") pod \"dnsmasq-dns-586b989cdc-qbbx4\" (UID: \"c60c2e8e-872f-448a-a799-6c49732c906d\") " pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" Jan 22 12:14:40 crc kubenswrapper[4773]: I0122 12:14:40.996820 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-config\") pod \"dnsmasq-dns-586b989cdc-qbbx4\" (UID: 
\"c60c2e8e-872f-448a-a799-6c49732c906d\") " pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.016566 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrjmj\" (UniqueName: \"kubernetes.io/projected/c60c2e8e-872f-448a-a799-6c49732c906d-kube-api-access-wrjmj\") pod \"dnsmasq-dns-586b989cdc-qbbx4\" (UID: \"c60c2e8e-872f-448a-a799-6c49732c906d\") " pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.106664 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"37262aee-18bc-423e-9dac-272af09de237","Type":"ContainerStarted","Data":"67316730ec155c07e7e601cb8c3c3be87325a29333647bce87c3036b77e66dbf"} Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.110821 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a688a76d-2498-4542-8285-709caf211e8a","Type":"ContainerStarted","Data":"e1de773e366c62bf95f18118de0c8136e39a369f67c5d8c6fa06a22d5888c9b9"} Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.119529 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a4c14d2f-5507-4d08-be37-55d77b5491a3","Type":"ContainerStarted","Data":"1c92b816dc78dcade34afa861b434a64234cddd6198a1d85b73b1ca7f992f05c"} Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.123741 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7878659675-jn77t" event={"ID":"2c2b41e2-772f-49ac-aeef-c5037f50522e","Type":"ContainerStarted","Data":"091e2d211db10ee4a5c8a4b768e5946f62e96c378520e9fd48d201e3080036ba"} Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.123778 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7878659675-jn77t" Jan 22 12:14:41 crc kubenswrapper[4773]: E0122 12:14:41.125218 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-nb\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server@sha256:947c1bb9373b7d3f2acea104a5666e394c830111bf80d133f1fe7238e4d06f28\\\"\"" pod="openstack/ovsdbserver-nb-0" podUID="acfbd9c7-d136-4478-a181-7e9fb3033557" Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.130232 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=37.187831569 podStartE2EDuration="40.13021717s" podCreationTimestamp="2026-01-22 12:14:01 +0000 UTC" firstStartedPulling="2026-01-22 12:14:36.137907971 +0000 UTC m=+1183.716023796" lastFinishedPulling="2026-01-22 12:14:39.080293572 +0000 UTC m=+1186.658409397" observedRunningTime="2026-01-22 12:14:41.129570671 +0000 UTC m=+1188.707686496" watchObservedRunningTime="2026-01-22 12:14:41.13021717 +0000 UTC m=+1188.708332995" Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.183663 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.208153 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-dr29n"
Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.225006 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7878659675-jn77t" podStartSLOduration=29.970042511 podStartE2EDuration="39.224980539s" podCreationTimestamp="2026-01-22 12:14:02 +0000 UTC" firstStartedPulling="2026-01-22 12:14:29.172018883 +0000 UTC m=+1176.750134708" lastFinishedPulling="2026-01-22 12:14:38.426956911 +0000 UTC m=+1186.005072736" observedRunningTime="2026-01-22 12:14:41.220926297 +0000 UTC m=+1188.799042132" watchObservedRunningTime="2026-01-22 12:14:41.224980539 +0000 UTC m=+1188.803096364"
Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.312851 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea5454f0-b36b-438b-92dd-25652ac641ae-config\") pod \"ea5454f0-b36b-438b-92dd-25652ac641ae\" (UID: \"ea5454f0-b36b-438b-92dd-25652ac641ae\") "
Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.313658 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ea5454f0-b36b-438b-92dd-25652ac641ae-dns-svc\") pod \"ea5454f0-b36b-438b-92dd-25652ac641ae\" (UID: \"ea5454f0-b36b-438b-92dd-25652ac641ae\") "
Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.313793 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wkkhv\" (UniqueName: \"kubernetes.io/projected/ea5454f0-b36b-438b-92dd-25652ac641ae-kube-api-access-wkkhv\") pod \"ea5454f0-b36b-438b-92dd-25652ac641ae\" (UID: \"ea5454f0-b36b-438b-92dd-25652ac641ae\") "
Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.313526 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea5454f0-b36b-438b-92dd-25652ac641ae-config" (OuterVolumeSpecName: "config") pod "ea5454f0-b36b-438b-92dd-25652ac641ae" (UID: "ea5454f0-b36b-438b-92dd-25652ac641ae"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.314930 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea5454f0-b36b-438b-92dd-25652ac641ae-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ea5454f0-b36b-438b-92dd-25652ac641ae" (UID: "ea5454f0-b36b-438b-92dd-25652ac641ae"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.322619 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea5454f0-b36b-438b-92dd-25652ac641ae-kube-api-access-wkkhv" (OuterVolumeSpecName: "kube-api-access-wkkhv") pod "ea5454f0-b36b-438b-92dd-25652ac641ae" (UID: "ea5454f0-b36b-438b-92dd-25652ac641ae"). InnerVolumeSpecName "kube-api-access-wkkhv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.416481 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ea5454f0-b36b-438b-92dd-25652ac641ae-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.416523 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wkkhv\" (UniqueName: \"kubernetes.io/projected/ea5454f0-b36b-438b-92dd-25652ac641ae-kube-api-access-wkkhv\") on node \"crc\" DevicePath \"\""
Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.416540 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea5454f0-b36b-438b-92dd-25652ac641ae-config\") on node \"crc\" DevicePath \"\""
Jan 22 12:14:41 crc kubenswrapper[4773]: I0122 12:14:41.640583 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-qbbx4"]
Jan 22 12:14:41 crc kubenswrapper[4773]: W0122 12:14:41.644589 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc60c2e8e_872f_448a_a799_6c49732c906d.slice/crio-c28076ea979ad086f041be9bf8e65fadc6874b48b5bf915fdd5a264249dc5483 WatchSource:0}: Error finding container c28076ea979ad086f041be9bf8e65fadc6874b48b5bf915fdd5a264249dc5483: Status 404 returned error can't find the container with id c28076ea979ad086f041be9bf8e65fadc6874b48b5bf915fdd5a264249dc5483
Jan 22 12:14:42 crc kubenswrapper[4773]: I0122 12:14:42.130984 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-dr29n" event={"ID":"ea5454f0-b36b-438b-92dd-25652ac641ae","Type":"ContainerDied","Data":"966e47f323fa2ca2c712b6a082cc99738ffdbf7528a457538e5379c00e50b4c7"}
Jan 22 12:14:42 crc kubenswrapper[4773]: I0122 12:14:42.132189 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-dr29n"
Jan 22 12:14:42 crc kubenswrapper[4773]: I0122 12:14:42.135022 4773 generic.go:334] "Generic (PLEG): container finished" podID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerID="57c81abee5e0a5a46aa6bf73d2046706a6dbbdb9688f83972e15931e0150b254" exitCode=0
Jan 22 12:14:42 crc kubenswrapper[4773]: I0122 12:14:42.135072 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-fj6wj" event={"ID":"7865c5e1-9bcc-467f-8a58-1adfaf30ed28","Type":"ContainerDied","Data":"57c81abee5e0a5a46aa6bf73d2046706a6dbbdb9688f83972e15931e0150b254"}
Jan 22 12:14:42 crc kubenswrapper[4773]: I0122 12:14:42.137175 4773 generic.go:334] "Generic (PLEG): container finished" podID="c60c2e8e-872f-448a-a799-6c49732c906d" containerID="2e0b4b0189a1737da3863eba6cdcb906c3410a9e5f98161cc2a5cf66ee3ea5b6" exitCode=0
Jan 22 12:14:42 crc kubenswrapper[4773]: I0122 12:14:42.138035 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" event={"ID":"c60c2e8e-872f-448a-a799-6c49732c906d","Type":"ContainerDied","Data":"2e0b4b0189a1737da3863eba6cdcb906c3410a9e5f98161cc2a5cf66ee3ea5b6"}
Jan 22 12:14:42 crc kubenswrapper[4773]: I0122 12:14:42.138058 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" event={"ID":"c60c2e8e-872f-448a-a799-6c49732c906d","Type":"ContainerStarted","Data":"c28076ea979ad086f041be9bf8e65fadc6874b48b5bf915fdd5a264249dc5483"}
Jan 22 12:14:42 crc kubenswrapper[4773]: I0122 12:14:42.173880 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:42 crc kubenswrapper[4773]: I0122 12:14:42.240655 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:42 crc kubenswrapper[4773]: I0122 12:14:42.272950 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-dr29n"]
Jan 22 12:14:42 crc kubenswrapper[4773]: I0122 12:14:42.285741 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-dr29n"]
Jan 22 12:14:42 crc kubenswrapper[4773]: I0122 12:14:42.670128 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea5454f0-b36b-438b-92dd-25652ac641ae" path="/var/lib/kubelet/pods/ea5454f0-b36b-438b-92dd-25652ac641ae/volumes"
Jan 22 12:14:43 crc kubenswrapper[4773]: I0122 12:14:43.145880 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-fj6wj" event={"ID":"7865c5e1-9bcc-467f-8a58-1adfaf30ed28","Type":"ContainerStarted","Data":"42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f"}
Jan 22 12:14:43 crc kubenswrapper[4773]: I0122 12:14:43.145928 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-fj6wj" event={"ID":"7865c5e1-9bcc-467f-8a58-1adfaf30ed28","Type":"ContainerStarted","Data":"705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9"}
Jan 22 12:14:43 crc kubenswrapper[4773]: I0122 12:14:43.146148 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-fj6wj"
Jan 22 12:14:43 crc kubenswrapper[4773]: I0122 12:14:43.146328 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-fj6wj"
Jan 22 12:14:43 crc kubenswrapper[4773]: I0122 12:14:43.148345 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" event={"ID":"c60c2e8e-872f-448a-a799-6c49732c906d","Type":"ContainerStarted","Data":"5af5bfb6cd3cdbbf101349122e170389e4026d37498b841f5ce9005c5128e4ce"}
Jan 22 12:14:43 crc kubenswrapper[4773]: I0122 12:14:43.148619 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-586b989cdc-qbbx4"
Jan 22 12:14:43 crc kubenswrapper[4773]: I0122 12:14:43.148656 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:43 crc kubenswrapper[4773]: I0122 12:14:43.174705 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-fj6wj" podStartSLOduration=6.12627309 podStartE2EDuration="45.174683985s" podCreationTimestamp="2026-01-22 12:13:58 +0000 UTC" firstStartedPulling="2026-01-22 12:14:02.049397154 +0000 UTC m=+1149.627512979" lastFinishedPulling="2026-01-22 12:14:41.097808049 +0000 UTC m=+1188.675923874" observedRunningTime="2026-01-22 12:14:43.16450595 +0000 UTC m=+1190.742621785" watchObservedRunningTime="2026-01-22 12:14:43.174683985 +0000 UTC m=+1190.752799810"
Jan 22 12:14:43 crc kubenswrapper[4773]: I0122 12:14:43.190874 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" podStartSLOduration=3.190854849 podStartE2EDuration="3.190854849s" podCreationTimestamp="2026-01-22 12:14:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:14:43.184041275 +0000 UTC m=+1190.762157120" watchObservedRunningTime="2026-01-22 12:14:43.190854849 +0000 UTC m=+1190.768970674"
Jan 22 12:14:44 crc kubenswrapper[4773]: I0122 12:14:44.159952 4773 generic.go:334] "Generic (PLEG): container finished" podID="96efe0ff-5c9e-465c-8e86-80035697b7d0" containerID="6ceaffe269752f6c4d2b15c8cfc99f623be44b71b604ee5dd39c7d5d0c22911e" exitCode=0
Jan 22 12:14:44 crc kubenswrapper[4773]: I0122 12:14:44.160074 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"96efe0ff-5c9e-465c-8e86-80035697b7d0","Type":"ContainerDied","Data":"6ceaffe269752f6c4d2b15c8cfc99f623be44b71b604ee5dd39c7d5d0c22911e"}
Jan 22 12:14:44 crc kubenswrapper[4773]: I0122 12:14:44.163463 4773 generic.go:334] "Generic (PLEG): container finished" podID="b9643af6-36f5-46b0-9cca-b9fe67a689dd" containerID="dba0ccb61ced9efa67fe09f169cc91f5cfd0e2d646aa717d90a06e04a0c5a34d" exitCode=0
Jan 22 12:14:44 crc kubenswrapper[4773]: I0122 12:14:44.163511 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"b9643af6-36f5-46b0-9cca-b9fe67a689dd","Type":"ContainerDied","Data":"dba0ccb61ced9efa67fe09f169cc91f5cfd0e2d646aa717d90a06e04a0c5a34d"}
Jan 22 12:14:44 crc kubenswrapper[4773]: I0122 12:14:44.240374 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Jan 22 12:14:45 crc kubenswrapper[4773]: I0122 12:14:45.171253 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"b9643af6-36f5-46b0-9cca-b9fe67a689dd","Type":"ContainerStarted","Data":"7abeeb25a28ee1cf287a8f8112c5b0ed49bd1977fc5bfc686b83f3ac4dd09463"}
Jan 22 12:14:45 crc kubenswrapper[4773]: I0122 12:14:45.173721 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"96efe0ff-5c9e-465c-8e86-80035697b7d0","Type":"ContainerStarted","Data":"ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29"}
Jan 22 12:14:45 crc kubenswrapper[4773]: I0122 12:14:45.194198 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=10.555357106 podStartE2EDuration="57.194168381s" podCreationTimestamp="2026-01-22 12:13:48 +0000 UTC" firstStartedPulling="2026-01-22 12:13:51.813175566 +0000 UTC m=+1139.391291391" lastFinishedPulling="2026-01-22 12:14:38.451986841 +0000 UTC m=+1186.030102666" observedRunningTime="2026-01-22 12:14:45.18980078 +0000 UTC m=+1192.767916615" watchObservedRunningTime="2026-01-22 12:14:45.194168381 +0000 UTC m=+1192.772284206"
Jan 22 12:14:45 crc kubenswrapper[4773]: I0122 12:14:45.218314 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=10.654963602 podStartE2EDuration="55.218264103s" podCreationTimestamp="2026-01-22 12:13:50 +0000 UTC" firstStartedPulling="2026-01-22 12:13:54.436137899 +0000 UTC m=+1142.014253724" lastFinishedPulling="2026-01-22 12:14:38.9994384 +0000 UTC m=+1186.577554225" observedRunningTime="2026-01-22 12:14:45.214671055 +0000 UTC m=+1192.792786890" watchObservedRunningTime="2026-01-22 12:14:45.218264103 +0000 UTC m=+1192.796379928"
Jan 22 12:14:47 crc kubenswrapper[4773]: I0122 12:14:47.933589 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0"
Jan 22 12:14:48 crc kubenswrapper[4773]: I0122 12:14:48.111583 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7878659675-jn77t"
Jan 22 12:14:49 crc kubenswrapper[4773]: I0122 12:14:49.228427 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-rp6dh" event={"ID":"757f37a0-3cc7-4d57-a956-83d236d6cebc","Type":"ContainerStarted","Data":"324d59fd90ba3b7ee19caa650ea315b885dd36e757be3f412677159407ce6c1e"}
Jan 22 12:14:49 crc kubenswrapper[4773]: I0122 12:14:49.230186 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-rp6dh"
Jan 22 12:14:49 crc kubenswrapper[4773]: I0122 12:14:49.255220 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-rp6dh" podStartSLOduration=4.240389769 podStartE2EDuration="51.255196223s" podCreationTimestamp="2026-01-22 12:13:58 +0000 UTC" firstStartedPulling="2026-01-22 12:14:01.552456211 +0000 UTC m=+1149.130572036" lastFinishedPulling="2026-01-22 12:14:48.567262665 +0000 UTC m=+1196.145378490" observedRunningTime="2026-01-22 12:14:49.248878743 +0000 UTC m=+1196.826994568" watchObservedRunningTime="2026-01-22 12:14:49.255196223 +0000 UTC m=+1196.833312048"
Jan 22 12:14:50 crc kubenswrapper[4773]: I0122 12:14:50.238388 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8ab88165-fbbf-403c-b1c2-ecb80db3e8d1","Type":"ContainerStarted","Data":"e9f730470eabafffce1e8c26b31435cfc09dae14559dc09b36395af2ed3705f0"}
Jan 22 12:14:50 crc kubenswrapper[4773]: I0122 12:14:50.239267 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Jan 22 12:14:50 crc kubenswrapper[4773]: I0122 12:14:50.259079 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.635221478 podStartE2EDuration="56.259054175s" podCreationTimestamp="2026-01-22 12:13:54 +0000 UTC" firstStartedPulling="2026-01-22 12:13:56.399698092 +0000 UTC m=+1143.977813917" lastFinishedPulling="2026-01-22 12:14:50.023530789 +0000 UTC m=+1197.601646614" observedRunningTime="2026-01-22 12:14:50.255758206 +0000 UTC m=+1197.833874061" watchObservedRunningTime="2026-01-22 12:14:50.259054175 +0000 UTC m=+1197.837170020"
Jan 22 12:14:50 crc kubenswrapper[4773]: I0122 12:14:50.387807 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Jan 22 12:14:50 crc kubenswrapper[4773]: I0122 12:14:50.387861 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Jan 22 12:14:51 crc kubenswrapper[4773]: I0122 12:14:51.229476 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-586b989cdc-qbbx4"
Jan 22 12:14:51 crc kubenswrapper[4773]: I0122 12:14:51.292080 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7878659675-jn77t"]
Jan 22 12:14:51 crc kubenswrapper[4773]: I0122 12:14:51.292438 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7878659675-jn77t" podUID="2c2b41e2-772f-49ac-aeef-c5037f50522e" containerName="dnsmasq-dns" containerID="cri-o://091e2d211db10ee4a5c8a4b768e5946f62e96c378520e9fd48d201e3080036ba" gracePeriod=10
Jan 22 12:14:51 crc kubenswrapper[4773]: I0122 12:14:51.365022 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Jan 22 12:14:51 crc kubenswrapper[4773]: I0122 12:14:51.464021 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.059121 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7878659675-jn77t"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.144086 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c2b41e2-772f-49ac-aeef-c5037f50522e-dns-svc\") pod \"2c2b41e2-772f-49ac-aeef-c5037f50522e\" (UID: \"2c2b41e2-772f-49ac-aeef-c5037f50522e\") "
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.144236 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c2b41e2-772f-49ac-aeef-c5037f50522e-config\") pod \"2c2b41e2-772f-49ac-aeef-c5037f50522e\" (UID: \"2c2b41e2-772f-49ac-aeef-c5037f50522e\") "
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.144337 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cc692\" (UniqueName: \"kubernetes.io/projected/2c2b41e2-772f-49ac-aeef-c5037f50522e-kube-api-access-cc692\") pod \"2c2b41e2-772f-49ac-aeef-c5037f50522e\" (UID: \"2c2b41e2-772f-49ac-aeef-c5037f50522e\") "
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.144374 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c2b41e2-772f-49ac-aeef-c5037f50522e-ovsdbserver-nb\") pod \"2c2b41e2-772f-49ac-aeef-c5037f50522e\" (UID: \"2c2b41e2-772f-49ac-aeef-c5037f50522e\") "
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.153139 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-fp8kx"]
Jan 22 12:14:52 crc kubenswrapper[4773]: E0122 12:14:52.153784 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c2b41e2-772f-49ac-aeef-c5037f50522e" containerName="dnsmasq-dns"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.153804 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c2b41e2-772f-49ac-aeef-c5037f50522e" containerName="dnsmasq-dns"
Jan 22 12:14:52 crc kubenswrapper[4773]: E0122 12:14:52.153833 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c2b41e2-772f-49ac-aeef-c5037f50522e" containerName="init"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.153840 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c2b41e2-772f-49ac-aeef-c5037f50522e" containerName="init"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.153980 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c2b41e2-772f-49ac-aeef-c5037f50522e" containerName="dnsmasq-dns"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.154573 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fp8kx"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.159483 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-fp8kx"]
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.159788 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c2b41e2-772f-49ac-aeef-c5037f50522e-kube-api-access-cc692" (OuterVolumeSpecName: "kube-api-access-cc692") pod "2c2b41e2-772f-49ac-aeef-c5037f50522e" (UID: "2c2b41e2-772f-49ac-aeef-c5037f50522e"). InnerVolumeSpecName "kube-api-access-cc692". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.193347 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c2b41e2-772f-49ac-aeef-c5037f50522e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2c2b41e2-772f-49ac-aeef-c5037f50522e" (UID: "2c2b41e2-772f-49ac-aeef-c5037f50522e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.195573 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c2b41e2-772f-49ac-aeef-c5037f50522e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2c2b41e2-772f-49ac-aeef-c5037f50522e" (UID: "2c2b41e2-772f-49ac-aeef-c5037f50522e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.196416 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c2b41e2-772f-49ac-aeef-c5037f50522e-config" (OuterVolumeSpecName: "config") pod "2c2b41e2-772f-49ac-aeef-c5037f50522e" (UID: "2c2b41e2-772f-49ac-aeef-c5037f50522e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.246706 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gs9x7\" (UniqueName: \"kubernetes.io/projected/19ea8aed-d681-4685-8e05-3c653517f21f-kube-api-access-gs9x7\") pod \"keystone-db-create-fp8kx\" (UID: \"19ea8aed-d681-4685-8e05-3c653517f21f\") " pod="openstack/keystone-db-create-fp8kx"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.246787 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19ea8aed-d681-4685-8e05-3c653517f21f-operator-scripts\") pod \"keystone-db-create-fp8kx\" (UID: \"19ea8aed-d681-4685-8e05-3c653517f21f\") " pod="openstack/keystone-db-create-fp8kx"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.246963 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c2b41e2-772f-49ac-aeef-c5037f50522e-config\") on node \"crc\" DevicePath \"\""
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.246976 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cc692\" (UniqueName: \"kubernetes.io/projected/2c2b41e2-772f-49ac-aeef-c5037f50522e-kube-api-access-cc692\") on node \"crc\" DevicePath \"\""
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.246986 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c2b41e2-772f-49ac-aeef-c5037f50522e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.247030 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c2b41e2-772f-49ac-aeef-c5037f50522e-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.263075 4773 generic.go:334] "Generic (PLEG): container finished" podID="2c2b41e2-772f-49ac-aeef-c5037f50522e" containerID="091e2d211db10ee4a5c8a4b768e5946f62e96c378520e9fd48d201e3080036ba" exitCode=0
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.263210 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7878659675-jn77t"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.263198 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7878659675-jn77t" event={"ID":"2c2b41e2-772f-49ac-aeef-c5037f50522e","Type":"ContainerDied","Data":"091e2d211db10ee4a5c8a4b768e5946f62e96c378520e9fd48d201e3080036ba"}
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.263538 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7878659675-jn77t" event={"ID":"2c2b41e2-772f-49ac-aeef-c5037f50522e","Type":"ContainerDied","Data":"88c7175967b5ed62ed151fe599e6128504b08cfd9f3da806d9d59f2f8dfe6c0b"}
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.263568 4773 scope.go:117] "RemoveContainer" containerID="091e2d211db10ee4a5c8a4b768e5946f62e96c378520e9fd48d201e3080036ba"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.286665 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-1b20-account-create-update-5bbc6"]
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.288184 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-1b20-account-create-update-5bbc6"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.292131 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.301854 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-1b20-account-create-update-5bbc6"]
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.320080 4773 scope.go:117] "RemoveContainer" containerID="24e0dfadae7bd5769609d7d2cedf007f2a9d31095b70120efad1959d1e02e550"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.357539 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gs9x7\" (UniqueName: \"kubernetes.io/projected/19ea8aed-d681-4685-8e05-3c653517f21f-kube-api-access-gs9x7\") pod \"keystone-db-create-fp8kx\" (UID: \"19ea8aed-d681-4685-8e05-3c653517f21f\") " pod="openstack/keystone-db-create-fp8kx"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.357649 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19ea8aed-d681-4685-8e05-3c653517f21f-operator-scripts\") pod \"keystone-db-create-fp8kx\" (UID: \"19ea8aed-d681-4685-8e05-3c653517f21f\") " pod="openstack/keystone-db-create-fp8kx"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.359667 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19ea8aed-d681-4685-8e05-3c653517f21f-operator-scripts\") pod \"keystone-db-create-fp8kx\" (UID: \"19ea8aed-d681-4685-8e05-3c653517f21f\") " pod="openstack/keystone-db-create-fp8kx"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.369962 4773 scope.go:117] "RemoveContainer" containerID="091e2d211db10ee4a5c8a4b768e5946f62e96c378520e9fd48d201e3080036ba"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.369966 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7878659675-jn77t"]
Jan 22 12:14:52 crc kubenswrapper[4773]: E0122 12:14:52.374802 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"091e2d211db10ee4a5c8a4b768e5946f62e96c378520e9fd48d201e3080036ba\": container with ID starting with 091e2d211db10ee4a5c8a4b768e5946f62e96c378520e9fd48d201e3080036ba not found: ID does not exist" containerID="091e2d211db10ee4a5c8a4b768e5946f62e96c378520e9fd48d201e3080036ba"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.374862 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"091e2d211db10ee4a5c8a4b768e5946f62e96c378520e9fd48d201e3080036ba"} err="failed to get container status \"091e2d211db10ee4a5c8a4b768e5946f62e96c378520e9fd48d201e3080036ba\": rpc error: code = NotFound desc = could not find container \"091e2d211db10ee4a5c8a4b768e5946f62e96c378520e9fd48d201e3080036ba\": container with ID starting with 091e2d211db10ee4a5c8a4b768e5946f62e96c378520e9fd48d201e3080036ba not found: ID does not exist"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.374894 4773 scope.go:117] "RemoveContainer" containerID="24e0dfadae7bd5769609d7d2cedf007f2a9d31095b70120efad1959d1e02e550"
Jan 22 12:14:52 crc kubenswrapper[4773]: E0122 12:14:52.375224 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24e0dfadae7bd5769609d7d2cedf007f2a9d31095b70120efad1959d1e02e550\": container with ID starting with 24e0dfadae7bd5769609d7d2cedf007f2a9d31095b70120efad1959d1e02e550 not found: ID does not exist" containerID="24e0dfadae7bd5769609d7d2cedf007f2a9d31095b70120efad1959d1e02e550"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.375249 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24e0dfadae7bd5769609d7d2cedf007f2a9d31095b70120efad1959d1e02e550"} err="failed to get container status \"24e0dfadae7bd5769609d7d2cedf007f2a9d31095b70120efad1959d1e02e550\": rpc error: code = NotFound desc = could not find container \"24e0dfadae7bd5769609d7d2cedf007f2a9d31095b70120efad1959d1e02e550\": container with ID starting with 24e0dfadae7bd5769609d7d2cedf007f2a9d31095b70120efad1959d1e02e550 not found: ID does not exist"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.377527 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7878659675-jn77t"]
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.381091 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gs9x7\" (UniqueName: \"kubernetes.io/projected/19ea8aed-d681-4685-8e05-3c653517f21f-kube-api-access-gs9x7\") pod \"keystone-db-create-fp8kx\" (UID: \"19ea8aed-d681-4685-8e05-3c653517f21f\") " pod="openstack/keystone-db-create-fp8kx"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.461084 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc19e578-7984-4a19-bc3b-dfb8b707886e-operator-scripts\") pod \"keystone-1b20-account-create-update-5bbc6\" (UID: \"cc19e578-7984-4a19-bc3b-dfb8b707886e\") " pod="openstack/keystone-1b20-account-create-update-5bbc6"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.461264 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42fk7\" (UniqueName: \"kubernetes.io/projected/cc19e578-7984-4a19-bc3b-dfb8b707886e-kube-api-access-42fk7\") pod \"keystone-1b20-account-create-update-5bbc6\" (UID: \"cc19e578-7984-4a19-bc3b-dfb8b707886e\") " pod="openstack/keystone-1b20-account-create-update-5bbc6"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.555204 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fp8kx"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.564066 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc19e578-7984-4a19-bc3b-dfb8b707886e-operator-scripts\") pod \"keystone-1b20-account-create-update-5bbc6\" (UID: \"cc19e578-7984-4a19-bc3b-dfb8b707886e\") " pod="openstack/keystone-1b20-account-create-update-5bbc6"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.564157 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42fk7\" (UniqueName: \"kubernetes.io/projected/cc19e578-7984-4a19-bc3b-dfb8b707886e-kube-api-access-42fk7\") pod \"keystone-1b20-account-create-update-5bbc6\" (UID: \"cc19e578-7984-4a19-bc3b-dfb8b707886e\") " pod="openstack/keystone-1b20-account-create-update-5bbc6"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.566342 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc19e578-7984-4a19-bc3b-dfb8b707886e-operator-scripts\") pod \"keystone-1b20-account-create-update-5bbc6\" (UID: \"cc19e578-7984-4a19-bc3b-dfb8b707886e\") " pod="openstack/keystone-1b20-account-create-update-5bbc6"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.572266 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.572523 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.584905 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42fk7\" (UniqueName: \"kubernetes.io/projected/cc19e578-7984-4a19-bc3b-dfb8b707886e-kube-api-access-42fk7\") pod \"keystone-1b20-account-create-update-5bbc6\" (UID: \"cc19e578-7984-4a19-bc3b-dfb8b707886e\") " pod="openstack/keystone-1b20-account-create-update-5bbc6"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.631322 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-1b20-account-create-update-5bbc6"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.671352 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c2b41e2-772f-49ac-aeef-c5037f50522e" path="/var/lib/kubelet/pods/2c2b41e2-772f-49ac-aeef-c5037f50522e/volumes"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.672742 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.966711 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-bqvfx"]
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.968000 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-bqvfx"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.975238 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-bqvfx"]
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.985325 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-a998-account-create-update-zzgr7"]
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.987037 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-a998-account-create-update-zzgr7"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.991148 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Jan 22 12:14:52 crc kubenswrapper[4773]: I0122 12:14:52.994054 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-a998-account-create-update-zzgr7"]
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.073365 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdjdt\" (UniqueName: \"kubernetes.io/projected/45aa265d-efe0-4e53-8b9b-593f0da53c3f-kube-api-access-pdjdt\") pod \"placement-a998-account-create-update-zzgr7\" (UID: \"45aa265d-efe0-4e53-8b9b-593f0da53c3f\") " pod="openstack/placement-a998-account-create-update-zzgr7"
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.073792 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45aa265d-efe0-4e53-8b9b-593f0da53c3f-operator-scripts\") pod \"placement-a998-account-create-update-zzgr7\" (UID: \"45aa265d-efe0-4e53-8b9b-593f0da53c3f\") " pod="openstack/placement-a998-account-create-update-zzgr7"
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.073843 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb8cab67-3335-428b-bda5-a17d7bbed7df-operator-scripts\") pod \"placement-db-create-bqvfx\" (UID: \"cb8cab67-3335-428b-bda5-a17d7bbed7df\") " pod="openstack/placement-db-create-bqvfx"
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.073895 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfrpm\" (UniqueName: \"kubernetes.io/projected/cb8cab67-3335-428b-bda5-a17d7bbed7df-kube-api-access-gfrpm\") pod \"placement-db-create-bqvfx\" (UID: \"cb8cab67-3335-428b-bda5-a17d7bbed7df\") " pod="openstack/placement-db-create-bqvfx"
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.083175 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-fp8kx"]
Jan 22 12:14:53 crc kubenswrapper[4773]: W0122 12:14:53.085830 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19ea8aed_d681_4685_8e05_3c653517f21f.slice/crio-72ca0561d51f017913fbb9abcfb605a1ac4b289ddd76c21d9fa6fcae65f35353 WatchSource:0}: Error finding container 72ca0561d51f017913fbb9abcfb605a1ac4b289ddd76c21d9fa6fcae65f35353: Status 404 returned error can't find the container with id 72ca0561d51f017913fbb9abcfb605a1ac4b289ddd76c21d9fa6fcae65f35353
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.160572 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-1b20-account-create-update-5bbc6"]
Jan 22 12:14:53 crc kubenswrapper[4773]: W0122 12:14:53.164215 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc19e578_7984_4a19_bc3b_dfb8b707886e.slice/crio-88a3823dc63f1b0307036ea0b4109b4bd3ca0f6fd536d9c2413ab29e785cb468 WatchSource:0}: Error finding container 88a3823dc63f1b0307036ea0b4109b4bd3ca0f6fd536d9c2413ab29e785cb468: Status 404 returned error can't find the container with id 88a3823dc63f1b0307036ea0b4109b4bd3ca0f6fd536d9c2413ab29e785cb468
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.170185 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.175431 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45aa265d-efe0-4e53-8b9b-593f0da53c3f-operator-scripts\") pod \"placement-a998-account-create-update-zzgr7\" (UID: \"45aa265d-efe0-4e53-8b9b-593f0da53c3f\") " pod="openstack/placement-a998-account-create-update-zzgr7"
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.175501 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb8cab67-3335-428b-bda5-a17d7bbed7df-operator-scripts\") pod \"placement-db-create-bqvfx\" (UID: \"cb8cab67-3335-428b-bda5-a17d7bbed7df\") " pod="openstack/placement-db-create-bqvfx"
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.175561 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfrpm\" (UniqueName: \"kubernetes.io/projected/cb8cab67-3335-428b-bda5-a17d7bbed7df-kube-api-access-gfrpm\") pod \"placement-db-create-bqvfx\" (UID: \"cb8cab67-3335-428b-bda5-a17d7bbed7df\") " pod="openstack/placement-db-create-bqvfx"
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.175641 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdjdt\" (UniqueName: \"kubernetes.io/projected/45aa265d-efe0-4e53-8b9b-593f0da53c3f-kube-api-access-pdjdt\") pod \"placement-a998-account-create-update-zzgr7\" (UID: \"45aa265d-efe0-4e53-8b9b-593f0da53c3f\") " pod="openstack/placement-a998-account-create-update-zzgr7"
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.176572 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb8cab67-3335-428b-bda5-a17d7bbed7df-operator-scripts\") pod \"placement-db-create-bqvfx\" (UID: \"cb8cab67-3335-428b-bda5-a17d7bbed7df\") " pod="openstack/placement-db-create-bqvfx"
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.176724 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45aa265d-efe0-4e53-8b9b-593f0da53c3f-operator-scripts\") pod \"placement-a998-account-create-update-zzgr7\" (UID: \"45aa265d-efe0-4e53-8b9b-593f0da53c3f\") " pod="openstack/placement-a998-account-create-update-zzgr7"
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.196882 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfrpm\" (UniqueName: \"kubernetes.io/projected/cb8cab67-3335-428b-bda5-a17d7bbed7df-kube-api-access-gfrpm\") pod \"placement-db-create-bqvfx\" (UID: \"cb8cab67-3335-428b-bda5-a17d7bbed7df\") " pod="openstack/placement-db-create-bqvfx"
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.196962 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdjdt\" (UniqueName: \"kubernetes.io/projected/45aa265d-efe0-4e53-8b9b-593f0da53c3f-kube-api-access-pdjdt\") pod \"placement-a998-account-create-update-zzgr7\" (UID: \"45aa265d-efe0-4e53-8b9b-593f0da53c3f\") " pod="openstack/placement-a998-account-create-update-zzgr7"
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.272722 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-1b20-account-create-update-5bbc6" event={"ID":"cc19e578-7984-4a19-bc3b-dfb8b707886e","Type":"ContainerStarted","Data":"88a3823dc63f1b0307036ea0b4109b4bd3ca0f6fd536d9c2413ab29e785cb468"}
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.276296 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-fp8kx" event={"ID":"19ea8aed-d681-4685-8e05-3c653517f21f","Type":"ContainerStarted","Data":"e24ddfd76fa1df92fbb4c9d0a52f1d039f2299b447dbc760897120433c3fd08e"}
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.276359 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-fp8kx" event={"ID":"19ea8aed-d681-4685-8e05-3c653517f21f","Type":"ContainerStarted","Data":"72ca0561d51f017913fbb9abcfb605a1ac4b289ddd76c21d9fa6fcae65f35353"}
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.296388 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-fp8kx" podStartSLOduration=1.2963696900000001 podStartE2EDuration="1.29636969s" podCreationTimestamp="2026-01-22 12:14:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:14:53.292019519 +0000 UTC m=+1200.870135354" watchObservedRunningTime="2026-01-22 12:14:53.29636969 +0000 UTC m=+1200.874485515"
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.302606 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-bqvfx"
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.320473 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-a998-account-create-update-zzgr7"
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.369072 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Jan 22 12:14:53 crc kubenswrapper[4773]: I0122 12:14:53.987656 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-bqvfx"]
Jan 22 12:14:54 crc kubenswrapper[4773]: W0122 12:14:54.005566 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcb8cab67_3335_428b_bda5_a17d7bbed7df.slice/crio-6f6aafac15556f394c2ef0be000d0027011446ee5c0bcf1050ffaf48d5294178 WatchSource:0}: Error finding container 6f6aafac15556f394c2ef0be000d0027011446ee5c0bcf1050ffaf48d5294178: Status 404 returned error can't find the container with id 6f6aafac15556f394c2ef0be000d0027011446ee5c0bcf1050ffaf48d5294178
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.012700 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-a998-account-create-update-zzgr7"]
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.295783 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-1b20-account-create-update-5bbc6" event={"ID":"cc19e578-7984-4a19-bc3b-dfb8b707886e","Type":"ContainerStarted","Data":"6bdf627901783056ccf143a1bf488b67db1d389700b6b85cfe24b43321a70af4"}
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.306387 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-bqvfx" event={"ID":"cb8cab67-3335-428b-bda5-a17d7bbed7df","Type":"ContainerStarted","Data":"6f6aafac15556f394c2ef0be000d0027011446ee5c0bcf1050ffaf48d5294178"}
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.312515 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-a998-account-create-update-zzgr7" event={"ID":"45aa265d-efe0-4e53-8b9b-593f0da53c3f","Type":"ContainerStarted","Data":"16944931830cef43febacf77cc8a02ea387086b0a4f1391d39586288b2a963c5"}
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.324551 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-1b20-account-create-update-5bbc6" podStartSLOduration=2.324530228 podStartE2EDuration="2.324530228s" podCreationTimestamp="2026-01-22 12:14:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:14:54.317257281 +0000 UTC m=+1201.895373126" watchObservedRunningTime="2026-01-22 12:14:54.324530228 +0000 UTC m=+1201.902646053"
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.693952 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-7qmzz"]
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.695223 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz"
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.708444 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-7qmzz"]
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.861855 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-ovsdbserver-nb\") pod \"dnsmasq-dns-67fdf7998c-7qmzz\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz"
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.861976 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-ovsdbserver-sb\") pod \"dnsmasq-dns-67fdf7998c-7qmzz\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz"
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.862040 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-dns-svc\") pod \"dnsmasq-dns-67fdf7998c-7qmzz\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz"
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.862093 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmfs2\" (UniqueName: \"kubernetes.io/projected/87cf9391-4cf2-4f02-a532-e15917e46cb4-kube-api-access-bmfs2\") pod \"dnsmasq-dns-67fdf7998c-7qmzz\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz"
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.862134 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-config\") pod \"dnsmasq-dns-67fdf7998c-7qmzz\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz"
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.963547 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-ovsdbserver-nb\") pod \"dnsmasq-dns-67fdf7998c-7qmzz\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz"
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.963623 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-ovsdbserver-sb\") pod \"dnsmasq-dns-67fdf7998c-7qmzz\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz"
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.963683 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-dns-svc\") pod \"dnsmasq-dns-67fdf7998c-7qmzz\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz"
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.963728 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmfs2\" (UniqueName: \"kubernetes.io/projected/87cf9391-4cf2-4f02-a532-e15917e46cb4-kube-api-access-bmfs2\") pod \"dnsmasq-dns-67fdf7998c-7qmzz\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz"
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.964455 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-config\") pod \"dnsmasq-dns-67fdf7998c-7qmzz\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz"
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.965114 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-dns-svc\") pod \"dnsmasq-dns-67fdf7998c-7qmzz\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz"
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.965327 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-ovsdbserver-sb\") pod \"dnsmasq-dns-67fdf7998c-7qmzz\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz"
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.965561 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-config\") pod \"dnsmasq-dns-67fdf7998c-7qmzz\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz"
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.966198 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-ovsdbserver-nb\") pod \"dnsmasq-dns-67fdf7998c-7qmzz\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz"
Jan 22 12:14:54 crc kubenswrapper[4773]: I0122 12:14:54.992458 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmfs2\" (UniqueName: \"kubernetes.io/projected/87cf9391-4cf2-4f02-a532-e15917e46cb4-kube-api-access-bmfs2\") pod \"dnsmasq-dns-67fdf7998c-7qmzz\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz"
Jan 22 12:14:55 crc kubenswrapper[4773]: I0122 12:14:55.029779 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz"
Jan 22 12:14:55 crc kubenswrapper[4773]: I0122 12:14:55.330587 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-a998-account-create-update-zzgr7" event={"ID":"45aa265d-efe0-4e53-8b9b-593f0da53c3f","Type":"ContainerStarted","Data":"8b7efe8e4bbf533e7dc8989603b738a0ad4692d7d386a7b7224f95d3a3d096aa"}
Jan 22 12:14:55 crc kubenswrapper[4773]: I0122 12:14:55.334754 4773 generic.go:334] "Generic (PLEG): container finished" podID="19ea8aed-d681-4685-8e05-3c653517f21f" containerID="e24ddfd76fa1df92fbb4c9d0a52f1d039f2299b447dbc760897120433c3fd08e" exitCode=0
Jan 22 12:14:55 crc kubenswrapper[4773]: I0122 12:14:55.334790 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-fp8kx" event={"ID":"19ea8aed-d681-4685-8e05-3c653517f21f","Type":"ContainerDied","Data":"e24ddfd76fa1df92fbb4c9d0a52f1d039f2299b447dbc760897120433c3fd08e"}
Jan 22 12:14:55 crc kubenswrapper[4773]: I0122 12:14:55.601723 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-7qmzz"]
Jan 22 12:14:55 crc kubenswrapper[4773]: I0122 12:14:55.879990 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"]
Jan 22 12:14:55 crc kubenswrapper[4773]: I0122 12:14:55.891556 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Jan 22 12:14:55 crc kubenswrapper[4773]: I0122 12:14:55.898153 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf"
Jan 22 12:14:55 crc kubenswrapper[4773]: I0122 12:14:55.898483 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-zx9bm"
Jan 22 12:14:55 crc kubenswrapper[4773]: I0122 12:14:55.898548 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files"
Jan 22 12:14:55 crc kubenswrapper[4773]: I0122 12:14:55.898626 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data"
Jan 22 12:14:55 crc kubenswrapper[4773]: I0122 12:14:55.912418 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.360407 4773 generic.go:334] "Generic (PLEG): container finished" podID="cb8cab67-3335-428b-bda5-a17d7bbed7df" containerID="15e25dd63bf3901cd9fd02136357daec98bd55eadc8869b0674ea5be9ff443d9" exitCode=0
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.360523 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-bqvfx" event={"ID":"cb8cab67-3335-428b-bda5-a17d7bbed7df","Type":"ContainerDied","Data":"15e25dd63bf3901cd9fd02136357daec98bd55eadc8869b0674ea5be9ff443d9"}
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.369756 4773 generic.go:334] "Generic (PLEG): container finished" podID="87cf9391-4cf2-4f02-a532-e15917e46cb4" containerID="74b4d4684664b139139bb3348e4ff958b3867224667c4e0b40ecd5eabab182dd" exitCode=0
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.370046 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz" event={"ID":"87cf9391-4cf2-4f02-a532-e15917e46cb4","Type":"ContainerDied","Data":"74b4d4684664b139139bb3348e4ff958b3867224667c4e0b40ecd5eabab182dd"}
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.370086 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz" event={"ID":"87cf9391-4cf2-4f02-a532-e15917e46cb4","Type":"ContainerStarted","Data":"034eacff577dee7d17b843cecc06c7824109ca86057fef0ce427ca8cc1f09f60"}
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.416471 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0"
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.416741 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-cache\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0"
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.416909 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0"
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.417006 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0"
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.417092 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jmb8\" (UniqueName: \"kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-kube-api-access-5jmb8\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0"
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.417182 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-lock\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0"
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.519576 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0"
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.519749 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-cache\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0"
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.519818 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0"
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.519853 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0"
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.519883 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jmb8\" (UniqueName: \"kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-kube-api-access-5jmb8\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0"
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.519910 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-lock\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0"
Jan 22 12:14:56 crc kubenswrapper[4773]: E0122 12:14:56.520831 4773 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Jan 22 12:14:56 crc kubenswrapper[4773]: E0122 12:14:56.520865 4773 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Jan 22 12:14:56 crc kubenswrapper[4773]: E0122 12:14:56.520989 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift podName:6d5ae4d3-bfc2-4d06-a84e-dc56e250514c nodeName:}" failed. No retries permitted until 2026-01-22 12:14:57.020956605 +0000 UTC m=+1204.599072620 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift") pod "swift-storage-0" (UID: "6d5ae4d3-bfc2-4d06-a84e-dc56e250514c") : configmap "swift-ring-files" not found
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.521628 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/swift-storage-0"
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.521989 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-lock\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0"
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.522011 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-cache\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0"
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.525495 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0"
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.545171 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jmb8\" (UniqueName: \"kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-kube-api-access-5jmb8\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0"
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.576353 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0"
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.821992 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fp8kx"
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.849853 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-a998-account-create-update-zzgr7" podStartSLOduration=4.849832047 podStartE2EDuration="4.849832047s" podCreationTimestamp="2026-01-22 12:14:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:14:56.439597688 +0000 UTC m=+1204.017713523" watchObservedRunningTime="2026-01-22 12:14:56.849832047 +0000 UTC m=+1204.427947872"
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.926524 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gs9x7\" (UniqueName: \"kubernetes.io/projected/19ea8aed-d681-4685-8e05-3c653517f21f-kube-api-access-gs9x7\") pod \"19ea8aed-d681-4685-8e05-3c653517f21f\" (UID: \"19ea8aed-d681-4685-8e05-3c653517f21f\") "
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.926785 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19ea8aed-d681-4685-8e05-3c653517f21f-operator-scripts\") pod \"19ea8aed-d681-4685-8e05-3c653517f21f\" (UID: \"19ea8aed-d681-4685-8e05-3c653517f21f\") "
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.928374 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19ea8aed-d681-4685-8e05-3c653517f21f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "19ea8aed-d681-4685-8e05-3c653517f21f" (UID: "19ea8aed-d681-4685-8e05-3c653517f21f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:14:56 crc kubenswrapper[4773]: I0122 12:14:56.931689 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19ea8aed-d681-4685-8e05-3c653517f21f-kube-api-access-gs9x7" (OuterVolumeSpecName: "kube-api-access-gs9x7") pod "19ea8aed-d681-4685-8e05-3c653517f21f" (UID: "19ea8aed-d681-4685-8e05-3c653517f21f"). InnerVolumeSpecName "kube-api-access-gs9x7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.029633 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0"
Jan 22 12:14:57 crc kubenswrapper[4773]: E0122 12:14:57.029867 4773 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Jan 22 12:14:57 crc kubenswrapper[4773]: E0122 12:14:57.029905 4773 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.029972 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19ea8aed-d681-4685-8e05-3c653517f21f-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 12:14:57 crc kubenswrapper[4773]: E0122 12:14:57.029982 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift podName:6d5ae4d3-bfc2-4d06-a84e-dc56e250514c nodeName:}" failed. No retries permitted until 2026-01-22 12:14:58.029953932 +0000 UTC m=+1205.608069757 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift") pod "swift-storage-0" (UID: "6d5ae4d3-bfc2-4d06-a84e-dc56e250514c") : configmap "swift-ring-files" not found
Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.030016 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gs9x7\" (UniqueName: \"kubernetes.io/projected/19ea8aed-d681-4685-8e05-3c653517f21f-kube-api-access-gs9x7\") on node \"crc\" DevicePath \"\""
Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.384840 4773 generic.go:334] "Generic (PLEG): container finished" podID="45aa265d-efe0-4e53-8b9b-593f0da53c3f" containerID="8b7efe8e4bbf533e7dc8989603b738a0ad4692d7d386a7b7224f95d3a3d096aa" exitCode=0
Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.384897 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-a998-account-create-update-zzgr7" event={"ID":"45aa265d-efe0-4e53-8b9b-593f0da53c3f","Type":"ContainerDied","Data":"8b7efe8e4bbf533e7dc8989603b738a0ad4692d7d386a7b7224f95d3a3d096aa"}
Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.389483 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-fp8kx" event={"ID":"19ea8aed-d681-4685-8e05-3c653517f21f","Type":"ContainerDied","Data":"72ca0561d51f017913fbb9abcfb605a1ac4b289ddd76c21d9fa6fcae65f35353"}
Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.389960 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72ca0561d51f017913fbb9abcfb605a1ac4b289ddd76c21d9fa6fcae65f35353"
Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.390359 4773 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/keystone-db-create-fp8kx" Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.392908 4773 generic.go:334] "Generic (PLEG): container finished" podID="cc19e578-7984-4a19-bc3b-dfb8b707886e" containerID="6bdf627901783056ccf143a1bf488b67db1d389700b6b85cfe24b43321a70af4" exitCode=0 Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.393024 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-1b20-account-create-update-5bbc6" event={"ID":"cc19e578-7984-4a19-bc3b-dfb8b707886e","Type":"ContainerDied","Data":"6bdf627901783056ccf143a1bf488b67db1d389700b6b85cfe24b43321a70af4"} Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.395460 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz" event={"ID":"87cf9391-4cf2-4f02-a532-e15917e46cb4","Type":"ContainerStarted","Data":"180e33a7cc131a748eacd1fe35057742fc50f138d932b50b2709a02c7a42c6c6"} Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.395660 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz" Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.398729 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"acfbd9c7-d136-4478-a181-7e9fb3033557","Type":"ContainerStarted","Data":"0c89a12963a143e2b4ba0fe70de80385a721e9ebda036a4115b6dc5e805ca36d"} Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.442505 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz" podStartSLOduration=3.44248692 podStartE2EDuration="3.44248692s" podCreationTimestamp="2026-01-22 12:14:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:14:57.430759939 +0000 UTC m=+1205.008875764" watchObservedRunningTime="2026-01-22 12:14:57.44248692 +0000 UTC m=+1205.020602745" Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.473578 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=6.680032081 podStartE2EDuration="1m1.473543651s" podCreationTimestamp="2026-01-22 12:13:56 +0000 UTC" firstStartedPulling="2026-01-22 12:14:01.525022381 +0000 UTC m=+1149.103138196" lastFinishedPulling="2026-01-22 12:14:56.318533941 +0000 UTC m=+1203.896649766" observedRunningTime="2026-01-22 12:14:57.466974494 +0000 UTC m=+1205.045090329" watchObservedRunningTime="2026-01-22 12:14:57.473543651 +0000 UTC m=+1205.051659476" Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.767393 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-bqvfx" Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.948192 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb8cab67-3335-428b-bda5-a17d7bbed7df-operator-scripts\") pod \"cb8cab67-3335-428b-bda5-a17d7bbed7df\" (UID: \"cb8cab67-3335-428b-bda5-a17d7bbed7df\") " Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.948325 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gfrpm\" (UniqueName: \"kubernetes.io/projected/cb8cab67-3335-428b-bda5-a17d7bbed7df-kube-api-access-gfrpm\") pod \"cb8cab67-3335-428b-bda5-a17d7bbed7df\" (UID: \"cb8cab67-3335-428b-bda5-a17d7bbed7df\") " Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.949266 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb8cab67-3335-428b-bda5-a17d7bbed7df-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cb8cab67-3335-428b-bda5-a17d7bbed7df" (UID: "cb8cab67-3335-428b-bda5-a17d7bbed7df"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:14:57 crc kubenswrapper[4773]: I0122 12:14:57.964827 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb8cab67-3335-428b-bda5-a17d7bbed7df-kube-api-access-gfrpm" (OuterVolumeSpecName: "kube-api-access-gfrpm") pod "cb8cab67-3335-428b-bda5-a17d7bbed7df" (UID: "cb8cab67-3335-428b-bda5-a17d7bbed7df"). InnerVolumeSpecName "kube-api-access-gfrpm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.050206 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.050402 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gfrpm\" (UniqueName: \"kubernetes.io/projected/cb8cab67-3335-428b-bda5-a17d7bbed7df-kube-api-access-gfrpm\") on node \"crc\" DevicePath \"\"" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.050423 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb8cab67-3335-428b-bda5-a17d7bbed7df-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:14:58 crc kubenswrapper[4773]: E0122 12:14:58.050457 4773 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 22 12:14:58 crc kubenswrapper[4773]: E0122 12:14:58.050487 4773 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 22 12:14:58 crc kubenswrapper[4773]: E0122 12:14:58.050552 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift podName:6d5ae4d3-bfc2-4d06-a84e-dc56e250514c nodeName:}" failed. No retries permitted until 2026-01-22 12:15:00.050531494 +0000 UTC m=+1207.628647379 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift") pod "swift-storage-0" (UID: "6d5ae4d3-bfc2-4d06-a84e-dc56e250514c") : configmap "swift-ring-files" not found Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.095618 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-jf4bv"] Jan 22 12:14:58 crc kubenswrapper[4773]: E0122 12:14:58.096051 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb8cab67-3335-428b-bda5-a17d7bbed7df" containerName="mariadb-database-create" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.096078 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb8cab67-3335-428b-bda5-a17d7bbed7df" containerName="mariadb-database-create" Jan 22 12:14:58 crc kubenswrapper[4773]: E0122 12:14:58.096110 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19ea8aed-d681-4685-8e05-3c653517f21f" containerName="mariadb-database-create" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.096119 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="19ea8aed-d681-4685-8e05-3c653517f21f" containerName="mariadb-database-create" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.096370 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="19ea8aed-d681-4685-8e05-3c653517f21f" containerName="mariadb-database-create" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.096405 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb8cab67-3335-428b-bda5-a17d7bbed7df" containerName="mariadb-database-create" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.097099 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-jf4bv" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.127941 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-jf4bv"] Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.254369 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/700990cd-025d-4ee8-9fc1-8c82488ed341-operator-scripts\") pod \"glance-db-create-jf4bv\" (UID: \"700990cd-025d-4ee8-9fc1-8c82488ed341\") " pod="openstack/glance-db-create-jf4bv" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.254425 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdppj\" (UniqueName: \"kubernetes.io/projected/700990cd-025d-4ee8-9fc1-8c82488ed341-kube-api-access-bdppj\") pod \"glance-db-create-jf4bv\" (UID: \"700990cd-025d-4ee8-9fc1-8c82488ed341\") " pod="openstack/glance-db-create-jf4bv" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.293231 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-5b0e-account-create-update-9j2lk"] Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.294982 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-5b0e-account-create-update-9j2lk" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.298815 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.307582 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-5b0e-account-create-update-9j2lk"] Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.356067 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/700990cd-025d-4ee8-9fc1-8c82488ed341-operator-scripts\") pod \"glance-db-create-jf4bv\" (UID: \"700990cd-025d-4ee8-9fc1-8c82488ed341\") " pod="openstack/glance-db-create-jf4bv" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.356113 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdppj\" (UniqueName: \"kubernetes.io/projected/700990cd-025d-4ee8-9fc1-8c82488ed341-kube-api-access-bdppj\") pod \"glance-db-create-jf4bv\" (UID: \"700990cd-025d-4ee8-9fc1-8c82488ed341\") " pod="openstack/glance-db-create-jf4bv" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.356960 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/700990cd-025d-4ee8-9fc1-8c82488ed341-operator-scripts\") pod \"glance-db-create-jf4bv\" (UID: \"700990cd-025d-4ee8-9fc1-8c82488ed341\") " pod="openstack/glance-db-create-jf4bv" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.374955 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdppj\" (UniqueName: \"kubernetes.io/projected/700990cd-025d-4ee8-9fc1-8c82488ed341-kube-api-access-bdppj\") pod \"glance-db-create-jf4bv\" (UID: \"700990cd-025d-4ee8-9fc1-8c82488ed341\") " pod="openstack/glance-db-create-jf4bv" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.388756 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.388834 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.409137 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-bqvfx" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.409246 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-bqvfx" event={"ID":"cb8cab67-3335-428b-bda5-a17d7bbed7df","Type":"ContainerDied","Data":"6f6aafac15556f394c2ef0be000d0027011446ee5c0bcf1050ffaf48d5294178"} Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.409301 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f6aafac15556f394c2ef0be000d0027011446ee5c0bcf1050ffaf48d5294178" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.418339 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-jf4bv" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.457573 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krszm\" (UniqueName: \"kubernetes.io/projected/eadfca17-cc73-4d86-a5b4-6682fbe8d49d-kube-api-access-krszm\") pod \"glance-5b0e-account-create-update-9j2lk\" (UID: \"eadfca17-cc73-4d86-a5b4-6682fbe8d49d\") " pod="openstack/glance-5b0e-account-create-update-9j2lk" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.457922 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eadfca17-cc73-4d86-a5b4-6682fbe8d49d-operator-scripts\") pod \"glance-5b0e-account-create-update-9j2lk\" (UID: \"eadfca17-cc73-4d86-a5b4-6682fbe8d49d\") " pod="openstack/glance-5b0e-account-create-update-9j2lk" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.560006 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krszm\" (UniqueName: \"kubernetes.io/projected/eadfca17-cc73-4d86-a5b4-6682fbe8d49d-kube-api-access-krszm\") pod \"glance-5b0e-account-create-update-9j2lk\" (UID: \"eadfca17-cc73-4d86-a5b4-6682fbe8d49d\") " pod="openstack/glance-5b0e-account-create-update-9j2lk" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.560204 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eadfca17-cc73-4d86-a5b4-6682fbe8d49d-operator-scripts\") pod \"glance-5b0e-account-create-update-9j2lk\" (UID: \"eadfca17-cc73-4d86-a5b4-6682fbe8d49d\") " pod="openstack/glance-5b0e-account-create-update-9j2lk" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.563935 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eadfca17-cc73-4d86-a5b4-6682fbe8d49d-operator-scripts\") pod \"glance-5b0e-account-create-update-9j2lk\" (UID: \"eadfca17-cc73-4d86-a5b4-6682fbe8d49d\") " pod="openstack/glance-5b0e-account-create-update-9j2lk" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.580911 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krszm\" (UniqueName: \"kubernetes.io/projected/eadfca17-cc73-4d86-a5b4-6682fbe8d49d-kube-api-access-krszm\") pod \"glance-5b0e-account-create-update-9j2lk\" (UID: \"eadfca17-cc73-4d86-a5b4-6682fbe8d49d\") " pod="openstack/glance-5b0e-account-create-update-9j2lk" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.688802 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5b0e-account-create-update-9j2lk" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.816836 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-kf6jn"] Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.818329 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-kf6jn" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.822493 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.860864 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-kf6jn"] Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.906119 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-a998-account-create-update-zzgr7" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.969796 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2a093ed-7e51-4caa-b6d7-16af5e03444b-operator-scripts\") pod \"root-account-create-update-kf6jn\" (UID: \"f2a093ed-7e51-4caa-b6d7-16af5e03444b\") " pod="openstack/root-account-create-update-kf6jn" Jan 22 12:14:58 crc kubenswrapper[4773]: I0122 12:14:58.969877 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6265k\" (UniqueName: \"kubernetes.io/projected/f2a093ed-7e51-4caa-b6d7-16af5e03444b-kube-api-access-6265k\") pod \"root-account-create-update-kf6jn\" (UID: \"f2a093ed-7e51-4caa-b6d7-16af5e03444b\") " pod="openstack/root-account-create-update-kf6jn" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.028940 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-1b20-account-create-update-5bbc6" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.071506 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45aa265d-efe0-4e53-8b9b-593f0da53c3f-operator-scripts\") pod \"45aa265d-efe0-4e53-8b9b-593f0da53c3f\" (UID: \"45aa265d-efe0-4e53-8b9b-593f0da53c3f\") " Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.071597 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdjdt\" (UniqueName: \"kubernetes.io/projected/45aa265d-efe0-4e53-8b9b-593f0da53c3f-kube-api-access-pdjdt\") pod \"45aa265d-efe0-4e53-8b9b-593f0da53c3f\" (UID: \"45aa265d-efe0-4e53-8b9b-593f0da53c3f\") " Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.072014 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2a093ed-7e51-4caa-b6d7-16af5e03444b-operator-scripts\") pod \"root-account-create-update-kf6jn\" (UID: \"f2a093ed-7e51-4caa-b6d7-16af5e03444b\") " pod="openstack/root-account-create-update-kf6jn" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.072074 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6265k\" (UniqueName: \"kubernetes.io/projected/f2a093ed-7e51-4caa-b6d7-16af5e03444b-kube-api-access-6265k\") pod \"root-account-create-update-kf6jn\" (UID: \"f2a093ed-7e51-4caa-b6d7-16af5e03444b\") " pod="openstack/root-account-create-update-kf6jn" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.073253 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2a093ed-7e51-4caa-b6d7-16af5e03444b-operator-scripts\") pod \"root-account-create-update-kf6jn\" (UID: 
\"f2a093ed-7e51-4caa-b6d7-16af5e03444b\") " pod="openstack/root-account-create-update-kf6jn" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.074892 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45aa265d-efe0-4e53-8b9b-593f0da53c3f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "45aa265d-efe0-4e53-8b9b-593f0da53c3f" (UID: "45aa265d-efe0-4e53-8b9b-593f0da53c3f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.082175 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45aa265d-efe0-4e53-8b9b-593f0da53c3f-kube-api-access-pdjdt" (OuterVolumeSpecName: "kube-api-access-pdjdt") pod "45aa265d-efe0-4e53-8b9b-593f0da53c3f" (UID: "45aa265d-efe0-4e53-8b9b-593f0da53c3f"). InnerVolumeSpecName "kube-api-access-pdjdt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.089314 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6265k\" (UniqueName: \"kubernetes.io/projected/f2a093ed-7e51-4caa-b6d7-16af5e03444b-kube-api-access-6265k\") pod \"root-account-create-update-kf6jn\" (UID: \"f2a093ed-7e51-4caa-b6d7-16af5e03444b\") " pod="openstack/root-account-create-update-kf6jn" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.156086 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-kf6jn" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.173368 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc19e578-7984-4a19-bc3b-dfb8b707886e-operator-scripts\") pod \"cc19e578-7984-4a19-bc3b-dfb8b707886e\" (UID: \"cc19e578-7984-4a19-bc3b-dfb8b707886e\") " Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.173549 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42fk7\" (UniqueName: \"kubernetes.io/projected/cc19e578-7984-4a19-bc3b-dfb8b707886e-kube-api-access-42fk7\") pod \"cc19e578-7984-4a19-bc3b-dfb8b707886e\" (UID: \"cc19e578-7984-4a19-bc3b-dfb8b707886e\") " Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.174701 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc19e578-7984-4a19-bc3b-dfb8b707886e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cc19e578-7984-4a19-bc3b-dfb8b707886e" (UID: "cc19e578-7984-4a19-bc3b-dfb8b707886e"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.175174 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45aa265d-efe0-4e53-8b9b-593f0da53c3f-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.175204 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdjdt\" (UniqueName: \"kubernetes.io/projected/45aa265d-efe0-4e53-8b9b-593f0da53c3f-kube-api-access-pdjdt\") on node \"crc\" DevicePath \"\"" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.175217 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc19e578-7984-4a19-bc3b-dfb8b707886e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.177416 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc19e578-7984-4a19-bc3b-dfb8b707886e-kube-api-access-42fk7" (OuterVolumeSpecName: "kube-api-access-42fk7") pod "cc19e578-7984-4a19-bc3b-dfb8b707886e" (UID: "cc19e578-7984-4a19-bc3b-dfb8b707886e"). InnerVolumeSpecName "kube-api-access-42fk7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.277120 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42fk7\" (UniqueName: \"kubernetes.io/projected/cc19e578-7984-4a19-bc3b-dfb8b707886e-kube-api-access-42fk7\") on node \"crc\" DevicePath \"\"" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.377525 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-jf4bv"] Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.418597 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-5b0e-account-create-update-9j2lk"] Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.439880 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-jf4bv" event={"ID":"700990cd-025d-4ee8-9fc1-8c82488ed341","Type":"ContainerStarted","Data":"67b8c62dc63ee371c0c960e779e2c8eeb9b4b5526abe55c7cda8eac879201ab3"} Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.443692 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-a998-account-create-update-zzgr7" event={"ID":"45aa265d-efe0-4e53-8b9b-593f0da53c3f","Type":"ContainerDied","Data":"16944931830cef43febacf77cc8a02ea387086b0a4f1391d39586288b2a963c5"} Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.443756 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="16944931830cef43febacf77cc8a02ea387086b0a4f1391d39586288b2a963c5" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.443845 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-a998-account-create-update-zzgr7" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.453796 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-1b20-account-create-update-5bbc6" event={"ID":"cc19e578-7984-4a19-bc3b-dfb8b707886e","Type":"ContainerDied","Data":"88a3823dc63f1b0307036ea0b4109b4bd3ca0f6fd536d9c2413ab29e785cb468"} Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.453844 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88a3823dc63f1b0307036ea0b4109b4bd3ca0f6fd536d9c2413ab29e785cb468" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.453901 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-1b20-account-create-update-5bbc6" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.606317 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-kf6jn"] Jan 22 12:14:59 crc kubenswrapper[4773]: W0122 12:14:59.618098 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2a093ed_7e51_4caa_b6d7_16af5e03444b.slice/crio-438185045e3a878b7544a20d975b8637b4df3be76a654543bcf4dcf1ae6fdc92 WatchSource:0}: Error finding container 438185045e3a878b7544a20d975b8637b4df3be76a654543bcf4dcf1ae6fdc92: Status 404 returned error can't find the container with id 438185045e3a878b7544a20d975b8637b4df3be76a654543bcf4dcf1ae6fdc92 Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.780258 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-bbfh5"] Jan 22 12:14:59 crc kubenswrapper[4773]: E0122 12:14:59.781043 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc19e578-7984-4a19-bc3b-dfb8b707886e" containerName="mariadb-account-create-update" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.781060 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc19e578-7984-4a19-bc3b-dfb8b707886e" containerName="mariadb-account-create-update" Jan 22 12:14:59 crc kubenswrapper[4773]: E0122 12:14:59.781085 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45aa265d-efe0-4e53-8b9b-593f0da53c3f" containerName="mariadb-account-create-update" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.781099 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="45aa265d-efe0-4e53-8b9b-593f0da53c3f" containerName="mariadb-account-create-update" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.781302 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="45aa265d-efe0-4e53-8b9b-593f0da53c3f" containerName="mariadb-account-create-update" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.781329 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc19e578-7984-4a19-bc3b-dfb8b707886e" containerName="mariadb-account-create-update" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.782155 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.789943 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.790163 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.791244 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-bbfh5"] Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.798207 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.894545 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/1ee43913-badd-4397-a999-70b306ca56c3-ring-data-devices\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.894929 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ee43913-badd-4397-a999-70b306ca56c3-scripts\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.895119 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/1ee43913-badd-4397-a999-70b306ca56c3-etc-swift\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.895299 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rv7gk\" (UniqueName: \"kubernetes.io/projected/1ee43913-badd-4397-a999-70b306ca56c3-kube-api-access-rv7gk\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.895516 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/1ee43913-badd-4397-a999-70b306ca56c3-swiftconf\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.895645 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/1ee43913-badd-4397-a999-70b306ca56c3-dispersionconf\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.895827 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ee43913-badd-4397-a999-70b306ca56c3-combined-ca-bundle\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 
12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.997789 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/1ee43913-badd-4397-a999-70b306ca56c3-etc-swift\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.997873 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rv7gk\" (UniqueName: \"kubernetes.io/projected/1ee43913-badd-4397-a999-70b306ca56c3-kube-api-access-rv7gk\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.997955 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/1ee43913-badd-4397-a999-70b306ca56c3-swiftconf\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.997991 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/1ee43913-badd-4397-a999-70b306ca56c3-dispersionconf\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.998042 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ee43913-badd-4397-a999-70b306ca56c3-combined-ca-bundle\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.998081 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/1ee43913-badd-4397-a999-70b306ca56c3-ring-data-devices\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.998109 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ee43913-badd-4397-a999-70b306ca56c3-scripts\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:14:59 crc kubenswrapper[4773]: I0122 12:14:59.998497 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/1ee43913-badd-4397-a999-70b306ca56c3-etc-swift\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.000013 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/1ee43913-badd-4397-a999-70b306ca56c3-ring-data-devices\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.002142 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ee43913-badd-4397-a999-70b306ca56c3-scripts\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.006576 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/1ee43913-badd-4397-a999-70b306ca56c3-swiftconf\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.009516 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/1ee43913-badd-4397-a999-70b306ca56c3-dispersionconf\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.010010 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ee43913-badd-4397-a999-70b306ca56c3-combined-ca-bundle\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.018927 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rv7gk\" (UniqueName: \"kubernetes.io/projected/1ee43913-badd-4397-a999-70b306ca56c3-kube-api-access-rv7gk\") pod \"swift-ring-rebalance-bbfh5\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.099736 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0" Jan 22 12:15:00 crc kubenswrapper[4773]: E0122 12:15:00.099983 4773 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 22 12:15:00 crc kubenswrapper[4773]: E0122 12:15:00.100008 4773 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 22 12:15:00 crc kubenswrapper[4773]: E0122 12:15:00.100056 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift podName:6d5ae4d3-bfc2-4d06-a84e-dc56e250514c nodeName:}" failed. No retries permitted until 2026-01-22 12:15:04.100041269 +0000 UTC m=+1211.678157094 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift") pod "swift-storage-0" (UID: "6d5ae4d3-bfc2-4d06-a84e-dc56e250514c") : configmap "swift-ring-files" not found Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.122371 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.145349 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj"] Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.146690 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.152323 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.152643 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.159564 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj"] Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.302407 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbmkx\" (UniqueName: \"kubernetes.io/projected/9e288fcc-8bcb-4e2e-83fe-c17aae51ad38-kube-api-access-kbmkx\") pod \"collect-profiles-29484735-f2bcj\" (UID: \"9e288fcc-8bcb-4e2e-83fe-c17aae51ad38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.302837 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e288fcc-8bcb-4e2e-83fe-c17aae51ad38-secret-volume\") pod \"collect-profiles-29484735-f2bcj\" (UID: \"9e288fcc-8bcb-4e2e-83fe-c17aae51ad38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.302879 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e288fcc-8bcb-4e2e-83fe-c17aae51ad38-config-volume\") pod \"collect-profiles-29484735-f2bcj\" (UID: \"9e288fcc-8bcb-4e2e-83fe-c17aae51ad38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.404617 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbmkx\" (UniqueName: \"kubernetes.io/projected/9e288fcc-8bcb-4e2e-83fe-c17aae51ad38-kube-api-access-kbmkx\") pod \"collect-profiles-29484735-f2bcj\" (UID: \"9e288fcc-8bcb-4e2e-83fe-c17aae51ad38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.404693 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e288fcc-8bcb-4e2e-83fe-c17aae51ad38-secret-volume\") pod \"collect-profiles-29484735-f2bcj\" (UID: \"9e288fcc-8bcb-4e2e-83fe-c17aae51ad38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.404734 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e288fcc-8bcb-4e2e-83fe-c17aae51ad38-config-volume\") pod \"collect-profiles-29484735-f2bcj\" (UID: 
\"9e288fcc-8bcb-4e2e-83fe-c17aae51ad38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.406087 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e288fcc-8bcb-4e2e-83fe-c17aae51ad38-config-volume\") pod \"collect-profiles-29484735-f2bcj\" (UID: \"9e288fcc-8bcb-4e2e-83fe-c17aae51ad38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.411557 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e288fcc-8bcb-4e2e-83fe-c17aae51ad38-secret-volume\") pod \"collect-profiles-29484735-f2bcj\" (UID: \"9e288fcc-8bcb-4e2e-83fe-c17aae51ad38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.428520 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbmkx\" (UniqueName: \"kubernetes.io/projected/9e288fcc-8bcb-4e2e-83fe-c17aae51ad38-kube-api-access-kbmkx\") pod \"collect-profiles-29484735-f2bcj\" (UID: \"9e288fcc-8bcb-4e2e-83fe-c17aae51ad38\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.462426 4773 generic.go:334] "Generic (PLEG): container finished" podID="f2a093ed-7e51-4caa-b6d7-16af5e03444b" containerID="2d90c9b5a0623109c593eb949a72965be23283a3b00ef0a41a7c83caaf0e9e07" exitCode=0 Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.462739 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-kf6jn" event={"ID":"f2a093ed-7e51-4caa-b6d7-16af5e03444b","Type":"ContainerDied","Data":"2d90c9b5a0623109c593eb949a72965be23283a3b00ef0a41a7c83caaf0e9e07"} Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.462842 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-kf6jn" event={"ID":"f2a093ed-7e51-4caa-b6d7-16af5e03444b","Type":"ContainerStarted","Data":"438185045e3a878b7544a20d975b8637b4df3be76a654543bcf4dcf1ae6fdc92"} Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.464185 4773 generic.go:334] "Generic (PLEG): container finished" podID="eadfca17-cc73-4d86-a5b4-6682fbe8d49d" containerID="8f6fb3df8ffd981f7e9f28222c8087c9b5d58f85290053a881c73ef8ed2e0dcb" exitCode=0 Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.464246 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5b0e-account-create-update-9j2lk" event={"ID":"eadfca17-cc73-4d86-a5b4-6682fbe8d49d","Type":"ContainerDied","Data":"8f6fb3df8ffd981f7e9f28222c8087c9b5d58f85290053a881c73ef8ed2e0dcb"} Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.464272 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5b0e-account-create-update-9j2lk" event={"ID":"eadfca17-cc73-4d86-a5b4-6682fbe8d49d","Type":"ContainerStarted","Data":"d312bb73ae319b2e057538d362068481254d3023e10c63f4e65705a1bef3dfb3"} Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.467586 4773 generic.go:334] "Generic (PLEG): container finished" podID="700990cd-025d-4ee8-9fc1-8c82488ed341" containerID="10f9a6cfd3b67be41810a7b70e014044939bb3dabad0bead65708e24bd630e37" exitCode=0 Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.467672 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-db-create-jf4bv" event={"ID":"700990cd-025d-4ee8-9fc1-8c82488ed341","Type":"ContainerDied","Data":"10f9a6cfd3b67be41810a7b70e014044939bb3dabad0bead65708e24bd630e37"} Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.492842 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj" Jan 22 12:15:00 crc kubenswrapper[4773]: I0122 12:15:00.718482 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-bbfh5"] Jan 22 12:15:00 crc kubenswrapper[4773]: W0122 12:15:00.745140 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ee43913_badd_4397_a999_70b306ca56c3.slice/crio-c906fff5421d366ea136587bb41082007bea340d315691ff18e0e141431617a9 WatchSource:0}: Error finding container c906fff5421d366ea136587bb41082007bea340d315691ff18e0e141431617a9: Status 404 returned error can't find the container with id c906fff5421d366ea136587bb41082007bea340d315691ff18e0e141431617a9 Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.019951 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj"] Jan 22 12:15:01 crc kubenswrapper[4773]: W0122 12:15:01.024018 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9e288fcc_8bcb_4e2e_83fe_c17aae51ad38.slice/crio-1928d4fea1d65b2f39e9103a598b67db16bf52acd3adfd9c088fd97f82097435 WatchSource:0}: Error finding container 1928d4fea1d65b2f39e9103a598b67db16bf52acd3adfd9c088fd97f82097435: Status 404 returned error can't find the container with id 1928d4fea1d65b2f39e9103a598b67db16bf52acd3adfd9c088fd97f82097435 Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.458115 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.487029 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-bbfh5" event={"ID":"1ee43913-badd-4397-a999-70b306ca56c3","Type":"ContainerStarted","Data":"c906fff5421d366ea136587bb41082007bea340d315691ff18e0e141431617a9"} Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.490441 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj" event={"ID":"9e288fcc-8bcb-4e2e-83fe-c17aae51ad38","Type":"ContainerStarted","Data":"1338b4148bfcdb8e0283799088b4f0f13bb8d6a872000ea98faf82042d881516"} Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.490554 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj" event={"ID":"9e288fcc-8bcb-4e2e-83fe-c17aae51ad38","Type":"ContainerStarted","Data":"1928d4fea1d65b2f39e9103a598b67db16bf52acd3adfd9c088fd97f82097435"} Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.501401 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.517270 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj" podStartSLOduration=1.517244132 podStartE2EDuration="1.517244132s" podCreationTimestamp="2026-01-22 12:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:15:01.51382561 +0000 UTC m=+1209.091941435" watchObservedRunningTime="2026-01-22 12:15:01.517244132 +0000 UTC m=+1209.095359957" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.699744 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.701373 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.704160 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-5wm5b" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.706336 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.709781 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.710138 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.727790 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.857480 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/21901911-8523-4adc-9851-336360f4c11e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.857859 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-st5dc\" (UniqueName: \"kubernetes.io/projected/21901911-8523-4adc-9851-336360f4c11e-kube-api-access-st5dc\") pod \"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.857911 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/21901911-8523-4adc-9851-336360f4c11e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.858033 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21901911-8523-4adc-9851-336360f4c11e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.858104 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/21901911-8523-4adc-9851-336360f4c11e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.858171 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21901911-8523-4adc-9851-336360f4c11e-config\") pod 
\"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.858203 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/21901911-8523-4adc-9851-336360f4c11e-scripts\") pod \"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.959740 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/21901911-8523-4adc-9851-336360f4c11e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.959896 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21901911-8523-4adc-9851-336360f4c11e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.959949 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/21901911-8523-4adc-9851-336360f4c11e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.959999 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21901911-8523-4adc-9851-336360f4c11e-config\") pod \"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.960028 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/21901911-8523-4adc-9851-336360f4c11e-scripts\") pod \"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.960072 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/21901911-8523-4adc-9851-336360f4c11e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.960103 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-st5dc\" (UniqueName: \"kubernetes.io/projected/21901911-8523-4adc-9851-336360f4c11e-kube-api-access-st5dc\") pod \"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.961474 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/21901911-8523-4adc-9851-336360f4c11e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.962121 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/21901911-8523-4adc-9851-336360f4c11e-scripts\") pod 
\"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.963042 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21901911-8523-4adc-9851-336360f4c11e-config\") pod \"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.968852 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21901911-8523-4adc-9851-336360f4c11e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.979679 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/21901911-8523-4adc-9851-336360f4c11e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:01 crc kubenswrapper[4773]: I0122 12:15:01.990058 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/21901911-8523-4adc-9851-336360f4c11e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.000184 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-st5dc\" (UniqueName: \"kubernetes.io/projected/21901911-8523-4adc-9851-336360f4c11e-kube-api-access-st5dc\") pod \"ovn-northd-0\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") " pod="openstack/ovn-northd-0" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.030474 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.151677 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-kf6jn" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.179870 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-jf4bv" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.184436 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-5b0e-account-create-update-9j2lk" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.265472 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/700990cd-025d-4ee8-9fc1-8c82488ed341-operator-scripts\") pod \"700990cd-025d-4ee8-9fc1-8c82488ed341\" (UID: \"700990cd-025d-4ee8-9fc1-8c82488ed341\") " Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.265986 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdppj\" (UniqueName: \"kubernetes.io/projected/700990cd-025d-4ee8-9fc1-8c82488ed341-kube-api-access-bdppj\") pod \"700990cd-025d-4ee8-9fc1-8c82488ed341\" (UID: \"700990cd-025d-4ee8-9fc1-8c82488ed341\") " Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.266044 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2a093ed-7e51-4caa-b6d7-16af5e03444b-operator-scripts\") pod \"f2a093ed-7e51-4caa-b6d7-16af5e03444b\" (UID: \"f2a093ed-7e51-4caa-b6d7-16af5e03444b\") " Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.266123 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6265k\" (UniqueName: \"kubernetes.io/projected/f2a093ed-7e51-4caa-b6d7-16af5e03444b-kube-api-access-6265k\") pod \"f2a093ed-7e51-4caa-b6d7-16af5e03444b\" (UID: \"f2a093ed-7e51-4caa-b6d7-16af5e03444b\") " Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.266991 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2a093ed-7e51-4caa-b6d7-16af5e03444b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f2a093ed-7e51-4caa-b6d7-16af5e03444b" (UID: "f2a093ed-7e51-4caa-b6d7-16af5e03444b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.267378 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/700990cd-025d-4ee8-9fc1-8c82488ed341-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "700990cd-025d-4ee8-9fc1-8c82488ed341" (UID: "700990cd-025d-4ee8-9fc1-8c82488ed341"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.281796 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/700990cd-025d-4ee8-9fc1-8c82488ed341-kube-api-access-bdppj" (OuterVolumeSpecName: "kube-api-access-bdppj") pod "700990cd-025d-4ee8-9fc1-8c82488ed341" (UID: "700990cd-025d-4ee8-9fc1-8c82488ed341"). InnerVolumeSpecName "kube-api-access-bdppj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.286718 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2a093ed-7e51-4caa-b6d7-16af5e03444b-kube-api-access-6265k" (OuterVolumeSpecName: "kube-api-access-6265k") pod "f2a093ed-7e51-4caa-b6d7-16af5e03444b" (UID: "f2a093ed-7e51-4caa-b6d7-16af5e03444b"). InnerVolumeSpecName "kube-api-access-6265k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.367343 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krszm\" (UniqueName: \"kubernetes.io/projected/eadfca17-cc73-4d86-a5b4-6682fbe8d49d-kube-api-access-krszm\") pod \"eadfca17-cc73-4d86-a5b4-6682fbe8d49d\" (UID: \"eadfca17-cc73-4d86-a5b4-6682fbe8d49d\") " Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.367512 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eadfca17-cc73-4d86-a5b4-6682fbe8d49d-operator-scripts\") pod \"eadfca17-cc73-4d86-a5b4-6682fbe8d49d\" (UID: \"eadfca17-cc73-4d86-a5b4-6682fbe8d49d\") " Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.367879 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdppj\" (UniqueName: \"kubernetes.io/projected/700990cd-025d-4ee8-9fc1-8c82488ed341-kube-api-access-bdppj\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.367901 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2a093ed-7e51-4caa-b6d7-16af5e03444b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.367910 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6265k\" (UniqueName: \"kubernetes.io/projected/f2a093ed-7e51-4caa-b6d7-16af5e03444b-kube-api-access-6265k\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.367921 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/700990cd-025d-4ee8-9fc1-8c82488ed341-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.368504 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eadfca17-cc73-4d86-a5b4-6682fbe8d49d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eadfca17-cc73-4d86-a5b4-6682fbe8d49d" (UID: "eadfca17-cc73-4d86-a5b4-6682fbe8d49d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.371930 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eadfca17-cc73-4d86-a5b4-6682fbe8d49d-kube-api-access-krszm" (OuterVolumeSpecName: "kube-api-access-krszm") pod "eadfca17-cc73-4d86-a5b4-6682fbe8d49d" (UID: "eadfca17-cc73-4d86-a5b4-6682fbe8d49d"). InnerVolumeSpecName "kube-api-access-krszm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.470086 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eadfca17-cc73-4d86-a5b4-6682fbe8d49d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.470160 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krszm\" (UniqueName: \"kubernetes.io/projected/eadfca17-cc73-4d86-a5b4-6682fbe8d49d-kube-api-access-krszm\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.509633 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5b0e-account-create-update-9j2lk" event={"ID":"eadfca17-cc73-4d86-a5b4-6682fbe8d49d","Type":"ContainerDied","Data":"d312bb73ae319b2e057538d362068481254d3023e10c63f4e65705a1bef3dfb3"} Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.509670 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5b0e-account-create-update-9j2lk" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.509678 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d312bb73ae319b2e057538d362068481254d3023e10c63f4e65705a1bef3dfb3" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.512541 4773 generic.go:334] "Generic (PLEG): container finished" podID="9e288fcc-8bcb-4e2e-83fe-c17aae51ad38" containerID="1338b4148bfcdb8e0283799088b4f0f13bb8d6a872000ea98faf82042d881516" exitCode=0 Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.512591 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj" event={"ID":"9e288fcc-8bcb-4e2e-83fe-c17aae51ad38","Type":"ContainerDied","Data":"1338b4148bfcdb8e0283799088b4f0f13bb8d6a872000ea98faf82042d881516"} Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.523253 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-jf4bv" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.524190 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-jf4bv" event={"ID":"700990cd-025d-4ee8-9fc1-8c82488ed341","Type":"ContainerDied","Data":"67b8c62dc63ee371c0c960e779e2c8eeb9b4b5526abe55c7cda8eac879201ab3"} Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.524263 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67b8c62dc63ee371c0c960e779e2c8eeb9b4b5526abe55c7cda8eac879201ab3" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.530852 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-kf6jn" event={"ID":"f2a093ed-7e51-4caa-b6d7-16af5e03444b","Type":"ContainerDied","Data":"438185045e3a878b7544a20d975b8637b4df3be76a654543bcf4dcf1ae6fdc92"} Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.530919 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="438185045e3a878b7544a20d975b8637b4df3be76a654543bcf4dcf1ae6fdc92" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.531022 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-kf6jn" Jan 22 12:15:02 crc kubenswrapper[4773]: I0122 12:15:02.582225 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Jan 22 12:15:02 crc kubenswrapper[4773]: W0122 12:15:02.591044 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21901911_8523_4adc_9851_336360f4c11e.slice/crio-49d585eb6ef22d582d0ddfbfebfbcd4cce2cb3229ef96d8d2a2301f7bf20b0eb WatchSource:0}: Error finding container 49d585eb6ef22d582d0ddfbfebfbcd4cce2cb3229ef96d8d2a2301f7bf20b0eb: Status 404 returned error can't find the container with id 49d585eb6ef22d582d0ddfbfebfbcd4cce2cb3229ef96d8d2a2301f7bf20b0eb Jan 22 12:15:03 crc kubenswrapper[4773]: I0122 12:15:03.548761 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"21901911-8523-4adc-9851-336360f4c11e","Type":"ContainerStarted","Data":"49d585eb6ef22d582d0ddfbfebfbcd4cce2cb3229ef96d8d2a2301f7bf20b0eb"} Jan 22 12:15:04 crc kubenswrapper[4773]: I0122 12:15:04.111955 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0" Jan 22 12:15:04 crc kubenswrapper[4773]: E0122 12:15:04.112309 4773 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 22 12:15:04 crc kubenswrapper[4773]: E0122 12:15:04.112979 4773 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 22 12:15:04 crc kubenswrapper[4773]: E0122 12:15:04.113056 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift podName:6d5ae4d3-bfc2-4d06-a84e-dc56e250514c nodeName:}" failed. No retries permitted until 2026-01-22 12:15:12.113033552 +0000 UTC m=+1219.691149377 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift") pod "swift-storage-0" (UID: "6d5ae4d3-bfc2-4d06-a84e-dc56e250514c") : configmap "swift-ring-files" not found Jan 22 12:15:04 crc kubenswrapper[4773]: I0122 12:15:04.142741 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj" Jan 22 12:15:04 crc kubenswrapper[4773]: I0122 12:15:04.215227 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e288fcc-8bcb-4e2e-83fe-c17aae51ad38-secret-volume\") pod \"9e288fcc-8bcb-4e2e-83fe-c17aae51ad38\" (UID: \"9e288fcc-8bcb-4e2e-83fe-c17aae51ad38\") " Jan 22 12:15:04 crc kubenswrapper[4773]: I0122 12:15:04.215639 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kbmkx\" (UniqueName: \"kubernetes.io/projected/9e288fcc-8bcb-4e2e-83fe-c17aae51ad38-kube-api-access-kbmkx\") pod \"9e288fcc-8bcb-4e2e-83fe-c17aae51ad38\" (UID: \"9e288fcc-8bcb-4e2e-83fe-c17aae51ad38\") " Jan 22 12:15:04 crc kubenswrapper[4773]: I0122 12:15:04.215774 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e288fcc-8bcb-4e2e-83fe-c17aae51ad38-config-volume\") pod \"9e288fcc-8bcb-4e2e-83fe-c17aae51ad38\" (UID: \"9e288fcc-8bcb-4e2e-83fe-c17aae51ad38\") " Jan 22 12:15:04 crc kubenswrapper[4773]: I0122 12:15:04.216690 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e288fcc-8bcb-4e2e-83fe-c17aae51ad38-config-volume" (OuterVolumeSpecName: "config-volume") pod "9e288fcc-8bcb-4e2e-83fe-c17aae51ad38" (UID: "9e288fcc-8bcb-4e2e-83fe-c17aae51ad38"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:04 crc kubenswrapper[4773]: I0122 12:15:04.224084 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e288fcc-8bcb-4e2e-83fe-c17aae51ad38-kube-api-access-kbmkx" (OuterVolumeSpecName: "kube-api-access-kbmkx") pod "9e288fcc-8bcb-4e2e-83fe-c17aae51ad38" (UID: "9e288fcc-8bcb-4e2e-83fe-c17aae51ad38"). InnerVolumeSpecName "kube-api-access-kbmkx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:15:04 crc kubenswrapper[4773]: I0122 12:15:04.224401 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e288fcc-8bcb-4e2e-83fe-c17aae51ad38-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9e288fcc-8bcb-4e2e-83fe-c17aae51ad38" (UID: "9e288fcc-8bcb-4e2e-83fe-c17aae51ad38"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:15:04 crc kubenswrapper[4773]: I0122 12:15:04.318327 4773 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9e288fcc-8bcb-4e2e-83fe-c17aae51ad38-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:04 crc kubenswrapper[4773]: I0122 12:15:04.318381 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kbmkx\" (UniqueName: \"kubernetes.io/projected/9e288fcc-8bcb-4e2e-83fe-c17aae51ad38-kube-api-access-kbmkx\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:04 crc kubenswrapper[4773]: I0122 12:15:04.318399 4773 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9e288fcc-8bcb-4e2e-83fe-c17aae51ad38-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:04 crc kubenswrapper[4773]: I0122 12:15:04.572080 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj" event={"ID":"9e288fcc-8bcb-4e2e-83fe-c17aae51ad38","Type":"ContainerDied","Data":"1928d4fea1d65b2f39e9103a598b67db16bf52acd3adfd9c088fd97f82097435"} Jan 22 12:15:04 crc kubenswrapper[4773]: I0122 12:15:04.572370 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1928d4fea1d65b2f39e9103a598b67db16bf52acd3adfd9c088fd97f82097435" Jan 22 12:15:04 crc kubenswrapper[4773]: I0122 12:15:04.572534 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj" Jan 22 12:15:04 crc kubenswrapper[4773]: I0122 12:15:04.848106 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 22 12:15:05 crc kubenswrapper[4773]: I0122 12:15:05.031486 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz" Jan 22 12:15:05 crc kubenswrapper[4773]: I0122 12:15:05.108099 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-qbbx4"] Jan 22 12:15:05 crc kubenswrapper[4773]: I0122 12:15:05.108557 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" podUID="c60c2e8e-872f-448a-a799-6c49732c906d" containerName="dnsmasq-dns" containerID="cri-o://5af5bfb6cd3cdbbf101349122e170389e4026d37498b841f5ce9005c5128e4ce" gracePeriod=10 Jan 22 12:15:05 crc kubenswrapper[4773]: I0122 12:15:05.584723 4773 generic.go:334] "Generic (PLEG): container finished" podID="c60c2e8e-872f-448a-a799-6c49732c906d" containerID="5af5bfb6cd3cdbbf101349122e170389e4026d37498b841f5ce9005c5128e4ce" exitCode=0 Jan 22 12:15:05 crc kubenswrapper[4773]: I0122 12:15:05.584833 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" event={"ID":"c60c2e8e-872f-448a-a799-6c49732c906d","Type":"ContainerDied","Data":"5af5bfb6cd3cdbbf101349122e170389e4026d37498b841f5ce9005c5128e4ce"} Jan 22 12:15:05 crc kubenswrapper[4773]: I0122 12:15:05.679675 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-kf6jn"] Jan 22 12:15:05 crc kubenswrapper[4773]: I0122 12:15:05.689043 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-kf6jn"] Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.199508 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.362655 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-config\") pod \"c60c2e8e-872f-448a-a799-6c49732c906d\" (UID: \"c60c2e8e-872f-448a-a799-6c49732c906d\") " Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.363249 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-ovsdbserver-sb\") pod \"c60c2e8e-872f-448a-a799-6c49732c906d\" (UID: \"c60c2e8e-872f-448a-a799-6c49732c906d\") " Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.363450 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-dns-svc\") pod \"c60c2e8e-872f-448a-a799-6c49732c906d\" (UID: \"c60c2e8e-872f-448a-a799-6c49732c906d\") " Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.363503 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wrjmj\" (UniqueName: \"kubernetes.io/projected/c60c2e8e-872f-448a-a799-6c49732c906d-kube-api-access-wrjmj\") pod \"c60c2e8e-872f-448a-a799-6c49732c906d\" (UID: \"c60c2e8e-872f-448a-a799-6c49732c906d\") " Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.363613 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-ovsdbserver-nb\") pod \"c60c2e8e-872f-448a-a799-6c49732c906d\" (UID: \"c60c2e8e-872f-448a-a799-6c49732c906d\") " Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.380975 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c60c2e8e-872f-448a-a799-6c49732c906d-kube-api-access-wrjmj" (OuterVolumeSpecName: "kube-api-access-wrjmj") pod "c60c2e8e-872f-448a-a799-6c49732c906d" (UID: "c60c2e8e-872f-448a-a799-6c49732c906d"). InnerVolumeSpecName "kube-api-access-wrjmj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.427217 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c60c2e8e-872f-448a-a799-6c49732c906d" (UID: "c60c2e8e-872f-448a-a799-6c49732c906d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.428669 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-config" (OuterVolumeSpecName: "config") pod "c60c2e8e-872f-448a-a799-6c49732c906d" (UID: "c60c2e8e-872f-448a-a799-6c49732c906d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.430900 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c60c2e8e-872f-448a-a799-6c49732c906d" (UID: "c60c2e8e-872f-448a-a799-6c49732c906d"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.436006 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c60c2e8e-872f-448a-a799-6c49732c906d" (UID: "c60c2e8e-872f-448a-a799-6c49732c906d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.466859 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.466902 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.466920 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.466932 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wrjmj\" (UniqueName: \"kubernetes.io/projected/c60c2e8e-872f-448a-a799-6c49732c906d-kube-api-access-wrjmj\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.466948 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c60c2e8e-872f-448a-a799-6c49732c906d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.612148 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" event={"ID":"c60c2e8e-872f-448a-a799-6c49732c906d","Type":"ContainerDied","Data":"c28076ea979ad086f041be9bf8e65fadc6874b48b5bf915fdd5a264249dc5483"} Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.612244 4773 scope.go:117] "RemoveContainer" containerID="5af5bfb6cd3cdbbf101349122e170389e4026d37498b841f5ce9005c5128e4ce" Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.612525 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.623906 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"21901911-8523-4adc-9851-336360f4c11e","Type":"ContainerStarted","Data":"c527c488066f9ecb13a4d3372005670c6e133edf7d8f14ab25f338473ff9cd61"} Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.623960 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"21901911-8523-4adc-9851-336360f4c11e","Type":"ContainerStarted","Data":"a5a2900d8f3ec8ba753c4eaa60a956c6d8edc660289208aed85a83444906c24b"} Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.625545 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.640853 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-bbfh5" event={"ID":"1ee43913-badd-4397-a999-70b306ca56c3","Type":"ContainerStarted","Data":"85dfcb89d3acb7001ec4f59fefda1afa3e6872c76587b0b797769f9330c1fc35"} Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.657418 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.26374398 podStartE2EDuration="5.65739723s" podCreationTimestamp="2026-01-22 12:15:01 +0000 UTC" firstStartedPulling="2026-01-22 12:15:02.596862593 +0000 UTC m=+1210.174978428" lastFinishedPulling="2026-01-22 12:15:05.990515853 +0000 UTC m=+1213.568631678" observedRunningTime="2026-01-22 12:15:06.653888415 +0000 UTC m=+1214.232004260" watchObservedRunningTime="2026-01-22 12:15:06.65739723 +0000 UTC m=+1214.235513065" Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.665957 4773 scope.go:117] "RemoveContainer" containerID="2e0b4b0189a1737da3863eba6cdcb906c3410a9e5f98161cc2a5cf66ee3ea5b6" Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.673757 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2a093ed-7e51-4caa-b6d7-16af5e03444b" path="/var/lib/kubelet/pods/f2a093ed-7e51-4caa-b6d7-16af5e03444b/volumes" Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.681442 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-bbfh5" podStartSLOduration=2.443130753 podStartE2EDuration="7.681419459s" podCreationTimestamp="2026-01-22 12:14:59 +0000 UTC" firstStartedPulling="2026-01-22 12:15:00.749383231 +0000 UTC m=+1208.327499056" lastFinishedPulling="2026-01-22 12:15:05.987671937 +0000 UTC m=+1213.565787762" observedRunningTime="2026-01-22 12:15:06.674930445 +0000 UTC m=+1214.253046270" watchObservedRunningTime="2026-01-22 12:15:06.681419459 +0000 UTC m=+1214.259535284" Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.716305 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-qbbx4"] Jan 22 12:15:06 crc kubenswrapper[4773]: I0122 12:15:06.729051 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-qbbx4"] Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.299208 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-rqktn"] Jan 22 12:15:08 crc kubenswrapper[4773]: E0122 12:15:08.300659 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2a093ed-7e51-4caa-b6d7-16af5e03444b" containerName="mariadb-account-create-update" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 
12:15:08.300693 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2a093ed-7e51-4caa-b6d7-16af5e03444b" containerName="mariadb-account-create-update" Jan 22 12:15:08 crc kubenswrapper[4773]: E0122 12:15:08.300805 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c60c2e8e-872f-448a-a799-6c49732c906d" containerName="init" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.300815 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c60c2e8e-872f-448a-a799-6c49732c906d" containerName="init" Jan 22 12:15:08 crc kubenswrapper[4773]: E0122 12:15:08.300846 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="700990cd-025d-4ee8-9fc1-8c82488ed341" containerName="mariadb-database-create" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.300854 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="700990cd-025d-4ee8-9fc1-8c82488ed341" containerName="mariadb-database-create" Jan 22 12:15:08 crc kubenswrapper[4773]: E0122 12:15:08.300867 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e288fcc-8bcb-4e2e-83fe-c17aae51ad38" containerName="collect-profiles" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.300877 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e288fcc-8bcb-4e2e-83fe-c17aae51ad38" containerName="collect-profiles" Jan 22 12:15:08 crc kubenswrapper[4773]: E0122 12:15:08.301030 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c60c2e8e-872f-448a-a799-6c49732c906d" containerName="dnsmasq-dns" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.301044 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c60c2e8e-872f-448a-a799-6c49732c906d" containerName="dnsmasq-dns" Jan 22 12:15:08 crc kubenswrapper[4773]: E0122 12:15:08.301053 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eadfca17-cc73-4d86-a5b4-6682fbe8d49d" containerName="mariadb-account-create-update" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.301060 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="eadfca17-cc73-4d86-a5b4-6682fbe8d49d" containerName="mariadb-account-create-update" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.301516 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="700990cd-025d-4ee8-9fc1-8c82488ed341" containerName="mariadb-database-create" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.301543 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="eadfca17-cc73-4d86-a5b4-6682fbe8d49d" containerName="mariadb-account-create-update" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.301553 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e288fcc-8bcb-4e2e-83fe-c17aae51ad38" containerName="collect-profiles" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.301562 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2a093ed-7e51-4caa-b6d7-16af5e03444b" containerName="mariadb-account-create-update" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.301579 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="c60c2e8e-872f-448a-a799-6c49732c906d" containerName="dnsmasq-dns" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.302237 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-rqktn" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.304792 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-tmks6" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.305894 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.321696 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-rqktn"] Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.400468 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-db-sync-config-data\") pod \"glance-db-sync-rqktn\" (UID: \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\") " pod="openstack/glance-db-sync-rqktn" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.400536 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-combined-ca-bundle\") pod \"glance-db-sync-rqktn\" (UID: \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\") " pod="openstack/glance-db-sync-rqktn" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.400561 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fblcd\" (UniqueName: \"kubernetes.io/projected/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-kube-api-access-fblcd\") pod \"glance-db-sync-rqktn\" (UID: \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\") " pod="openstack/glance-db-sync-rqktn" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.400603 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-config-data\") pod \"glance-db-sync-rqktn\" (UID: \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\") " pod="openstack/glance-db-sync-rqktn" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.502271 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fblcd\" (UniqueName: \"kubernetes.io/projected/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-kube-api-access-fblcd\") pod \"glance-db-sync-rqktn\" (UID: \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\") " pod="openstack/glance-db-sync-rqktn" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.502367 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-config-data\") pod \"glance-db-sync-rqktn\" (UID: \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\") " pod="openstack/glance-db-sync-rqktn" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.502462 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-db-sync-config-data\") pod \"glance-db-sync-rqktn\" (UID: \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\") " pod="openstack/glance-db-sync-rqktn" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.502504 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-combined-ca-bundle\") pod 
\"glance-db-sync-rqktn\" (UID: \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\") " pod="openstack/glance-db-sync-rqktn" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.507427 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-combined-ca-bundle\") pod \"glance-db-sync-rqktn\" (UID: \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\") " pod="openstack/glance-db-sync-rqktn" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.507678 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-db-sync-config-data\") pod \"glance-db-sync-rqktn\" (UID: \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\") " pod="openstack/glance-db-sync-rqktn" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.512255 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-config-data\") pod \"glance-db-sync-rqktn\" (UID: \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\") " pod="openstack/glance-db-sync-rqktn" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.531101 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fblcd\" (UniqueName: \"kubernetes.io/projected/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-kube-api-access-fblcd\") pod \"glance-db-sync-rqktn\" (UID: \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\") " pod="openstack/glance-db-sync-rqktn" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.621462 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-rqktn" Jan 22 12:15:08 crc kubenswrapper[4773]: I0122 12:15:08.668359 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c60c2e8e-872f-448a-a799-6c49732c906d" path="/var/lib/kubelet/pods/c60c2e8e-872f-448a-a799-6c49732c906d/volumes" Jan 22 12:15:09 crc kubenswrapper[4773]: I0122 12:15:09.257095 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-rqktn"] Jan 22 12:15:09 crc kubenswrapper[4773]: I0122 12:15:09.663815 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rqktn" event={"ID":"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41","Type":"ContainerStarted","Data":"ba40fa810d9b613dd57ab64a71d22eb9c7e8adc60a4fdee5ba5838f13a0649dd"} Jan 22 12:15:10 crc kubenswrapper[4773]: I0122 12:15:10.704755 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-fw8rf"] Jan 22 12:15:10 crc kubenswrapper[4773]: I0122 12:15:10.706229 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-fw8rf" Jan 22 12:15:10 crc kubenswrapper[4773]: I0122 12:15:10.708776 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Jan 22 12:15:10 crc kubenswrapper[4773]: I0122 12:15:10.713109 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-fw8rf"] Jan 22 12:15:10 crc kubenswrapper[4773]: I0122 12:15:10.867440 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sm98h\" (UniqueName: \"kubernetes.io/projected/1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5-kube-api-access-sm98h\") pod \"root-account-create-update-fw8rf\" (UID: \"1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5\") " pod="openstack/root-account-create-update-fw8rf" Jan 22 12:15:10 crc kubenswrapper[4773]: I0122 12:15:10.867869 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5-operator-scripts\") pod \"root-account-create-update-fw8rf\" (UID: \"1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5\") " pod="openstack/root-account-create-update-fw8rf" Jan 22 12:15:10 crc kubenswrapper[4773]: I0122 12:15:10.970208 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5-operator-scripts\") pod \"root-account-create-update-fw8rf\" (UID: \"1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5\") " pod="openstack/root-account-create-update-fw8rf" Jan 22 12:15:10 crc kubenswrapper[4773]: I0122 12:15:10.970744 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sm98h\" (UniqueName: \"kubernetes.io/projected/1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5-kube-api-access-sm98h\") pod \"root-account-create-update-fw8rf\" (UID: \"1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5\") " pod="openstack/root-account-create-update-fw8rf" Jan 22 12:15:10 crc kubenswrapper[4773]: I0122 12:15:10.971115 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5-operator-scripts\") pod \"root-account-create-update-fw8rf\" (UID: \"1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5\") " pod="openstack/root-account-create-update-fw8rf" Jan 22 12:15:10 crc kubenswrapper[4773]: I0122 12:15:10.996140 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sm98h\" (UniqueName: \"kubernetes.io/projected/1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5-kube-api-access-sm98h\") pod \"root-account-create-update-fw8rf\" (UID: \"1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5\") " pod="openstack/root-account-create-update-fw8rf" Jan 22 12:15:11 crc kubenswrapper[4773]: I0122 12:15:11.038388 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-fw8rf" Jan 22 12:15:11 crc kubenswrapper[4773]: I0122 12:15:11.185357 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-586b989cdc-qbbx4" podUID="c60c2e8e-872f-448a-a799-6c49732c906d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: i/o timeout" Jan 22 12:15:11 crc kubenswrapper[4773]: I0122 12:15:11.513212 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-fw8rf"] Jan 22 12:15:11 crc kubenswrapper[4773]: W0122 12:15:11.526186 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a7e6140_b9e5_454d_8cdd_accf9ee8f5d5.slice/crio-58a1811d86feac84ba2a2a0ddba9f2039d8f59ea4f296ea5cf3e68f16deb52f0 WatchSource:0}: Error finding container 58a1811d86feac84ba2a2a0ddba9f2039d8f59ea4f296ea5cf3e68f16deb52f0: Status 404 returned error can't find the container with id 58a1811d86feac84ba2a2a0ddba9f2039d8f59ea4f296ea5cf3e68f16deb52f0 Jan 22 12:15:11 crc kubenswrapper[4773]: I0122 12:15:11.686518 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-fw8rf" event={"ID":"1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5","Type":"ContainerStarted","Data":"58a1811d86feac84ba2a2a0ddba9f2039d8f59ea4f296ea5cf3e68f16deb52f0"} Jan 22 12:15:12 crc kubenswrapper[4773]: I0122 12:15:12.201022 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0" Jan 22 12:15:12 crc kubenswrapper[4773]: E0122 12:15:12.201613 4773 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 22 12:15:12 crc kubenswrapper[4773]: E0122 12:15:12.201634 4773 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 22 12:15:12 crc kubenswrapper[4773]: E0122 12:15:12.201689 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift podName:6d5ae4d3-bfc2-4d06-a84e-dc56e250514c nodeName:}" failed. No retries permitted until 2026-01-22 12:15:28.201668494 +0000 UTC m=+1235.779784319 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift") pod "swift-storage-0" (UID: "6d5ae4d3-bfc2-4d06-a84e-dc56e250514c") : configmap "swift-ring-files" not found Jan 22 12:15:12 crc kubenswrapper[4773]: I0122 12:15:12.699162 4773 generic.go:334] "Generic (PLEG): container finished" podID="1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5" containerID="2de153ceb8260e4aaf9c5eb8db18fd21a627cc2a8243dd5b0c08d3df7a9725b0" exitCode=0 Jan 22 12:15:12 crc kubenswrapper[4773]: I0122 12:15:12.699229 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-fw8rf" event={"ID":"1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5","Type":"ContainerDied","Data":"2de153ceb8260e4aaf9c5eb8db18fd21a627cc2a8243dd5b0c08d3df7a9725b0"} Jan 22 12:15:12 crc kubenswrapper[4773]: I0122 12:15:12.701448 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a4c14d2f-5507-4d08-be37-55d77b5491a3","Type":"ContainerDied","Data":"1c92b816dc78dcade34afa861b434a64234cddd6198a1d85b73b1ca7f992f05c"} Jan 22 12:15:12 crc kubenswrapper[4773]: I0122 12:15:12.701420 4773 generic.go:334] "Generic (PLEG): container finished" podID="a4c14d2f-5507-4d08-be37-55d77b5491a3" containerID="1c92b816dc78dcade34afa861b434a64234cddd6198a1d85b73b1ca7f992f05c" exitCode=0 Jan 22 12:15:13 crc kubenswrapper[4773]: I0122 12:15:13.673786 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:15:13 crc kubenswrapper[4773]: I0122 12:15:13.675818 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:15:13 crc kubenswrapper[4773]: I0122 12:15:13.735490 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a4c14d2f-5507-4d08-be37-55d77b5491a3","Type":"ContainerStarted","Data":"571196d2d3c006d03c5548409716302b0c8d1e601a474e5f6e89f54c68e781d2"} Jan 22 12:15:13 crc kubenswrapper[4773]: I0122 12:15:13.735820 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jan 22 12:15:13 crc kubenswrapper[4773]: I0122 12:15:13.738531 4773 generic.go:334] "Generic (PLEG): container finished" podID="a688a76d-2498-4542-8285-709caf211e8a" containerID="e1de773e366c62bf95f18118de0c8136e39a369f67c5d8c6fa06a22d5888c9b9" exitCode=0 Jan 22 12:15:13 crc kubenswrapper[4773]: I0122 12:15:13.738609 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a688a76d-2498-4542-8285-709caf211e8a","Type":"ContainerDied","Data":"e1de773e366c62bf95f18118de0c8136e39a369f67c5d8c6fa06a22d5888c9b9"} Jan 22 12:15:13 crc kubenswrapper[4773]: I0122 12:15:13.793034 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=38.620613119 podStartE2EDuration="1m26.793007974s" podCreationTimestamp="2026-01-22 12:13:47 +0000 UTC" firstStartedPulling="2026-01-22 12:13:50.866150577 +0000 UTC m=+1138.444266402" lastFinishedPulling="2026-01-22 12:14:39.038545432 +0000 UTC m=+1186.616661257" observedRunningTime="2026-01-22 12:15:13.776619253 +0000 UTC m=+1221.354735088" watchObservedRunningTime="2026-01-22 12:15:13.793007974 +0000 UTC m=+1221.371123799" Jan 22 12:15:13 crc kubenswrapper[4773]: I0122 12:15:13.962109 4773 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/ovn-controller-rp6dh-config-dncnk"] Jan 22 12:15:13 crc kubenswrapper[4773]: I0122 12:15:13.963757 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:13 crc kubenswrapper[4773]: I0122 12:15:13.974360 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Jan 22 12:15:13 crc kubenswrapper[4773]: I0122 12:15:13.981169 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-rp6dh-config-dncnk"] Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.044262 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/260540d6-8325-4d8a-afdd-cd59584ac611-scripts\") pod \"ovn-controller-rp6dh-config-dncnk\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.044958 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/260540d6-8325-4d8a-afdd-cd59584ac611-var-run-ovn\") pod \"ovn-controller-rp6dh-config-dncnk\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.045017 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/260540d6-8325-4d8a-afdd-cd59584ac611-var-run\") pod \"ovn-controller-rp6dh-config-dncnk\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.045084 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxzgb\" (UniqueName: \"kubernetes.io/projected/260540d6-8325-4d8a-afdd-cd59584ac611-kube-api-access-hxzgb\") pod \"ovn-controller-rp6dh-config-dncnk\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.045113 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/260540d6-8325-4d8a-afdd-cd59584ac611-additional-scripts\") pod \"ovn-controller-rp6dh-config-dncnk\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.045137 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/260540d6-8325-4d8a-afdd-cd59584ac611-var-log-ovn\") pod \"ovn-controller-rp6dh-config-dncnk\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.147711 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/260540d6-8325-4d8a-afdd-cd59584ac611-scripts\") pod \"ovn-controller-rp6dh-config-dncnk\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.147794 4773 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/260540d6-8325-4d8a-afdd-cd59584ac611-var-run-ovn\") pod \"ovn-controller-rp6dh-config-dncnk\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.147842 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/260540d6-8325-4d8a-afdd-cd59584ac611-var-run\") pod \"ovn-controller-rp6dh-config-dncnk\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.147908 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxzgb\" (UniqueName: \"kubernetes.io/projected/260540d6-8325-4d8a-afdd-cd59584ac611-kube-api-access-hxzgb\") pod \"ovn-controller-rp6dh-config-dncnk\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.147939 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/260540d6-8325-4d8a-afdd-cd59584ac611-additional-scripts\") pod \"ovn-controller-rp6dh-config-dncnk\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.147964 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/260540d6-8325-4d8a-afdd-cd59584ac611-var-log-ovn\") pod \"ovn-controller-rp6dh-config-dncnk\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.148410 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/260540d6-8325-4d8a-afdd-cd59584ac611-var-log-ovn\") pod \"ovn-controller-rp6dh-config-dncnk\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.148553 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/260540d6-8325-4d8a-afdd-cd59584ac611-var-run\") pod \"ovn-controller-rp6dh-config-dncnk\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.148631 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/260540d6-8325-4d8a-afdd-cd59584ac611-var-run-ovn\") pod \"ovn-controller-rp6dh-config-dncnk\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.149278 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/260540d6-8325-4d8a-afdd-cd59584ac611-additional-scripts\") pod \"ovn-controller-rp6dh-config-dncnk\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.150364 4773 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/260540d6-8325-4d8a-afdd-cd59584ac611-scripts\") pod \"ovn-controller-rp6dh-config-dncnk\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.173408 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxzgb\" (UniqueName: \"kubernetes.io/projected/260540d6-8325-4d8a-afdd-cd59584ac611-kube-api-access-hxzgb\") pod \"ovn-controller-rp6dh-config-dncnk\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.195721 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-fw8rf" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.301461 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.351655 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sm98h\" (UniqueName: \"kubernetes.io/projected/1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5-kube-api-access-sm98h\") pod \"1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5\" (UID: \"1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5\") " Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.351780 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5-operator-scripts\") pod \"1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5\" (UID: \"1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5\") " Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.352571 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5" (UID: "1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.359557 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5-kube-api-access-sm98h" (OuterVolumeSpecName: "kube-api-access-sm98h") pod "1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5" (UID: "1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5"). InnerVolumeSpecName "kube-api-access-sm98h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.453682 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sm98h\" (UniqueName: \"kubernetes.io/projected/1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5-kube-api-access-sm98h\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.453713 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.731117 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-rp6dh-config-dncnk"] Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.755106 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-fw8rf" event={"ID":"1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5","Type":"ContainerDied","Data":"58a1811d86feac84ba2a2a0ddba9f2039d8f59ea4f296ea5cf3e68f16deb52f0"} Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.755164 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-fw8rf" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.755170 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="58a1811d86feac84ba2a2a0ddba9f2039d8f59ea4f296ea5cf3e68f16deb52f0" Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.759045 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a688a76d-2498-4542-8285-709caf211e8a","Type":"ContainerStarted","Data":"2495d52a3c6cc3db0f8b3d4246a26ad16514e0b79516c04d720266d790a9bf4c"} Jan 22 12:15:14 crc kubenswrapper[4773]: I0122 12:15:14.759874 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jan 22 12:15:15 crc kubenswrapper[4773]: I0122 12:15:15.231929 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=41.252989551 podStartE2EDuration="1m29.231906838s" podCreationTimestamp="2026-01-22 12:13:46 +0000 UTC" firstStartedPulling="2026-01-22 12:13:50.473070754 +0000 UTC m=+1138.051186579" lastFinishedPulling="2026-01-22 12:14:38.451988041 +0000 UTC m=+1186.030103866" observedRunningTime="2026-01-22 12:15:14.787167405 +0000 UTC m=+1222.365283230" watchObservedRunningTime="2026-01-22 12:15:15.231906838 +0000 UTC m=+1222.810022663" Jan 22 12:15:15 crc kubenswrapper[4773]: I0122 12:15:15.772056 4773 generic.go:334] "Generic (PLEG): container finished" podID="260540d6-8325-4d8a-afdd-cd59584ac611" containerID="87c1a40a3866ca565feb721acdc98dd566ca7d2caac09a1e364b6fe3fe20adae" exitCode=0 Jan 22 12:15:15 crc kubenswrapper[4773]: I0122 12:15:15.772098 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-rp6dh-config-dncnk" event={"ID":"260540d6-8325-4d8a-afdd-cd59584ac611","Type":"ContainerDied","Data":"87c1a40a3866ca565feb721acdc98dd566ca7d2caac09a1e364b6fe3fe20adae"} Jan 22 12:15:15 crc kubenswrapper[4773]: I0122 12:15:15.772332 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-rp6dh-config-dncnk" event={"ID":"260540d6-8325-4d8a-afdd-cd59584ac611","Type":"ContainerStarted","Data":"19d5271185dc52f0cb43253f8b7f776f2aafd3ef9a10c7803fd9f6f3627b560a"} Jan 22 12:15:15 crc kubenswrapper[4773]: I0122 12:15:15.775621 
4773 generic.go:334] "Generic (PLEG): container finished" podID="1ee43913-badd-4397-a999-70b306ca56c3" containerID="85dfcb89d3acb7001ec4f59fefda1afa3e6872c76587b0b797769f9330c1fc35" exitCode=0 Jan 22 12:15:15 crc kubenswrapper[4773]: I0122 12:15:15.775978 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-bbfh5" event={"ID":"1ee43913-badd-4397-a999-70b306ca56c3","Type":"ContainerDied","Data":"85dfcb89d3acb7001ec4f59fefda1afa3e6872c76587b0b797769f9330c1fc35"} Jan 22 12:15:17 crc kubenswrapper[4773]: I0122 12:15:17.538235 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Jan 22 12:15:18 crc kubenswrapper[4773]: I0122 12:15:18.620846 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-rp6dh" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.257729 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.269773 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift\") pod \"swift-storage-0\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " pod="openstack/swift-storage-0" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.315982 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Jan 22 12:15:28 crc kubenswrapper[4773]: E0122 12:15:28.447696 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f" Jan 22 12:15:28 crc kubenswrapper[4773]: E0122 12:15:28.448204 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f,Command:[/bin/bash],Args:[-c 
Jan 22 12:15:28 crc kubenswrapper[4773]: E0122 12:15:28.451458 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-rqktn" podUID="f9fe524c-8ecb-4e39-a1f6-2544aeb74b41"
Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.536621 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-rp6dh-config-dncnk"
Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.545862 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-bbfh5"
Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.614812 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="a688a76d-2498-4542-8285-709caf211e8a" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused"
Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.665516 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rv7gk\" (UniqueName: \"kubernetes.io/projected/1ee43913-badd-4397-a999-70b306ca56c3-kube-api-access-rv7gk\") pod \"1ee43913-badd-4397-a999-70b306ca56c3\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") "
Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.665587 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/1ee43913-badd-4397-a999-70b306ca56c3-ring-data-devices\") pod \"1ee43913-badd-4397-a999-70b306ca56c3\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") "
Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.665629 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/260540d6-8325-4d8a-afdd-cd59584ac611-var-run-ovn\") pod \"260540d6-8325-4d8a-afdd-cd59584ac611\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") "
Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.665652 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/260540d6-8325-4d8a-afdd-cd59584ac611-var-run\") pod \"260540d6-8325-4d8a-afdd-cd59584ac611\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") "
Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.665731 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/260540d6-8325-4d8a-afdd-cd59584ac611-var-log-ovn\") pod \"260540d6-8325-4d8a-afdd-cd59584ac611\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") "
Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.665778 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/1ee43913-badd-4397-a999-70b306ca56c3-etc-swift\") pod \"1ee43913-badd-4397-a999-70b306ca56c3\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") "
Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.665777 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/260540d6-8325-4d8a-afdd-cd59584ac611-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "260540d6-8325-4d8a-afdd-cd59584ac611" (UID: "260540d6-8325-4d8a-afdd-cd59584ac611"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.665803 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/1ee43913-badd-4397-a999-70b306ca56c3-dispersionconf\") pod \"1ee43913-badd-4397-a999-70b306ca56c3\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.665825 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/1ee43913-badd-4397-a999-70b306ca56c3-swiftconf\") pod \"1ee43913-badd-4397-a999-70b306ca56c3\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.665853 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ee43913-badd-4397-a999-70b306ca56c3-scripts\") pod \"1ee43913-badd-4397-a999-70b306ca56c3\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.665834 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/260540d6-8325-4d8a-afdd-cd59584ac611-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "260540d6-8325-4d8a-afdd-cd59584ac611" (UID: "260540d6-8325-4d8a-afdd-cd59584ac611"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.665879 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/260540d6-8325-4d8a-afdd-cd59584ac611-scripts\") pod \"260540d6-8325-4d8a-afdd-cd59584ac611\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.665892 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/260540d6-8325-4d8a-afdd-cd59584ac611-var-run" (OuterVolumeSpecName: "var-run") pod "260540d6-8325-4d8a-afdd-cd59584ac611" (UID: "260540d6-8325-4d8a-afdd-cd59584ac611"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.665908 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxzgb\" (UniqueName: \"kubernetes.io/projected/260540d6-8325-4d8a-afdd-cd59584ac611-kube-api-access-hxzgb\") pod \"260540d6-8325-4d8a-afdd-cd59584ac611\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.665931 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ee43913-badd-4397-a999-70b306ca56c3-combined-ca-bundle\") pod \"1ee43913-badd-4397-a999-70b306ca56c3\" (UID: \"1ee43913-badd-4397-a999-70b306ca56c3\") " Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.665958 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/260540d6-8325-4d8a-afdd-cd59584ac611-additional-scripts\") pod \"260540d6-8325-4d8a-afdd-cd59584ac611\" (UID: \"260540d6-8325-4d8a-afdd-cd59584ac611\") " Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.666272 4773 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/260540d6-8325-4d8a-afdd-cd59584ac611-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.666301 4773 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/260540d6-8325-4d8a-afdd-cd59584ac611-var-run\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.666310 4773 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/260540d6-8325-4d8a-afdd-cd59584ac611-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.666783 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ee43913-badd-4397-a999-70b306ca56c3-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "1ee43913-badd-4397-a999-70b306ca56c3" (UID: "1ee43913-badd-4397-a999-70b306ca56c3"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.698686 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ee43913-badd-4397-a999-70b306ca56c3-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "1ee43913-badd-4397-a999-70b306ca56c3" (UID: "1ee43913-badd-4397-a999-70b306ca56c3"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.706716 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/260540d6-8325-4d8a-afdd-cd59584ac611-kube-api-access-hxzgb" (OuterVolumeSpecName: "kube-api-access-hxzgb") pod "260540d6-8325-4d8a-afdd-cd59584ac611" (UID: "260540d6-8325-4d8a-afdd-cd59584ac611"). InnerVolumeSpecName "kube-api-access-hxzgb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.716477 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ee43913-badd-4397-a999-70b306ca56c3-kube-api-access-rv7gk" (OuterVolumeSpecName: "kube-api-access-rv7gk") pod "1ee43913-badd-4397-a999-70b306ca56c3" (UID: "1ee43913-badd-4397-a999-70b306ca56c3"). InnerVolumeSpecName "kube-api-access-rv7gk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.716732 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/260540d6-8325-4d8a-afdd-cd59584ac611-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "260540d6-8325-4d8a-afdd-cd59584ac611" (UID: "260540d6-8325-4d8a-afdd-cd59584ac611"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.722539 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ee43913-badd-4397-a999-70b306ca56c3-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "1ee43913-badd-4397-a999-70b306ca56c3" (UID: "1ee43913-badd-4397-a999-70b306ca56c3"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.722700 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ee43913-badd-4397-a999-70b306ca56c3-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "1ee43913-badd-4397-a999-70b306ca56c3" (UID: "1ee43913-badd-4397-a999-70b306ca56c3"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.724441 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/260540d6-8325-4d8a-afdd-cd59584ac611-scripts" (OuterVolumeSpecName: "scripts") pod "260540d6-8325-4d8a-afdd-cd59584ac611" (UID: "260540d6-8325-4d8a-afdd-cd59584ac611"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.725690 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ee43913-badd-4397-a999-70b306ca56c3-scripts" (OuterVolumeSpecName: "scripts") pod "1ee43913-badd-4397-a999-70b306ca56c3" (UID: "1ee43913-badd-4397-a999-70b306ca56c3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.733710 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ee43913-badd-4397-a999-70b306ca56c3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1ee43913-badd-4397-a999-70b306ca56c3" (UID: "1ee43913-badd-4397-a999-70b306ca56c3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.767432 4773 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/1ee43913-badd-4397-a999-70b306ca56c3-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.767470 4773 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/1ee43913-badd-4397-a999-70b306ca56c3-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.767483 4773 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/1ee43913-badd-4397-a999-70b306ca56c3-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.767497 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ee43913-badd-4397-a999-70b306ca56c3-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.767507 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/260540d6-8325-4d8a-afdd-cd59584ac611-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.767518 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxzgb\" (UniqueName: \"kubernetes.io/projected/260540d6-8325-4d8a-afdd-cd59584ac611-kube-api-access-hxzgb\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.767529 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ee43913-badd-4397-a999-70b306ca56c3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.767541 4773 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/260540d6-8325-4d8a-afdd-cd59584ac611-additional-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.767552 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rv7gk\" (UniqueName: \"kubernetes.io/projected/1ee43913-badd-4397-a999-70b306ca56c3-kube-api-access-rv7gk\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:28 crc kubenswrapper[4773]: I0122 12:15:28.767562 4773 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/1ee43913-badd-4397-a999-70b306ca56c3-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:29 crc kubenswrapper[4773]: I0122 12:15:28.989180 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Jan 22 12:15:29 crc kubenswrapper[4773]: W0122 12:15:28.994643 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d5ae4d3_bfc2_4d06_a84e_dc56e250514c.slice/crio-3568ce9da9c8d8fe7d9f3751dfe7e97a31d37c821a01d04d9411e597b1b46999 WatchSource:0}: Error finding container 3568ce9da9c8d8fe7d9f3751dfe7e97a31d37c821a01d04d9411e597b1b46999: Status 404 returned error can't find the container with id 3568ce9da9c8d8fe7d9f3751dfe7e97a31d37c821a01d04d9411e597b1b46999 Jan 22 12:15:29 crc kubenswrapper[4773]: I0122 12:15:29.104798 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerStarted","Data":"3568ce9da9c8d8fe7d9f3751dfe7e97a31d37c821a01d04d9411e597b1b46999"} Jan 22 12:15:29 crc kubenswrapper[4773]: I0122 12:15:29.106213 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-rp6dh-config-dncnk" Jan 22 12:15:29 crc kubenswrapper[4773]: I0122 12:15:29.106420 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-rp6dh-config-dncnk" event={"ID":"260540d6-8325-4d8a-afdd-cd59584ac611","Type":"ContainerDied","Data":"19d5271185dc52f0cb43253f8b7f776f2aafd3ef9a10c7803fd9f6f3627b560a"} Jan 22 12:15:29 crc kubenswrapper[4773]: I0122 12:15:29.106488 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="19d5271185dc52f0cb43253f8b7f776f2aafd3ef9a10c7803fd9f6f3627b560a" Jan 22 12:15:29 crc kubenswrapper[4773]: I0122 12:15:29.111195 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-bbfh5" Jan 22 12:15:29 crc kubenswrapper[4773]: I0122 12:15:29.111378 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-bbfh5" event={"ID":"1ee43913-badd-4397-a999-70b306ca56c3","Type":"ContainerDied","Data":"c906fff5421d366ea136587bb41082007bea340d315691ff18e0e141431617a9"} Jan 22 12:15:29 crc kubenswrapper[4773]: I0122 12:15:29.111412 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c906fff5421d366ea136587bb41082007bea340d315691ff18e0e141431617a9" Jan 22 12:15:29 crc kubenswrapper[4773]: E0122 12:15:29.112846 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f\\\"\"" pod="openstack/glance-db-sync-rqktn" podUID="f9fe524c-8ecb-4e39-a1f6-2544aeb74b41" Jan 22 12:15:29 crc kubenswrapper[4773]: I0122 12:15:29.616591 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jan 22 12:15:29 crc kubenswrapper[4773]: I0122 12:15:29.651925 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-rp6dh-config-dncnk"] Jan 22 12:15:29 crc kubenswrapper[4773]: I0122 12:15:29.673946 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-rp6dh-config-dncnk"] Jan 22 12:15:30 crc kubenswrapper[4773]: I0122 12:15:30.668243 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="260540d6-8325-4d8a-afdd-cd59584ac611" path="/var/lib/kubelet/pods/260540d6-8325-4d8a-afdd-cd59584ac611/volumes" Jan 22 12:15:31 crc kubenswrapper[4773]: I0122 12:15:31.133897 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerStarted","Data":"8614587119ff68cbab75cc2f49c17c865dbaaace22e54b8c99cb0c2c1183280b"} Jan 22 12:15:31 crc kubenswrapper[4773]: I0122 12:15:31.133952 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerStarted","Data":"4550c34e059dc4785794eb4b0ed847aef46b118c21bcc68b6e25d13c02b29550"} Jan 22 12:15:31 crc kubenswrapper[4773]: I0122 12:15:31.133965 4773 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerStarted","Data":"e9546b98e820ca3d0bc3c7f7f6af1821b37a056264052a38280687379bf170a4"} Jan 22 12:15:32 crc kubenswrapper[4773]: I0122 12:15:32.143979 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerStarted","Data":"186e2a68f59e8f83347891cfa964ee1ba154ccbd86503a5b5bcfcac2d36382b6"} Jan 22 12:15:33 crc kubenswrapper[4773]: I0122 12:15:33.155874 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerStarted","Data":"fce77a5acf624e2bfa4fbd706de0a7bc45675dcc2a670de3f60e284d1156e388"} Jan 22 12:15:34 crc kubenswrapper[4773]: I0122 12:15:34.181396 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerStarted","Data":"d468ff37ff2469b69cb119a74db751875822b460315803fe8a44e5bcdf6c0ecf"} Jan 22 12:15:34 crc kubenswrapper[4773]: I0122 12:15:34.181747 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerStarted","Data":"dd9af132571b4bed486d397dfe0215f073e7f36c6b34aa87e560ea6a3f50a32e"} Jan 22 12:15:34 crc kubenswrapper[4773]: I0122 12:15:34.181763 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerStarted","Data":"72835dd244d879ebb03d43571334445f863b46f2f1f1ee2655d94a3e3907e8b5"} Jan 22 12:15:35 crc kubenswrapper[4773]: I0122 12:15:35.196480 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerStarted","Data":"c18e352494406d6cec346bf8c21ed393d59ff5e3ef7616cda1e8dcff47f2665d"} Jan 22 12:15:35 crc kubenswrapper[4773]: I0122 12:15:35.197039 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerStarted","Data":"049e38f5a87408bec16aa9630dee0dfcbba21d9d01cf8238d9222592a966b9a8"} Jan 22 12:15:36 crc kubenswrapper[4773]: I0122 12:15:36.217496 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerStarted","Data":"cf1b9f6bfafc8703b57d6396414ff00ca8d8f849a42e8a643dd6012f8b0ce046"} Jan 22 12:15:36 crc kubenswrapper[4773]: I0122 12:15:36.217808 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerStarted","Data":"08cda7f85172a8a3837740fbc97bd5edfa4527dcc2c0715be2ebaf2bac22e6b7"} Jan 22 12:15:36 crc kubenswrapper[4773]: I0122 12:15:36.217824 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerStarted","Data":"88011f552abb27872acf5ebae2c74881b96ee28ef4202e35cd34c74e6bc5e417"} Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.231520 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerStarted","Data":"703a45160239152c8b7d740aeea8cc73cd6c09b0a35fa470daa71bdf7f10e577"} Jan 22 12:15:37 crc 
Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.274035 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=37.475859759 podStartE2EDuration="43.274010528s" podCreationTimestamp="2026-01-22 12:14:54 +0000 UTC" firstStartedPulling="2026-01-22 12:15:28.996266211 +0000 UTC m=+1236.574382036" lastFinishedPulling="2026-01-22 12:15:34.79441697 +0000 UTC m=+1242.372532805" observedRunningTime="2026-01-22 12:15:37.268658307 +0000 UTC m=+1244.846774172" watchObservedRunningTime="2026-01-22 12:15:37.274010528 +0000 UTC m=+1244.852126353"
Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.540523 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8db84466c-mm9ts"]
Jan 22 12:15:37 crc kubenswrapper[4773]: E0122 12:15:37.540981 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="260540d6-8325-4d8a-afdd-cd59584ac611" containerName="ovn-config"
Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.541007 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="260540d6-8325-4d8a-afdd-cd59584ac611" containerName="ovn-config"
Jan 22 12:15:37 crc kubenswrapper[4773]: E0122 12:15:37.541020 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ee43913-badd-4397-a999-70b306ca56c3" containerName="swift-ring-rebalance"
Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.541030 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ee43913-badd-4397-a999-70b306ca56c3" containerName="swift-ring-rebalance"
Jan 22 12:15:37 crc kubenswrapper[4773]: E0122 12:15:37.541051 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5" containerName="mariadb-account-create-update"
Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.541066 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5" containerName="mariadb-account-create-update"
Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.541325 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="260540d6-8325-4d8a-afdd-cd59584ac611" containerName="ovn-config"
Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.541355 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ee43913-badd-4397-a999-70b306ca56c3" containerName="swift-ring-rebalance"
Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.541368 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5" containerName="mariadb-account-create-update"
Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.542404 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8db84466c-mm9ts"
Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.543917 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0"
Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.556526 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8db84466c-mm9ts"]
Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.667521 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-ovsdbserver-nb\") pod \"dnsmasq-dns-8db84466c-mm9ts\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") " pod="openstack/dnsmasq-dns-8db84466c-mm9ts"
Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.667707 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-config\") pod \"dnsmasq-dns-8db84466c-mm9ts\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") " pod="openstack/dnsmasq-dns-8db84466c-mm9ts"
Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.667949 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-ovsdbserver-sb\") pod \"dnsmasq-dns-8db84466c-mm9ts\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") " pod="openstack/dnsmasq-dns-8db84466c-mm9ts"
Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.668060 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-dns-svc\") pod \"dnsmasq-dns-8db84466c-mm9ts\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") " pod="openstack/dnsmasq-dns-8db84466c-mm9ts"
Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.668239 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-dns-swift-storage-0\") pod \"dnsmasq-dns-8db84466c-mm9ts\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") " pod="openstack/dnsmasq-dns-8db84466c-mm9ts"
Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.668403 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5hmf\" (UniqueName: \"kubernetes.io/projected/c9b10ad1-cb79-4231-b540-04a494438839-kube-api-access-z5hmf\") pod \"dnsmasq-dns-8db84466c-mm9ts\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") " pod="openstack/dnsmasq-dns-8db84466c-mm9ts"
Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.769596 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-dns-swift-storage-0\") pod \"dnsmasq-dns-8db84466c-mm9ts\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") " pod="openstack/dnsmasq-dns-8db84466c-mm9ts"
Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.769683 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5hmf\" (UniqueName: \"kubernetes.io/projected/c9b10ad1-cb79-4231-b540-04a494438839-kube-api-access-z5hmf\") pod \"dnsmasq-dns-8db84466c-mm9ts\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") " pod="openstack/dnsmasq-dns-8db84466c-mm9ts"
\"c9b10ad1-cb79-4231-b540-04a494438839\") " pod="openstack/dnsmasq-dns-8db84466c-mm9ts" Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.769731 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-ovsdbserver-nb\") pod \"dnsmasq-dns-8db84466c-mm9ts\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") " pod="openstack/dnsmasq-dns-8db84466c-mm9ts" Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.769777 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-config\") pod \"dnsmasq-dns-8db84466c-mm9ts\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") " pod="openstack/dnsmasq-dns-8db84466c-mm9ts" Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.769826 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-ovsdbserver-sb\") pod \"dnsmasq-dns-8db84466c-mm9ts\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") " pod="openstack/dnsmasq-dns-8db84466c-mm9ts" Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.769885 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-dns-svc\") pod \"dnsmasq-dns-8db84466c-mm9ts\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") " pod="openstack/dnsmasq-dns-8db84466c-mm9ts" Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.771423 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-ovsdbserver-sb\") pod \"dnsmasq-dns-8db84466c-mm9ts\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") " pod="openstack/dnsmasq-dns-8db84466c-mm9ts" Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.771486 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-dns-svc\") pod \"dnsmasq-dns-8db84466c-mm9ts\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") " pod="openstack/dnsmasq-dns-8db84466c-mm9ts" Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.771554 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-dns-swift-storage-0\") pod \"dnsmasq-dns-8db84466c-mm9ts\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") " pod="openstack/dnsmasq-dns-8db84466c-mm9ts" Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.771583 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-config\") pod \"dnsmasq-dns-8db84466c-mm9ts\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") " pod="openstack/dnsmasq-dns-8db84466c-mm9ts" Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.771701 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-ovsdbserver-nb\") pod \"dnsmasq-dns-8db84466c-mm9ts\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") " pod="openstack/dnsmasq-dns-8db84466c-mm9ts" Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.792814 4773 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5hmf\" (UniqueName: \"kubernetes.io/projected/c9b10ad1-cb79-4231-b540-04a494438839-kube-api-access-z5hmf\") pod \"dnsmasq-dns-8db84466c-mm9ts\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") " pod="openstack/dnsmasq-dns-8db84466c-mm9ts" Jan 22 12:15:37 crc kubenswrapper[4773]: I0122 12:15:37.860052 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8db84466c-mm9ts" Jan 22 12:15:38 crc kubenswrapper[4773]: I0122 12:15:38.292579 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8db84466c-mm9ts"] Jan 22 12:15:38 crc kubenswrapper[4773]: I0122 12:15:38.614569 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Jan 22 12:15:38 crc kubenswrapper[4773]: I0122 12:15:38.958729 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-zp46d"] Jan 22 12:15:38 crc kubenswrapper[4773]: I0122 12:15:38.961015 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-zp46d" Jan 22 12:15:38 crc kubenswrapper[4773]: I0122 12:15:38.976235 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-zp46d"] Jan 22 12:15:38 crc kubenswrapper[4773]: I0122 12:15:38.991535 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-4475-account-create-update-crt4r"] Jan 22 12:15:38 crc kubenswrapper[4773]: I0122 12:15:38.992810 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-4475-account-create-update-crt4r" Jan 22 12:15:38 crc kubenswrapper[4773]: I0122 12:15:38.994616 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.029298 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-4475-account-create-update-crt4r"] Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.082168 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-sk55j"] Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.083530 4773 util.go:30] "No sandbox for pod can be found. 
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.092116 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-0af4-account-create-update-j924j"]
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.092603 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpgcq\" (UniqueName: \"kubernetes.io/projected/7054d5a6-f005-40f2-bcfe-95835a0aa45d-kube-api-access-wpgcq\") pod \"cinder-4475-account-create-update-crt4r\" (UID: \"7054d5a6-f005-40f2-bcfe-95835a0aa45d\") " pod="openstack/cinder-4475-account-create-update-crt4r"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.092680 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fcf03f3-751b-4dd9-a44b-531164a3b4d4-operator-scripts\") pod \"cinder-db-create-zp46d\" (UID: \"7fcf03f3-751b-4dd9-a44b-531164a3b4d4\") " pod="openstack/cinder-db-create-zp46d"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.092730 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhrdk\" (UniqueName: \"kubernetes.io/projected/7fcf03f3-751b-4dd9-a44b-531164a3b4d4-kube-api-access-xhrdk\") pod \"cinder-db-create-zp46d\" (UID: \"7fcf03f3-751b-4dd9-a44b-531164a3b4d4\") " pod="openstack/cinder-db-create-zp46d"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.092766 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7054d5a6-f005-40f2-bcfe-95835a0aa45d-operator-scripts\") pod \"cinder-4475-account-create-update-crt4r\" (UID: \"7054d5a6-f005-40f2-bcfe-95835a0aa45d\") " pod="openstack/cinder-4475-account-create-update-crt4r"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.093214 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0af4-account-create-update-j924j"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.096786 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.100774 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-sk55j"]
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.110178 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-0af4-account-create-update-j924j"]
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.194251 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ps4q\" (UniqueName: \"kubernetes.io/projected/4e1843f0-45c3-4e84-ab98-b7db909e37bd-kube-api-access-5ps4q\") pod \"barbican-db-create-sk55j\" (UID: \"4e1843f0-45c3-4e84-ab98-b7db909e37bd\") " pod="openstack/barbican-db-create-sk55j"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.194346 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpgcq\" (UniqueName: \"kubernetes.io/projected/7054d5a6-f005-40f2-bcfe-95835a0aa45d-kube-api-access-wpgcq\") pod \"cinder-4475-account-create-update-crt4r\" (UID: \"7054d5a6-f005-40f2-bcfe-95835a0aa45d\") " pod="openstack/cinder-4475-account-create-update-crt4r"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.194403 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4e1843f0-45c3-4e84-ab98-b7db909e37bd-operator-scripts\") pod \"barbican-db-create-sk55j\" (UID: \"4e1843f0-45c3-4e84-ab98-b7db909e37bd\") " pod="openstack/barbican-db-create-sk55j"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.194613 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0419433e-4ad6-4e7e-acb0-e769c5ba611c-operator-scripts\") pod \"barbican-0af4-account-create-update-j924j\" (UID: \"0419433e-4ad6-4e7e-acb0-e769c5ba611c\") " pod="openstack/barbican-0af4-account-create-update-j924j"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.194663 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fcf03f3-751b-4dd9-a44b-531164a3b4d4-operator-scripts\") pod \"cinder-db-create-zp46d\" (UID: \"7fcf03f3-751b-4dd9-a44b-531164a3b4d4\") " pod="openstack/cinder-db-create-zp46d"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.194717 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhrdk\" (UniqueName: \"kubernetes.io/projected/7fcf03f3-751b-4dd9-a44b-531164a3b4d4-kube-api-access-xhrdk\") pod \"cinder-db-create-zp46d\" (UID: \"7fcf03f3-751b-4dd9-a44b-531164a3b4d4\") " pod="openstack/cinder-db-create-zp46d"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.194747 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csqtc\" (UniqueName: \"kubernetes.io/projected/0419433e-4ad6-4e7e-acb0-e769c5ba611c-kube-api-access-csqtc\") pod \"barbican-0af4-account-create-update-j924j\" (UID: \"0419433e-4ad6-4e7e-acb0-e769c5ba611c\") " pod="openstack/barbican-0af4-account-create-update-j924j"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.194773 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7054d5a6-f005-40f2-bcfe-95835a0aa45d-operator-scripts\") pod \"cinder-4475-account-create-update-crt4r\" (UID: \"7054d5a6-f005-40f2-bcfe-95835a0aa45d\") " pod="openstack/cinder-4475-account-create-update-crt4r"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.195820 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7054d5a6-f005-40f2-bcfe-95835a0aa45d-operator-scripts\") pod \"cinder-4475-account-create-update-crt4r\" (UID: \"7054d5a6-f005-40f2-bcfe-95835a0aa45d\") " pod="openstack/cinder-4475-account-create-update-crt4r"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.196492 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fcf03f3-751b-4dd9-a44b-531164a3b4d4-operator-scripts\") pod \"cinder-db-create-zp46d\" (UID: \"7fcf03f3-751b-4dd9-a44b-531164a3b4d4\") " pod="openstack/cinder-db-create-zp46d"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.219492 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhrdk\" (UniqueName: \"kubernetes.io/projected/7fcf03f3-751b-4dd9-a44b-531164a3b4d4-kube-api-access-xhrdk\") pod \"cinder-db-create-zp46d\" (UID: \"7fcf03f3-751b-4dd9-a44b-531164a3b4d4\") " pod="openstack/cinder-db-create-zp46d"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.219945 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpgcq\" (UniqueName: \"kubernetes.io/projected/7054d5a6-f005-40f2-bcfe-95835a0aa45d-kube-api-access-wpgcq\") pod \"cinder-4475-account-create-update-crt4r\" (UID: \"7054d5a6-f005-40f2-bcfe-95835a0aa45d\") " pod="openstack/cinder-4475-account-create-update-crt4r"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.249952 4773 generic.go:334] "Generic (PLEG): container finished" podID="c9b10ad1-cb79-4231-b540-04a494438839" containerID="d2c64ed08bd7d2c55f11bd50f0b6739a7c972c8d23e4edffd7b73003cc73a03c" exitCode=0
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.249999 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8db84466c-mm9ts" event={"ID":"c9b10ad1-cb79-4231-b540-04a494438839","Type":"ContainerDied","Data":"d2c64ed08bd7d2c55f11bd50f0b6739a7c972c8d23e4edffd7b73003cc73a03c"}
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.250036 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8db84466c-mm9ts" event={"ID":"c9b10ad1-cb79-4231-b540-04a494438839","Type":"ContainerStarted","Data":"e4bd37843ee1c25b9a1e1ed82481b0e74d42ec2dbb547eb3fdbe204cbc33cff3"}
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.278714 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-zp46d"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.316088 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-4475-account-create-update-crt4r"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.324981 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ps4q\" (UniqueName: \"kubernetes.io/projected/4e1843f0-45c3-4e84-ab98-b7db909e37bd-kube-api-access-5ps4q\") pod \"barbican-db-create-sk55j\" (UID: \"4e1843f0-45c3-4e84-ab98-b7db909e37bd\") " pod="openstack/barbican-db-create-sk55j"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.325145 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4e1843f0-45c3-4e84-ab98-b7db909e37bd-operator-scripts\") pod \"barbican-db-create-sk55j\" (UID: \"4e1843f0-45c3-4e84-ab98-b7db909e37bd\") " pod="openstack/barbican-db-create-sk55j"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.325179 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0419433e-4ad6-4e7e-acb0-e769c5ba611c-operator-scripts\") pod \"barbican-0af4-account-create-update-j924j\" (UID: \"0419433e-4ad6-4e7e-acb0-e769c5ba611c\") " pod="openstack/barbican-0af4-account-create-update-j924j"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.325332 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csqtc\" (UniqueName: \"kubernetes.io/projected/0419433e-4ad6-4e7e-acb0-e769c5ba611c-kube-api-access-csqtc\") pod \"barbican-0af4-account-create-update-j924j\" (UID: \"0419433e-4ad6-4e7e-acb0-e769c5ba611c\") " pod="openstack/barbican-0af4-account-create-update-j924j"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.326214 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0419433e-4ad6-4e7e-acb0-e769c5ba611c-operator-scripts\") pod \"barbican-0af4-account-create-update-j924j\" (UID: \"0419433e-4ad6-4e7e-acb0-e769c5ba611c\") " pod="openstack/barbican-0af4-account-create-update-j924j"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.326793 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4e1843f0-45c3-4e84-ab98-b7db909e37bd-operator-scripts\") pod \"barbican-db-create-sk55j\" (UID: \"4e1843f0-45c3-4e84-ab98-b7db909e37bd\") " pod="openstack/barbican-db-create-sk55j"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.366013 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ps4q\" (UniqueName: \"kubernetes.io/projected/4e1843f0-45c3-4e84-ab98-b7db909e37bd-kube-api-access-5ps4q\") pod \"barbican-db-create-sk55j\" (UID: \"4e1843f0-45c3-4e84-ab98-b7db909e37bd\") " pod="openstack/barbican-db-create-sk55j"
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.367616 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-v7xxw"]
Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.370019 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-v7xxw"
Need to start a new one" pod="openstack/keystone-db-sync-v7xxw" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.378976 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-92k4l" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.379231 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.379350 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.379899 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.383313 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-v7xxw"] Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.401532 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-sk55j" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.410898 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csqtc\" (UniqueName: \"kubernetes.io/projected/0419433e-4ad6-4e7e-acb0-e769c5ba611c-kube-api-access-csqtc\") pod \"barbican-0af4-account-create-update-j924j\" (UID: \"0419433e-4ad6-4e7e-acb0-e769c5ba611c\") " pod="openstack/barbican-0af4-account-create-update-j924j" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.468355 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-5ztvh"] Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.469351 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-5ztvh" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.482643 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-5ztvh"] Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.539395 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd0c3eeb-ff3e-4718-932d-6453b4d6f189-combined-ca-bundle\") pod \"keystone-db-sync-v7xxw\" (UID: \"fd0c3eeb-ff3e-4718-932d-6453b4d6f189\") " pod="openstack/keystone-db-sync-v7xxw" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.539468 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd0c3eeb-ff3e-4718-932d-6453b4d6f189-config-data\") pod \"keystone-db-sync-v7xxw\" (UID: \"fd0c3eeb-ff3e-4718-932d-6453b4d6f189\") " pod="openstack/keystone-db-sync-v7xxw" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.539557 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87sts\" (UniqueName: \"kubernetes.io/projected/fd0c3eeb-ff3e-4718-932d-6453b4d6f189-kube-api-access-87sts\") pod \"keystone-db-sync-v7xxw\" (UID: \"fd0c3eeb-ff3e-4718-932d-6453b4d6f189\") " pod="openstack/keystone-db-sync-v7xxw" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.590069 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-ef71-account-create-update-2zrtj"] Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.591942 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-ef71-account-create-update-2zrtj" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.596302 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.601272 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-ef71-account-create-update-2zrtj"] Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.641660 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dg42\" (UniqueName: \"kubernetes.io/projected/a436a8ee-6b7a-4ea5-9056-c289241686e6-kube-api-access-2dg42\") pod \"neutron-db-create-5ztvh\" (UID: \"a436a8ee-6b7a-4ea5-9056-c289241686e6\") " pod="openstack/neutron-db-create-5ztvh" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.641714 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87sts\" (UniqueName: \"kubernetes.io/projected/fd0c3eeb-ff3e-4718-932d-6453b4d6f189-kube-api-access-87sts\") pod \"keystone-db-sync-v7xxw\" (UID: \"fd0c3eeb-ff3e-4718-932d-6453b4d6f189\") " pod="openstack/keystone-db-sync-v7xxw" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.641822 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd0c3eeb-ff3e-4718-932d-6453b4d6f189-combined-ca-bundle\") pod \"keystone-db-sync-v7xxw\" (UID: \"fd0c3eeb-ff3e-4718-932d-6453b4d6f189\") " pod="openstack/keystone-db-sync-v7xxw" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.641850 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a436a8ee-6b7a-4ea5-9056-c289241686e6-operator-scripts\") pod \"neutron-db-create-5ztvh\" (UID: \"a436a8ee-6b7a-4ea5-9056-c289241686e6\") " pod="openstack/neutron-db-create-5ztvh" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.641934 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd0c3eeb-ff3e-4718-932d-6453b4d6f189-config-data\") pod \"keystone-db-sync-v7xxw\" (UID: \"fd0c3eeb-ff3e-4718-932d-6453b4d6f189\") " pod="openstack/keystone-db-sync-v7xxw" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.647616 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd0c3eeb-ff3e-4718-932d-6453b4d6f189-config-data\") pod \"keystone-db-sync-v7xxw\" (UID: \"fd0c3eeb-ff3e-4718-932d-6453b4d6f189\") " pod="openstack/keystone-db-sync-v7xxw" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.647919 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd0c3eeb-ff3e-4718-932d-6453b4d6f189-combined-ca-bundle\") pod \"keystone-db-sync-v7xxw\" (UID: \"fd0c3eeb-ff3e-4718-932d-6453b4d6f189\") " pod="openstack/keystone-db-sync-v7xxw" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.670942 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87sts\" (UniqueName: \"kubernetes.io/projected/fd0c3eeb-ff3e-4718-932d-6453b4d6f189-kube-api-access-87sts\") pod \"keystone-db-sync-v7xxw\" (UID: \"fd0c3eeb-ff3e-4718-932d-6453b4d6f189\") " pod="openstack/keystone-db-sync-v7xxw" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.711198 
4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0af4-account-create-update-j924j" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.743805 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a436a8ee-6b7a-4ea5-9056-c289241686e6-operator-scripts\") pod \"neutron-db-create-5ztvh\" (UID: \"a436a8ee-6b7a-4ea5-9056-c289241686e6\") " pod="openstack/neutron-db-create-5ztvh" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.743862 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54460d78-28b8-49c7-85ec-d4333db4b86c-operator-scripts\") pod \"neutron-ef71-account-create-update-2zrtj\" (UID: \"54460d78-28b8-49c7-85ec-d4333db4b86c\") " pod="openstack/neutron-ef71-account-create-update-2zrtj" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.743882 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fvk4\" (UniqueName: \"kubernetes.io/projected/54460d78-28b8-49c7-85ec-d4333db4b86c-kube-api-access-7fvk4\") pod \"neutron-ef71-account-create-update-2zrtj\" (UID: \"54460d78-28b8-49c7-85ec-d4333db4b86c\") " pod="openstack/neutron-ef71-account-create-update-2zrtj" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.743952 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dg42\" (UniqueName: \"kubernetes.io/projected/a436a8ee-6b7a-4ea5-9056-c289241686e6-kube-api-access-2dg42\") pod \"neutron-db-create-5ztvh\" (UID: \"a436a8ee-6b7a-4ea5-9056-c289241686e6\") " pod="openstack/neutron-db-create-5ztvh" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.744809 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a436a8ee-6b7a-4ea5-9056-c289241686e6-operator-scripts\") pod \"neutron-db-create-5ztvh\" (UID: \"a436a8ee-6b7a-4ea5-9056-c289241686e6\") " pod="openstack/neutron-db-create-5ztvh" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.750716 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-v7xxw" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.781893 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dg42\" (UniqueName: \"kubernetes.io/projected/a436a8ee-6b7a-4ea5-9056-c289241686e6-kube-api-access-2dg42\") pod \"neutron-db-create-5ztvh\" (UID: \"a436a8ee-6b7a-4ea5-9056-c289241686e6\") " pod="openstack/neutron-db-create-5ztvh" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.815955 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-5ztvh" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.856776 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54460d78-28b8-49c7-85ec-d4333db4b86c-operator-scripts\") pod \"neutron-ef71-account-create-update-2zrtj\" (UID: \"54460d78-28b8-49c7-85ec-d4333db4b86c\") " pod="openstack/neutron-ef71-account-create-update-2zrtj" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.856835 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fvk4\" (UniqueName: \"kubernetes.io/projected/54460d78-28b8-49c7-85ec-d4333db4b86c-kube-api-access-7fvk4\") pod \"neutron-ef71-account-create-update-2zrtj\" (UID: \"54460d78-28b8-49c7-85ec-d4333db4b86c\") " pod="openstack/neutron-ef71-account-create-update-2zrtj" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.860179 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54460d78-28b8-49c7-85ec-d4333db4b86c-operator-scripts\") pod \"neutron-ef71-account-create-update-2zrtj\" (UID: \"54460d78-28b8-49c7-85ec-d4333db4b86c\") " pod="openstack/neutron-ef71-account-create-update-2zrtj" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.883808 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-sk55j"] Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.890089 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fvk4\" (UniqueName: \"kubernetes.io/projected/54460d78-28b8-49c7-85ec-d4333db4b86c-kube-api-access-7fvk4\") pod \"neutron-ef71-account-create-update-2zrtj\" (UID: \"54460d78-28b8-49c7-85ec-d4333db4b86c\") " pod="openstack/neutron-ef71-account-create-update-2zrtj" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.916775 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-ef71-account-create-update-2zrtj" Jan 22 12:15:39 crc kubenswrapper[4773]: I0122 12:15:39.983832 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-4475-account-create-update-crt4r"] Jan 22 12:15:40 crc kubenswrapper[4773]: I0122 12:15:40.069207 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-zp46d"] Jan 22 12:15:40 crc kubenswrapper[4773]: W0122 12:15:40.120592 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7fcf03f3_751b_4dd9_a44b_531164a3b4d4.slice/crio-dc43d7aaa745e2d6d6c8d25b01c85bd82ac6e46f27c18828f449687a086be3d0 WatchSource:0}: Error finding container dc43d7aaa745e2d6d6c8d25b01c85bd82ac6e46f27c18828f449687a086be3d0: Status 404 returned error can't find the container with id dc43d7aaa745e2d6d6c8d25b01c85bd82ac6e46f27c18828f449687a086be3d0 Jan 22 12:15:40 crc kubenswrapper[4773]: I0122 12:15:40.259690 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-zp46d" event={"ID":"7fcf03f3-751b-4dd9-a44b-531164a3b4d4","Type":"ContainerStarted","Data":"dc43d7aaa745e2d6d6c8d25b01c85bd82ac6e46f27c18828f449687a086be3d0"} Jan 22 12:15:40 crc kubenswrapper[4773]: I0122 12:15:40.262133 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-sk55j" event={"ID":"4e1843f0-45c3-4e84-ab98-b7db909e37bd","Type":"ContainerStarted","Data":"2204090ac02b68b32a14007959e8d3ed2c3b2bbfca585f4dd54b41bb7fdc82b7"} Jan 22 12:15:40 crc kubenswrapper[4773]: I0122 12:15:40.269430 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-4475-account-create-update-crt4r" event={"ID":"7054d5a6-f005-40f2-bcfe-95835a0aa45d","Type":"ContainerStarted","Data":"38d635626cbeb311aef3be459e0d82bdd658d6edbcd46cbb8ab6a967562899c3"} Jan 22 12:15:40 crc kubenswrapper[4773]: I0122 12:15:40.274666 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8db84466c-mm9ts" event={"ID":"c9b10ad1-cb79-4231-b540-04a494438839","Type":"ContainerStarted","Data":"9f58fa38a9d9aa771343f7e1bb26057f5ba9c615f90418c8ff99eab8a04440e2"} Jan 22 12:15:40 crc kubenswrapper[4773]: I0122 12:15:40.275329 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8db84466c-mm9ts" Jan 22 12:15:40 crc kubenswrapper[4773]: I0122 12:15:40.302081 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8db84466c-mm9ts" podStartSLOduration=3.302062816 podStartE2EDuration="3.302062816s" podCreationTimestamp="2026-01-22 12:15:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:15:40.298676185 +0000 UTC m=+1247.876792020" watchObservedRunningTime="2026-01-22 12:15:40.302062816 +0000 UTC m=+1247.880178641" Jan 22 12:15:40 crc kubenswrapper[4773]: I0122 12:15:40.432470 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-0af4-account-create-update-j924j"] Jan 22 12:15:40 crc kubenswrapper[4773]: I0122 12:15:40.508457 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-v7xxw"] Jan 22 12:15:40 crc kubenswrapper[4773]: W0122 12:15:40.512764 4773 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd0c3eeb_ff3e_4718_932d_6453b4d6f189.slice/crio-444311f7a849595f6ea1084047b6f78c1d1590ed2a77420c398621ce0923fee5 WatchSource:0}: Error finding container 444311f7a849595f6ea1084047b6f78c1d1590ed2a77420c398621ce0923fee5: Status 404 returned error can't find the container with id 444311f7a849595f6ea1084047b6f78c1d1590ed2a77420c398621ce0923fee5 Jan 22 12:15:40 crc kubenswrapper[4773]: I0122 12:15:40.522886 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-5ztvh"] Jan 22 12:15:40 crc kubenswrapper[4773]: I0122 12:15:40.651578 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-ef71-account-create-update-2zrtj"] Jan 22 12:15:41 crc kubenswrapper[4773]: I0122 12:15:41.287754 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-5ztvh" event={"ID":"a436a8ee-6b7a-4ea5-9056-c289241686e6","Type":"ContainerStarted","Data":"15c4231fb715135dd1011052cf51192cc080b32f3bddb8c8b960f54b1c47020b"} Jan 22 12:15:41 crc kubenswrapper[4773]: I0122 12:15:41.288101 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-5ztvh" event={"ID":"a436a8ee-6b7a-4ea5-9056-c289241686e6","Type":"ContainerStarted","Data":"b04839476c80579496a84fb0125fa1dd89bd51157b31d5c34a47e21e9dfd9e91"} Jan 22 12:15:41 crc kubenswrapper[4773]: I0122 12:15:41.291686 4773 generic.go:334] "Generic (PLEG): container finished" podID="7fcf03f3-751b-4dd9-a44b-531164a3b4d4" containerID="3ffb7fb743609268c9120667fae8f80a0ebb42bf2a809b8cde494ed6f0bc19ab" exitCode=0 Jan 22 12:15:41 crc kubenswrapper[4773]: I0122 12:15:41.291794 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-zp46d" event={"ID":"7fcf03f3-751b-4dd9-a44b-531164a3b4d4","Type":"ContainerDied","Data":"3ffb7fb743609268c9120667fae8f80a0ebb42bf2a809b8cde494ed6f0bc19ab"} Jan 22 12:15:41 crc kubenswrapper[4773]: I0122 12:15:41.297252 4773 generic.go:334] "Generic (PLEG): container finished" podID="4e1843f0-45c3-4e84-ab98-b7db909e37bd" containerID="5b0b9f019f40afbdc470bca3f7127866655fa613de315b150d30f0d4850cd7b0" exitCode=0 Jan 22 12:15:41 crc kubenswrapper[4773]: I0122 12:15:41.297668 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-sk55j" event={"ID":"4e1843f0-45c3-4e84-ab98-b7db909e37bd","Type":"ContainerDied","Data":"5b0b9f019f40afbdc470bca3f7127866655fa613de315b150d30f0d4850cd7b0"} Jan 22 12:15:41 crc kubenswrapper[4773]: I0122 12:15:41.300542 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ef71-account-create-update-2zrtj" event={"ID":"54460d78-28b8-49c7-85ec-d4333db4b86c","Type":"ContainerStarted","Data":"6887dd3dc0f08a8f0fb433235b8159ea7b606dd55c75cb71c766bc671c73a896"} Jan 22 12:15:41 crc kubenswrapper[4773]: I0122 12:15:41.302833 4773 generic.go:334] "Generic (PLEG): container finished" podID="0419433e-4ad6-4e7e-acb0-e769c5ba611c" containerID="41f034e2200a964f491e83e9757b4cd91acd4b67ef1b1be157cb223ac98e6685" exitCode=0 Jan 22 12:15:41 crc kubenswrapper[4773]: I0122 12:15:41.302970 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0af4-account-create-update-j924j" event={"ID":"0419433e-4ad6-4e7e-acb0-e769c5ba611c","Type":"ContainerDied","Data":"41f034e2200a964f491e83e9757b4cd91acd4b67ef1b1be157cb223ac98e6685"} Jan 22 12:15:41 crc kubenswrapper[4773]: I0122 12:15:41.303062 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-0af4-account-create-update-j924j" event={"ID":"0419433e-4ad6-4e7e-acb0-e769c5ba611c","Type":"ContainerStarted","Data":"449fbb8dfc5e74200ebe9fc6b81ff5487eb98aa9730e31379a2aea0752cf0703"} Jan 22 12:15:41 crc kubenswrapper[4773]: I0122 12:15:41.310468 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-v7xxw" event={"ID":"fd0c3eeb-ff3e-4718-932d-6453b4d6f189","Type":"ContainerStarted","Data":"444311f7a849595f6ea1084047b6f78c1d1590ed2a77420c398621ce0923fee5"} Jan 22 12:15:41 crc kubenswrapper[4773]: I0122 12:15:41.314833 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-5ztvh" podStartSLOduration=2.314787593 podStartE2EDuration="2.314787593s" podCreationTimestamp="2026-01-22 12:15:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:15:41.310739041 +0000 UTC m=+1248.888854876" watchObservedRunningTime="2026-01-22 12:15:41.314787593 +0000 UTC m=+1248.892903418" Jan 22 12:15:41 crc kubenswrapper[4773]: I0122 12:15:41.322679 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-4475-account-create-update-crt4r" event={"ID":"7054d5a6-f005-40f2-bcfe-95835a0aa45d","Type":"ContainerDied","Data":"26548a6cc894a5bb88b74830b61f9e63616ad47105eb4dc8ab517b549d825079"} Jan 22 12:15:41 crc kubenswrapper[4773]: I0122 12:15:41.323010 4773 generic.go:334] "Generic (PLEG): container finished" podID="7054d5a6-f005-40f2-bcfe-95835a0aa45d" containerID="26548a6cc894a5bb88b74830b61f9e63616ad47105eb4dc8ab517b549d825079" exitCode=0 Jan 22 12:15:42 crc kubenswrapper[4773]: I0122 12:15:42.332713 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rqktn" event={"ID":"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41","Type":"ContainerStarted","Data":"858fe7c568845f4c55a8c03dec3382290ef6c422cd08d419d67d8b3aee2a814d"} Jan 22 12:15:42 crc kubenswrapper[4773]: I0122 12:15:42.334617 4773 generic.go:334] "Generic (PLEG): container finished" podID="54460d78-28b8-49c7-85ec-d4333db4b86c" containerID="77463d8c190922bd049e9a6f4d1f099161dade201cddd23bc77101b86ec4feb6" exitCode=0 Jan 22 12:15:42 crc kubenswrapper[4773]: I0122 12:15:42.334699 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ef71-account-create-update-2zrtj" event={"ID":"54460d78-28b8-49c7-85ec-d4333db4b86c","Type":"ContainerDied","Data":"77463d8c190922bd049e9a6f4d1f099161dade201cddd23bc77101b86ec4feb6"} Jan 22 12:15:42 crc kubenswrapper[4773]: I0122 12:15:42.335959 4773 generic.go:334] "Generic (PLEG): container finished" podID="a436a8ee-6b7a-4ea5-9056-c289241686e6" containerID="15c4231fb715135dd1011052cf51192cc080b32f3bddb8c8b960f54b1c47020b" exitCode=0 Jan 22 12:15:42 crc kubenswrapper[4773]: I0122 12:15:42.336038 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-5ztvh" event={"ID":"a436a8ee-6b7a-4ea5-9056-c289241686e6","Type":"ContainerDied","Data":"15c4231fb715135dd1011052cf51192cc080b32f3bddb8c8b960f54b1c47020b"} Jan 22 12:15:42 crc kubenswrapper[4773]: I0122 12:15:42.353766 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-rqktn" podStartSLOduration=2.322693067 podStartE2EDuration="34.353743846s" podCreationTimestamp="2026-01-22 12:15:08 +0000 UTC" firstStartedPulling="2026-01-22 12:15:09.267548729 +0000 UTC m=+1216.845664554" lastFinishedPulling="2026-01-22 12:15:41.298599508 +0000 UTC 
m=+1248.876715333" observedRunningTime="2026-01-22 12:15:42.351364704 +0000 UTC m=+1249.929480549" watchObservedRunningTime="2026-01-22 12:15:42.353743846 +0000 UTC m=+1249.931859671" Jan 22 12:15:42 crc kubenswrapper[4773]: I0122 12:15:42.711542 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-4475-account-create-update-crt4r" Jan 22 12:15:42 crc kubenswrapper[4773]: I0122 12:15:42.889930 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-zp46d" Jan 22 12:15:42 crc kubenswrapper[4773]: I0122 12:15:42.896549 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0af4-account-create-update-j924j" Jan 22 12:15:42 crc kubenswrapper[4773]: I0122 12:15:42.908093 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-sk55j" Jan 22 12:15:42 crc kubenswrapper[4773]: I0122 12:15:42.910043 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpgcq\" (UniqueName: \"kubernetes.io/projected/7054d5a6-f005-40f2-bcfe-95835a0aa45d-kube-api-access-wpgcq\") pod \"7054d5a6-f005-40f2-bcfe-95835a0aa45d\" (UID: \"7054d5a6-f005-40f2-bcfe-95835a0aa45d\") " Jan 22 12:15:42 crc kubenswrapper[4773]: I0122 12:15:42.910228 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7054d5a6-f005-40f2-bcfe-95835a0aa45d-operator-scripts\") pod \"7054d5a6-f005-40f2-bcfe-95835a0aa45d\" (UID: \"7054d5a6-f005-40f2-bcfe-95835a0aa45d\") " Jan 22 12:15:42 crc kubenswrapper[4773]: I0122 12:15:42.919127 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7054d5a6-f005-40f2-bcfe-95835a0aa45d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7054d5a6-f005-40f2-bcfe-95835a0aa45d" (UID: "7054d5a6-f005-40f2-bcfe-95835a0aa45d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:42 crc kubenswrapper[4773]: I0122 12:15:42.919557 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7054d5a6-f005-40f2-bcfe-95835a0aa45d-kube-api-access-wpgcq" (OuterVolumeSpecName: "kube-api-access-wpgcq") pod "7054d5a6-f005-40f2-bcfe-95835a0aa45d" (UID: "7054d5a6-f005-40f2-bcfe-95835a0aa45d"). InnerVolumeSpecName "kube-api-access-wpgcq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.011852 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4e1843f0-45c3-4e84-ab98-b7db909e37bd-operator-scripts\") pod \"4e1843f0-45c3-4e84-ab98-b7db909e37bd\" (UID: \"4e1843f0-45c3-4e84-ab98-b7db909e37bd\") " Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.011974 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhrdk\" (UniqueName: \"kubernetes.io/projected/7fcf03f3-751b-4dd9-a44b-531164a3b4d4-kube-api-access-xhrdk\") pod \"7fcf03f3-751b-4dd9-a44b-531164a3b4d4\" (UID: \"7fcf03f3-751b-4dd9-a44b-531164a3b4d4\") " Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.012029 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0419433e-4ad6-4e7e-acb0-e769c5ba611c-operator-scripts\") pod \"0419433e-4ad6-4e7e-acb0-e769c5ba611c\" (UID: \"0419433e-4ad6-4e7e-acb0-e769c5ba611c\") " Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.012058 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fcf03f3-751b-4dd9-a44b-531164a3b4d4-operator-scripts\") pod \"7fcf03f3-751b-4dd9-a44b-531164a3b4d4\" (UID: \"7fcf03f3-751b-4dd9-a44b-531164a3b4d4\") " Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.012084 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-csqtc\" (UniqueName: \"kubernetes.io/projected/0419433e-4ad6-4e7e-acb0-e769c5ba611c-kube-api-access-csqtc\") pod \"0419433e-4ad6-4e7e-acb0-e769c5ba611c\" (UID: \"0419433e-4ad6-4e7e-acb0-e769c5ba611c\") " Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.012103 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ps4q\" (UniqueName: \"kubernetes.io/projected/4e1843f0-45c3-4e84-ab98-b7db909e37bd-kube-api-access-5ps4q\") pod \"4e1843f0-45c3-4e84-ab98-b7db909e37bd\" (UID: \"4e1843f0-45c3-4e84-ab98-b7db909e37bd\") " Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.012524 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpgcq\" (UniqueName: \"kubernetes.io/projected/7054d5a6-f005-40f2-bcfe-95835a0aa45d-kube-api-access-wpgcq\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.012544 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7054d5a6-f005-40f2-bcfe-95835a0aa45d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.012614 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e1843f0-45c3-4e84-ab98-b7db909e37bd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4e1843f0-45c3-4e84-ab98-b7db909e37bd" (UID: "4e1843f0-45c3-4e84-ab98-b7db909e37bd"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.012597 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7fcf03f3-751b-4dd9-a44b-531164a3b4d4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7fcf03f3-751b-4dd9-a44b-531164a3b4d4" (UID: "7fcf03f3-751b-4dd9-a44b-531164a3b4d4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.012925 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0419433e-4ad6-4e7e-acb0-e769c5ba611c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0419433e-4ad6-4e7e-acb0-e769c5ba611c" (UID: "0419433e-4ad6-4e7e-acb0-e769c5ba611c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.015702 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0419433e-4ad6-4e7e-acb0-e769c5ba611c-kube-api-access-csqtc" (OuterVolumeSpecName: "kube-api-access-csqtc") pod "0419433e-4ad6-4e7e-acb0-e769c5ba611c" (UID: "0419433e-4ad6-4e7e-acb0-e769c5ba611c"). InnerVolumeSpecName "kube-api-access-csqtc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.016977 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e1843f0-45c3-4e84-ab98-b7db909e37bd-kube-api-access-5ps4q" (OuterVolumeSpecName: "kube-api-access-5ps4q") pod "4e1843f0-45c3-4e84-ab98-b7db909e37bd" (UID: "4e1843f0-45c3-4e84-ab98-b7db909e37bd"). InnerVolumeSpecName "kube-api-access-5ps4q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.017231 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fcf03f3-751b-4dd9-a44b-531164a3b4d4-kube-api-access-xhrdk" (OuterVolumeSpecName: "kube-api-access-xhrdk") pod "7fcf03f3-751b-4dd9-a44b-531164a3b4d4" (UID: "7fcf03f3-751b-4dd9-a44b-531164a3b4d4"). InnerVolumeSpecName "kube-api-access-xhrdk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.113764 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4e1843f0-45c3-4e84-ab98-b7db909e37bd-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.113807 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhrdk\" (UniqueName: \"kubernetes.io/projected/7fcf03f3-751b-4dd9-a44b-531164a3b4d4-kube-api-access-xhrdk\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.113818 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0419433e-4ad6-4e7e-acb0-e769c5ba611c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.113827 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fcf03f3-751b-4dd9-a44b-531164a3b4d4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.113837 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-csqtc\" (UniqueName: \"kubernetes.io/projected/0419433e-4ad6-4e7e-acb0-e769c5ba611c-kube-api-access-csqtc\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.113845 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ps4q\" (UniqueName: \"kubernetes.io/projected/4e1843f0-45c3-4e84-ab98-b7db909e37bd-kube-api-access-5ps4q\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.350240 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-zp46d" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.350234 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-zp46d" event={"ID":"7fcf03f3-751b-4dd9-a44b-531164a3b4d4","Type":"ContainerDied","Data":"dc43d7aaa745e2d6d6c8d25b01c85bd82ac6e46f27c18828f449687a086be3d0"} Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.350669 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc43d7aaa745e2d6d6c8d25b01c85bd82ac6e46f27c18828f449687a086be3d0" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.367007 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-sk55j" event={"ID":"4e1843f0-45c3-4e84-ab98-b7db909e37bd","Type":"ContainerDied","Data":"2204090ac02b68b32a14007959e8d3ed2c3b2bbfca585f4dd54b41bb7fdc82b7"} Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.367075 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2204090ac02b68b32a14007959e8d3ed2c3b2bbfca585f4dd54b41bb7fdc82b7" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.367140 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-sk55j" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.369521 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0af4-account-create-update-j924j" event={"ID":"0419433e-4ad6-4e7e-acb0-e769c5ba611c","Type":"ContainerDied","Data":"449fbb8dfc5e74200ebe9fc6b81ff5487eb98aa9730e31379a2aea0752cf0703"} Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.369556 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="449fbb8dfc5e74200ebe9fc6b81ff5487eb98aa9730e31379a2aea0752cf0703" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.369640 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0af4-account-create-update-j924j" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.377700 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-4475-account-create-update-crt4r" Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.379764 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-4475-account-create-update-crt4r" event={"ID":"7054d5a6-f005-40f2-bcfe-95835a0aa45d","Type":"ContainerDied","Data":"38d635626cbeb311aef3be459e0d82bdd658d6edbcd46cbb8ab6a967562899c3"} Jan 22 12:15:43 crc kubenswrapper[4773]: I0122 12:15:43.379818 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="38d635626cbeb311aef3be459e0d82bdd658d6edbcd46cbb8ab6a967562899c3" Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.325383 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ef71-account-create-update-2zrtj" Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.330739 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-5ztvh" Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.388541 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a436a8ee-6b7a-4ea5-9056-c289241686e6-operator-scripts\") pod \"a436a8ee-6b7a-4ea5-9056-c289241686e6\" (UID: \"a436a8ee-6b7a-4ea5-9056-c289241686e6\") " Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.388738 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fvk4\" (UniqueName: \"kubernetes.io/projected/54460d78-28b8-49c7-85ec-d4333db4b86c-kube-api-access-7fvk4\") pod \"54460d78-28b8-49c7-85ec-d4333db4b86c\" (UID: \"54460d78-28b8-49c7-85ec-d4333db4b86c\") " Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.388785 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54460d78-28b8-49c7-85ec-d4333db4b86c-operator-scripts\") pod \"54460d78-28b8-49c7-85ec-d4333db4b86c\" (UID: \"54460d78-28b8-49c7-85ec-d4333db4b86c\") " Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.388828 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dg42\" (UniqueName: \"kubernetes.io/projected/a436a8ee-6b7a-4ea5-9056-c289241686e6-kube-api-access-2dg42\") pod \"a436a8ee-6b7a-4ea5-9056-c289241686e6\" (UID: \"a436a8ee-6b7a-4ea5-9056-c289241686e6\") " Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.390748 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54460d78-28b8-49c7-85ec-d4333db4b86c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "54460d78-28b8-49c7-85ec-d4333db4b86c" (UID: "54460d78-28b8-49c7-85ec-d4333db4b86c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.391526 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a436a8ee-6b7a-4ea5-9056-c289241686e6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a436a8ee-6b7a-4ea5-9056-c289241686e6" (UID: "a436a8ee-6b7a-4ea5-9056-c289241686e6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.395684 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54460d78-28b8-49c7-85ec-d4333db4b86c-kube-api-access-7fvk4" (OuterVolumeSpecName: "kube-api-access-7fvk4") pod "54460d78-28b8-49c7-85ec-d4333db4b86c" (UID: "54460d78-28b8-49c7-85ec-d4333db4b86c"). InnerVolumeSpecName "kube-api-access-7fvk4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.411609 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a436a8ee-6b7a-4ea5-9056-c289241686e6-kube-api-access-2dg42" (OuterVolumeSpecName: "kube-api-access-2dg42") pod "a436a8ee-6b7a-4ea5-9056-c289241686e6" (UID: "a436a8ee-6b7a-4ea5-9056-c289241686e6"). InnerVolumeSpecName "kube-api-access-2dg42". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.415850 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-ef71-account-create-update-2zrtj" Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.415879 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ef71-account-create-update-2zrtj" event={"ID":"54460d78-28b8-49c7-85ec-d4333db4b86c","Type":"ContainerDied","Data":"6887dd3dc0f08a8f0fb433235b8159ea7b606dd55c75cb71c766bc671c73a896"} Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.417707 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6887dd3dc0f08a8f0fb433235b8159ea7b606dd55c75cb71c766bc671c73a896" Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.419703 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-5ztvh" event={"ID":"a436a8ee-6b7a-4ea5-9056-c289241686e6","Type":"ContainerDied","Data":"b04839476c80579496a84fb0125fa1dd89bd51157b31d5c34a47e21e9dfd9e91"} Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.419744 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b04839476c80579496a84fb0125fa1dd89bd51157b31d5c34a47e21e9dfd9e91" Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.419833 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-5ztvh" Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.490012 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a436a8ee-6b7a-4ea5-9056-c289241686e6-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.490052 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fvk4\" (UniqueName: \"kubernetes.io/projected/54460d78-28b8-49c7-85ec-d4333db4b86c-kube-api-access-7fvk4\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.490063 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54460d78-28b8-49c7-85ec-d4333db4b86c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:46 crc kubenswrapper[4773]: I0122 12:15:46.490074 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dg42\" (UniqueName: \"kubernetes.io/projected/a436a8ee-6b7a-4ea5-9056-c289241686e6-kube-api-access-2dg42\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:47 crc kubenswrapper[4773]: I0122 12:15:47.431040 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-v7xxw" event={"ID":"fd0c3eeb-ff3e-4718-932d-6453b4d6f189","Type":"ContainerStarted","Data":"8d1e9af75c0e1f61a4f35634e8cf84897907c8ed147661c4cf3eed4f653b5990"} Jan 22 12:15:47 crc kubenswrapper[4773]: I0122 12:15:47.468749 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-v7xxw" podStartSLOduration=2.809831733 podStartE2EDuration="8.46872502s" podCreationTimestamp="2026-01-22 12:15:39 +0000 UTC" firstStartedPulling="2026-01-22 12:15:40.514548851 +0000 UTC m=+1248.092664676" lastFinishedPulling="2026-01-22 12:15:46.173442138 +0000 UTC m=+1253.751557963" observedRunningTime="2026-01-22 12:15:47.459858395 +0000 UTC m=+1255.037974220" watchObservedRunningTime="2026-01-22 12:15:47.46872502 +0000 UTC m=+1255.046840845" Jan 22 12:15:47 crc kubenswrapper[4773]: I0122 12:15:47.862550 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8db84466c-mm9ts" Jan 
22 12:15:47 crc kubenswrapper[4773]: I0122 12:15:47.923831 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-7qmzz"] Jan 22 12:15:47 crc kubenswrapper[4773]: I0122 12:15:47.924191 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz" podUID="87cf9391-4cf2-4f02-a532-e15917e46cb4" containerName="dnsmasq-dns" containerID="cri-o://180e33a7cc131a748eacd1fe35057742fc50f138d932b50b2709a02c7a42c6c6" gracePeriod=10 Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.405761 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz" Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.446202 4773 generic.go:334] "Generic (PLEG): container finished" podID="87cf9391-4cf2-4f02-a532-e15917e46cb4" containerID="180e33a7cc131a748eacd1fe35057742fc50f138d932b50b2709a02c7a42c6c6" exitCode=0 Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.447485 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz" Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.448037 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz" event={"ID":"87cf9391-4cf2-4f02-a532-e15917e46cb4","Type":"ContainerDied","Data":"180e33a7cc131a748eacd1fe35057742fc50f138d932b50b2709a02c7a42c6c6"} Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.448077 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-7qmzz" event={"ID":"87cf9391-4cf2-4f02-a532-e15917e46cb4","Type":"ContainerDied","Data":"034eacff577dee7d17b843cecc06c7824109ca86057fef0ce427ca8cc1f09f60"} Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.448098 4773 scope.go:117] "RemoveContainer" containerID="180e33a7cc131a748eacd1fe35057742fc50f138d932b50b2709a02c7a42c6c6" Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.475785 4773 scope.go:117] "RemoveContainer" containerID="74b4d4684664b139139bb3348e4ff958b3867224667c4e0b40ecd5eabab182dd" Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.506272 4773 scope.go:117] "RemoveContainer" containerID="180e33a7cc131a748eacd1fe35057742fc50f138d932b50b2709a02c7a42c6c6" Jan 22 12:15:48 crc kubenswrapper[4773]: E0122 12:15:48.507096 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"180e33a7cc131a748eacd1fe35057742fc50f138d932b50b2709a02c7a42c6c6\": container with ID starting with 180e33a7cc131a748eacd1fe35057742fc50f138d932b50b2709a02c7a42c6c6 not found: ID does not exist" containerID="180e33a7cc131a748eacd1fe35057742fc50f138d932b50b2709a02c7a42c6c6" Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.507132 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"180e33a7cc131a748eacd1fe35057742fc50f138d932b50b2709a02c7a42c6c6"} err="failed to get container status \"180e33a7cc131a748eacd1fe35057742fc50f138d932b50b2709a02c7a42c6c6\": rpc error: code = NotFound desc = could not find container \"180e33a7cc131a748eacd1fe35057742fc50f138d932b50b2709a02c7a42c6c6\": container with ID starting with 180e33a7cc131a748eacd1fe35057742fc50f138d932b50b2709a02c7a42c6c6 not found: ID does not exist" Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.507157 4773 scope.go:117] "RemoveContainer" containerID="74b4d4684664b139139bb3348e4ff958b3867224667c4e0b40ecd5eabab182dd" 
Jan 22 12:15:48 crc kubenswrapper[4773]: E0122 12:15:48.507408 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74b4d4684664b139139bb3348e4ff958b3867224667c4e0b40ecd5eabab182dd\": container with ID starting with 74b4d4684664b139139bb3348e4ff958b3867224667c4e0b40ecd5eabab182dd not found: ID does not exist" containerID="74b4d4684664b139139bb3348e4ff958b3867224667c4e0b40ecd5eabab182dd" Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.507434 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74b4d4684664b139139bb3348e4ff958b3867224667c4e0b40ecd5eabab182dd"} err="failed to get container status \"74b4d4684664b139139bb3348e4ff958b3867224667c4e0b40ecd5eabab182dd\": rpc error: code = NotFound desc = could not find container \"74b4d4684664b139139bb3348e4ff958b3867224667c4e0b40ecd5eabab182dd\": container with ID starting with 74b4d4684664b139139bb3348e4ff958b3867224667c4e0b40ecd5eabab182dd not found: ID does not exist" Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.528765 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmfs2\" (UniqueName: \"kubernetes.io/projected/87cf9391-4cf2-4f02-a532-e15917e46cb4-kube-api-access-bmfs2\") pod \"87cf9391-4cf2-4f02-a532-e15917e46cb4\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.528870 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-ovsdbserver-sb\") pod \"87cf9391-4cf2-4f02-a532-e15917e46cb4\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.528947 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-dns-svc\") pod \"87cf9391-4cf2-4f02-a532-e15917e46cb4\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.529015 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-config\") pod \"87cf9391-4cf2-4f02-a532-e15917e46cb4\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.529733 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-ovsdbserver-nb\") pod \"87cf9391-4cf2-4f02-a532-e15917e46cb4\" (UID: \"87cf9391-4cf2-4f02-a532-e15917e46cb4\") " Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.536597 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf9391-4cf2-4f02-a532-e15917e46cb4-kube-api-access-bmfs2" (OuterVolumeSpecName: "kube-api-access-bmfs2") pod "87cf9391-4cf2-4f02-a532-e15917e46cb4" (UID: "87cf9391-4cf2-4f02-a532-e15917e46cb4"). InnerVolumeSpecName "kube-api-access-bmfs2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.586527 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "87cf9391-4cf2-4f02-a532-e15917e46cb4" (UID: "87cf9391-4cf2-4f02-a532-e15917e46cb4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.611776 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "87cf9391-4cf2-4f02-a532-e15917e46cb4" (UID: "87cf9391-4cf2-4f02-a532-e15917e46cb4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.629153 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "87cf9391-4cf2-4f02-a532-e15917e46cb4" (UID: "87cf9391-4cf2-4f02-a532-e15917e46cb4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.637186 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.637232 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.637246 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.637261 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmfs2\" (UniqueName: \"kubernetes.io/projected/87cf9391-4cf2-4f02-a532-e15917e46cb4-kube-api-access-bmfs2\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.645854 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-config" (OuterVolumeSpecName: "config") pod "87cf9391-4cf2-4f02-a532-e15917e46cb4" (UID: "87cf9391-4cf2-4f02-a532-e15917e46cb4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.739025 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87cf9391-4cf2-4f02-a532-e15917e46cb4-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.767255 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-7qmzz"] Jan 22 12:15:48 crc kubenswrapper[4773]: I0122 12:15:48.778593 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-7qmzz"] Jan 22 12:15:50 crc kubenswrapper[4773]: I0122 12:15:50.467682 4773 generic.go:334] "Generic (PLEG): container finished" podID="fd0c3eeb-ff3e-4718-932d-6453b4d6f189" containerID="8d1e9af75c0e1f61a4f35634e8cf84897907c8ed147661c4cf3eed4f653b5990" exitCode=0 Jan 22 12:15:50 crc kubenswrapper[4773]: I0122 12:15:50.467738 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-v7xxw" event={"ID":"fd0c3eeb-ff3e-4718-932d-6453b4d6f189","Type":"ContainerDied","Data":"8d1e9af75c0e1f61a4f35634e8cf84897907c8ed147661c4cf3eed4f653b5990"} Jan 22 12:15:50 crc kubenswrapper[4773]: I0122 12:15:50.667752 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf9391-4cf2-4f02-a532-e15917e46cb4" path="/var/lib/kubelet/pods/87cf9391-4cf2-4f02-a532-e15917e46cb4/volumes" Jan 22 12:15:51 crc kubenswrapper[4773]: I0122 12:15:51.501453 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rqktn" event={"ID":"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41","Type":"ContainerDied","Data":"858fe7c568845f4c55a8c03dec3382290ef6c422cd08d419d67d8b3aee2a814d"} Jan 22 12:15:51 crc kubenswrapper[4773]: I0122 12:15:51.501411 4773 generic.go:334] "Generic (PLEG): container finished" podID="f9fe524c-8ecb-4e39-a1f6-2544aeb74b41" containerID="858fe7c568845f4c55a8c03dec3382290ef6c422cd08d419d67d8b3aee2a814d" exitCode=0 Jan 22 12:15:51 crc kubenswrapper[4773]: I0122 12:15:51.796745 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-v7xxw" Jan 22 12:15:51 crc kubenswrapper[4773]: I0122 12:15:51.904693 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd0c3eeb-ff3e-4718-932d-6453b4d6f189-combined-ca-bundle\") pod \"fd0c3eeb-ff3e-4718-932d-6453b4d6f189\" (UID: \"fd0c3eeb-ff3e-4718-932d-6453b4d6f189\") " Jan 22 12:15:51 crc kubenswrapper[4773]: I0122 12:15:51.904778 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87sts\" (UniqueName: \"kubernetes.io/projected/fd0c3eeb-ff3e-4718-932d-6453b4d6f189-kube-api-access-87sts\") pod \"fd0c3eeb-ff3e-4718-932d-6453b4d6f189\" (UID: \"fd0c3eeb-ff3e-4718-932d-6453b4d6f189\") " Jan 22 12:15:51 crc kubenswrapper[4773]: I0122 12:15:51.904825 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd0c3eeb-ff3e-4718-932d-6453b4d6f189-config-data\") pod \"fd0c3eeb-ff3e-4718-932d-6453b4d6f189\" (UID: \"fd0c3eeb-ff3e-4718-932d-6453b4d6f189\") " Jan 22 12:15:51 crc kubenswrapper[4773]: I0122 12:15:51.910543 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd0c3eeb-ff3e-4718-932d-6453b4d6f189-kube-api-access-87sts" (OuterVolumeSpecName: "kube-api-access-87sts") pod "fd0c3eeb-ff3e-4718-932d-6453b4d6f189" (UID: "fd0c3eeb-ff3e-4718-932d-6453b4d6f189"). InnerVolumeSpecName "kube-api-access-87sts". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:15:51 crc kubenswrapper[4773]: I0122 12:15:51.936536 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd0c3eeb-ff3e-4718-932d-6453b4d6f189-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd0c3eeb-ff3e-4718-932d-6453b4d6f189" (UID: "fd0c3eeb-ff3e-4718-932d-6453b4d6f189"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:15:51 crc kubenswrapper[4773]: I0122 12:15:51.951326 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd0c3eeb-ff3e-4718-932d-6453b4d6f189-config-data" (OuterVolumeSpecName: "config-data") pod "fd0c3eeb-ff3e-4718-932d-6453b4d6f189" (UID: "fd0c3eeb-ff3e-4718-932d-6453b4d6f189"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.007139 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd0c3eeb-ff3e-4718-932d-6453b4d6f189-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.007186 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87sts\" (UniqueName: \"kubernetes.io/projected/fd0c3eeb-ff3e-4718-932d-6453b4d6f189-kube-api-access-87sts\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.007207 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd0c3eeb-ff3e-4718-932d-6453b4d6f189-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.513769 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-v7xxw" event={"ID":"fd0c3eeb-ff3e-4718-932d-6453b4d6f189","Type":"ContainerDied","Data":"444311f7a849595f6ea1084047b6f78c1d1590ed2a77420c398621ce0923fee5"} Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.513826 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="444311f7a849595f6ea1084047b6f78c1d1590ed2a77420c398621ce0923fee5" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.513821 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-v7xxw" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.767701 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-767d96458c-lvflp"] Jan 22 12:15:52 crc kubenswrapper[4773]: E0122 12:15:52.768337 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7054d5a6-f005-40f2-bcfe-95835a0aa45d" containerName="mariadb-account-create-update" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.768351 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7054d5a6-f005-40f2-bcfe-95835a0aa45d" containerName="mariadb-account-create-update" Jan 22 12:15:52 crc kubenswrapper[4773]: E0122 12:15:52.768359 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a436a8ee-6b7a-4ea5-9056-c289241686e6" containerName="mariadb-database-create" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.768366 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a436a8ee-6b7a-4ea5-9056-c289241686e6" containerName="mariadb-database-create" Jan 22 12:15:52 crc kubenswrapper[4773]: E0122 12:15:52.768384 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e1843f0-45c3-4e84-ab98-b7db909e37bd" containerName="mariadb-database-create" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.768393 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e1843f0-45c3-4e84-ab98-b7db909e37bd" containerName="mariadb-database-create" Jan 22 12:15:52 crc kubenswrapper[4773]: E0122 12:15:52.768405 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd0c3eeb-ff3e-4718-932d-6453b4d6f189" containerName="keystone-db-sync" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.768413 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd0c3eeb-ff3e-4718-932d-6453b4d6f189" containerName="keystone-db-sync" Jan 22 12:15:52 crc kubenswrapper[4773]: E0122 12:15:52.768427 4773 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="87cf9391-4cf2-4f02-a532-e15917e46cb4" containerName="dnsmasq-dns" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.768461 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="87cf9391-4cf2-4f02-a532-e15917e46cb4" containerName="dnsmasq-dns" Jan 22 12:15:52 crc kubenswrapper[4773]: E0122 12:15:52.768472 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54460d78-28b8-49c7-85ec-d4333db4b86c" containerName="mariadb-account-create-update" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.768482 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="54460d78-28b8-49c7-85ec-d4333db4b86c" containerName="mariadb-account-create-update" Jan 22 12:15:52 crc kubenswrapper[4773]: E0122 12:15:52.768497 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87cf9391-4cf2-4f02-a532-e15917e46cb4" containerName="init" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.768505 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="87cf9391-4cf2-4f02-a532-e15917e46cb4" containerName="init" Jan 22 12:15:52 crc kubenswrapper[4773]: E0122 12:15:52.768514 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0419433e-4ad6-4e7e-acb0-e769c5ba611c" containerName="mariadb-account-create-update" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.768522 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="0419433e-4ad6-4e7e-acb0-e769c5ba611c" containerName="mariadb-account-create-update" Jan 22 12:15:52 crc kubenswrapper[4773]: E0122 12:15:52.768534 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fcf03f3-751b-4dd9-a44b-531164a3b4d4" containerName="mariadb-database-create" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.768543 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fcf03f3-751b-4dd9-a44b-531164a3b4d4" containerName="mariadb-database-create" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.768728 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fcf03f3-751b-4dd9-a44b-531164a3b4d4" containerName="mariadb-database-create" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.768742 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="0419433e-4ad6-4e7e-acb0-e769c5ba611c" containerName="mariadb-account-create-update" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.768755 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="87cf9391-4cf2-4f02-a532-e15917e46cb4" containerName="dnsmasq-dns" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.768765 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd0c3eeb-ff3e-4718-932d-6453b4d6f189" containerName="keystone-db-sync" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.768776 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e1843f0-45c3-4e84-ab98-b7db909e37bd" containerName="mariadb-database-create" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.768792 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7054d5a6-f005-40f2-bcfe-95835a0aa45d" containerName="mariadb-account-create-update" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.768803 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="a436a8ee-6b7a-4ea5-9056-c289241686e6" containerName="mariadb-database-create" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.768813 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="54460d78-28b8-49c7-85ec-d4333db4b86c" 
containerName="mariadb-account-create-update" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.769605 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.785898 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-vtlvs"] Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.787222 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.791099 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.791301 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-92k4l" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.791507 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.791637 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.791757 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.802412 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-767d96458c-lvflp"] Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.826426 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-vtlvs"] Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.933741 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-credential-keys\") pod \"keystone-bootstrap-vtlvs\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.933995 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-scripts\") pod \"keystone-bootstrap-vtlvs\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.934118 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-dns-svc\") pod \"dnsmasq-dns-767d96458c-lvflp\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.934220 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-config-data\") pod \"keystone-bootstrap-vtlvs\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.934590 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-dns-swift-storage-0\") pod \"dnsmasq-dns-767d96458c-lvflp\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.934691 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-config\") pod \"dnsmasq-dns-767d96458c-lvflp\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.934794 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-ovsdbserver-sb\") pod \"dnsmasq-dns-767d96458c-lvflp\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.934876 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-ovsdbserver-nb\") pod \"dnsmasq-dns-767d96458c-lvflp\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.934953 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmkgr\" (UniqueName: \"kubernetes.io/projected/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-kube-api-access-hmkgr\") pod \"keystone-bootstrap-vtlvs\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.935045 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-combined-ca-bundle\") pod \"keystone-bootstrap-vtlvs\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.935131 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58gk2\" (UniqueName: \"kubernetes.io/projected/f1959a98-6c40-428c-99aa-5342f8c4adc3-kube-api-access-58gk2\") pod \"dnsmasq-dns-767d96458c-lvflp\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.935218 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-fernet-keys\") pod \"keystone-bootstrap-vtlvs\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.979265 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-vqtpx"] Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.980786 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-vqtpx" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.992197 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.992515 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 22 12:15:52 crc kubenswrapper[4773]: I0122 12:15:52.992629 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-hsdl7" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.003076 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-ld8bk"] Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.004331 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.008126 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-xzrrd" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.008368 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.008495 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.017791 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-vqtpx"] Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.031471 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-ld8bk"] Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.055037 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-fernet-keys\") pod \"keystone-bootstrap-vtlvs\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.055162 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-credential-keys\") pod \"keystone-bootstrap-vtlvs\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.055239 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-scripts\") pod \"keystone-bootstrap-vtlvs\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.055299 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-dns-svc\") pod \"dnsmasq-dns-767d96458c-lvflp\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.055327 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-config-data\") pod \"keystone-bootstrap-vtlvs\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " 
pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.055382 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-dns-swift-storage-0\") pod \"dnsmasq-dns-767d96458c-lvflp\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.055411 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-config\") pod \"dnsmasq-dns-767d96458c-lvflp\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.055479 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-ovsdbserver-sb\") pod \"dnsmasq-dns-767d96458c-lvflp\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.055515 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-ovsdbserver-nb\") pod \"dnsmasq-dns-767d96458c-lvflp\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.055531 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmkgr\" (UniqueName: \"kubernetes.io/projected/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-kube-api-access-hmkgr\") pod \"keystone-bootstrap-vtlvs\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.055590 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-combined-ca-bundle\") pod \"keystone-bootstrap-vtlvs\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.055636 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58gk2\" (UniqueName: \"kubernetes.io/projected/f1959a98-6c40-428c-99aa-5342f8c4adc3-kube-api-access-58gk2\") pod \"dnsmasq-dns-767d96458c-lvflp\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.057118 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-config\") pod \"dnsmasq-dns-767d96458c-lvflp\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.057706 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-dns-svc\") pod \"dnsmasq-dns-767d96458c-lvflp\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 
12:15:53.066319 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-ovsdbserver-nb\") pod \"dnsmasq-dns-767d96458c-lvflp\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.066358 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-scripts\") pod \"keystone-bootstrap-vtlvs\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.066895 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-fernet-keys\") pod \"keystone-bootstrap-vtlvs\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.070074 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-config-data\") pod \"keystone-bootstrap-vtlvs\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.070171 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-ovsdbserver-sb\") pod \"dnsmasq-dns-767d96458c-lvflp\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.070833 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-credential-keys\") pod \"keystone-bootstrap-vtlvs\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.076307 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-dns-swift-storage-0\") pod \"dnsmasq-dns-767d96458c-lvflp\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.101226 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-combined-ca-bundle\") pod \"keystone-bootstrap-vtlvs\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.111075 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58gk2\" (UniqueName: \"kubernetes.io/projected/f1959a98-6c40-428c-99aa-5342f8c4adc3-kube-api-access-58gk2\") pod \"dnsmasq-dns-767d96458c-lvflp\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.118714 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-fg4x7"] Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.120055 
4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-fg4x7" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.127217 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.127222 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-js8m7" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.127345 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.130944 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmkgr\" (UniqueName: \"kubernetes.io/projected/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-kube-api-access-hmkgr\") pod \"keystone-bootstrap-vtlvs\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.143233 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-fg4x7"] Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.160804 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-combined-ca-bundle\") pod \"cinder-db-sync-ld8bk\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.160863 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmw2g\" (UniqueName: \"kubernetes.io/projected/4958bcb5-02cc-4c59-8b85-c386263dd3b8-kube-api-access-rmw2g\") pod \"cinder-db-sync-ld8bk\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.160910 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plpxb\" (UniqueName: \"kubernetes.io/projected/1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5-kube-api-access-plpxb\") pod \"neutron-db-sync-vqtpx\" (UID: \"1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5\") " pod="openstack/neutron-db-sync-vqtpx" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.160934 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-scripts\") pod \"cinder-db-sync-ld8bk\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.160973 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5-config\") pod \"neutron-db-sync-vqtpx\" (UID: \"1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5\") " pod="openstack/neutron-db-sync-vqtpx" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.161005 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5-combined-ca-bundle\") pod \"neutron-db-sync-vqtpx\" (UID: \"1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5\") " pod="openstack/neutron-db-sync-vqtpx" Jan 22 12:15:53 crc 
kubenswrapper[4773]: I0122 12:15:53.161053 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-db-sync-config-data\") pod \"cinder-db-sync-ld8bk\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.161099 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-config-data\") pod \"cinder-db-sync-ld8bk\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.161245 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4958bcb5-02cc-4c59-8b85-c386263dd3b8-etc-machine-id\") pod \"cinder-db-sync-ld8bk\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.161671 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-767d96458c-lvflp"] Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.188408 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.217823 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-4p9mz"] Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.221257 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-rqktn" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.227250 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-4p9mz" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.238643 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-dlqk2" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.238843 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.265518 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4958bcb5-02cc-4c59-8b85-c386263dd3b8-etc-machine-id\") pod \"cinder-db-sync-ld8bk\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.265560 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-config-data\") pod \"cinder-db-sync-ld8bk\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.265605 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4lgqt\" (UniqueName: \"kubernetes.io/projected/e83eccf7-20cf-4226-b994-5d00f3cef915-kube-api-access-4lgqt\") pod \"placement-db-sync-fg4x7\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " pod="openstack/placement-db-sync-fg4x7" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.265632 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e83eccf7-20cf-4226-b994-5d00f3cef915-combined-ca-bundle\") pod \"placement-db-sync-fg4x7\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " pod="openstack/placement-db-sync-fg4x7" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.265677 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-combined-ca-bundle\") pod \"cinder-db-sync-ld8bk\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.265710 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmw2g\" (UniqueName: \"kubernetes.io/projected/4958bcb5-02cc-4c59-8b85-c386263dd3b8-kube-api-access-rmw2g\") pod \"cinder-db-sync-ld8bk\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.265746 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plpxb\" (UniqueName: \"kubernetes.io/projected/1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5-kube-api-access-plpxb\") pod \"neutron-db-sync-vqtpx\" (UID: \"1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5\") " pod="openstack/neutron-db-sync-vqtpx" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.265765 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e83eccf7-20cf-4226-b994-5d00f3cef915-scripts\") pod \"placement-db-sync-fg4x7\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " pod="openstack/placement-db-sync-fg4x7" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 
12:15:53.265789 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e83eccf7-20cf-4226-b994-5d00f3cef915-logs\") pod \"placement-db-sync-fg4x7\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " pod="openstack/placement-db-sync-fg4x7" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.265812 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-scripts\") pod \"cinder-db-sync-ld8bk\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.265833 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e83eccf7-20cf-4226-b994-5d00f3cef915-config-data\") pod \"placement-db-sync-fg4x7\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " pod="openstack/placement-db-sync-fg4x7" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.265862 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5-config\") pod \"neutron-db-sync-vqtpx\" (UID: \"1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5\") " pod="openstack/neutron-db-sync-vqtpx" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.265897 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5-combined-ca-bundle\") pod \"neutron-db-sync-vqtpx\" (UID: \"1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5\") " pod="openstack/neutron-db-sync-vqtpx" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.265962 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-db-sync-config-data\") pod \"cinder-db-sync-ld8bk\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.267790 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4958bcb5-02cc-4c59-8b85-c386263dd3b8-etc-machine-id\") pod \"cinder-db-sync-ld8bk\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.272652 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-4p9mz"] Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.283352 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5-combined-ca-bundle\") pod \"neutron-db-sync-vqtpx\" (UID: \"1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5\") " pod="openstack/neutron-db-sync-vqtpx" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.289177 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5-config\") pod \"neutron-db-sync-vqtpx\" (UID: \"1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5\") " pod="openstack/neutron-db-sync-vqtpx" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.295758 4773 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-config-data\") pod \"cinder-db-sync-ld8bk\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.295861 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fc6d4ffc7-sq429"] Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.296796 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-db-sync-config-data\") pod \"cinder-db-sync-ld8bk\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: E0122 12:15:53.297469 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9fe524c-8ecb-4e39-a1f6-2544aeb74b41" containerName="glance-db-sync" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.297491 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9fe524c-8ecb-4e39-a1f6-2544aeb74b41" containerName="glance-db-sync" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.297982 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9fe524c-8ecb-4e39-a1f6-2544aeb74b41" containerName="glance-db-sync" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.299804 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-scripts\") pod \"cinder-db-sync-ld8bk\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.300154 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.304130 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-combined-ca-bundle\") pod \"cinder-db-sync-ld8bk\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.329624 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plpxb\" (UniqueName: \"kubernetes.io/projected/1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5-kube-api-access-plpxb\") pod \"neutron-db-sync-vqtpx\" (UID: \"1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5\") " pod="openstack/neutron-db-sync-vqtpx" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.333996 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmw2g\" (UniqueName: \"kubernetes.io/projected/4958bcb5-02cc-4c59-8b85-c386263dd3b8-kube-api-access-rmw2g\") pod \"cinder-db-sync-ld8bk\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.334065 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fc6d4ffc7-sq429"] Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.349468 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.353950 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.360924 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.360999 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.366837 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-db-sync-config-data\") pod \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\" (UID: \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\") " Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.366916 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fblcd\" (UniqueName: \"kubernetes.io/projected/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-kube-api-access-fblcd\") pod \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\" (UID: \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\") " Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.366991 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-config-data\") pod \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\" (UID: \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\") " Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.367138 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-combined-ca-bundle\") pod \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\" (UID: \"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41\") " Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.367390 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93c45ab9-7e10-4217-93fa-5d801fb55c6d-combined-ca-bundle\") pod \"barbican-db-sync-4p9mz\" (UID: \"93c45ab9-7e10-4217-93fa-5d801fb55c6d\") " pod="openstack/barbican-db-sync-4p9mz" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.367490 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/93c45ab9-7e10-4217-93fa-5d801fb55c6d-db-sync-config-data\") pod \"barbican-db-sync-4p9mz\" (UID: \"93c45ab9-7e10-4217-93fa-5d801fb55c6d\") " pod="openstack/barbican-db-sync-4p9mz" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.367529 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lgqt\" (UniqueName: \"kubernetes.io/projected/e83eccf7-20cf-4226-b994-5d00f3cef915-kube-api-access-4lgqt\") pod \"placement-db-sync-fg4x7\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " pod="openstack/placement-db-sync-fg4x7" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.367554 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e83eccf7-20cf-4226-b994-5d00f3cef915-combined-ca-bundle\") pod \"placement-db-sync-fg4x7\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " pod="openstack/placement-db-sync-fg4x7" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.367596 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-rjhkr\" (UniqueName: \"kubernetes.io/projected/93c45ab9-7e10-4217-93fa-5d801fb55c6d-kube-api-access-rjhkr\") pod \"barbican-db-sync-4p9mz\" (UID: \"93c45ab9-7e10-4217-93fa-5d801fb55c6d\") " pod="openstack/barbican-db-sync-4p9mz" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.367637 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e83eccf7-20cf-4226-b994-5d00f3cef915-scripts\") pod \"placement-db-sync-fg4x7\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " pod="openstack/placement-db-sync-fg4x7" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.367667 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e83eccf7-20cf-4226-b994-5d00f3cef915-logs\") pod \"placement-db-sync-fg4x7\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " pod="openstack/placement-db-sync-fg4x7" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.367687 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e83eccf7-20cf-4226-b994-5d00f3cef915-config-data\") pod \"placement-db-sync-fg4x7\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " pod="openstack/placement-db-sync-fg4x7" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.368646 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e83eccf7-20cf-4226-b994-5d00f3cef915-logs\") pod \"placement-db-sync-fg4x7\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " pod="openstack/placement-db-sync-fg4x7" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.372477 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-kube-api-access-fblcd" (OuterVolumeSpecName: "kube-api-access-fblcd") pod "f9fe524c-8ecb-4e39-a1f6-2544aeb74b41" (UID: "f9fe524c-8ecb-4e39-a1f6-2544aeb74b41"). InnerVolumeSpecName "kube-api-access-fblcd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.374564 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e83eccf7-20cf-4226-b994-5d00f3cef915-config-data\") pod \"placement-db-sync-fg4x7\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " pod="openstack/placement-db-sync-fg4x7" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.381826 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e83eccf7-20cf-4226-b994-5d00f3cef915-combined-ca-bundle\") pod \"placement-db-sync-fg4x7\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " pod="openstack/placement-db-sync-fg4x7" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.387234 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.389607 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e83eccf7-20cf-4226-b994-5d00f3cef915-scripts\") pod \"placement-db-sync-fg4x7\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " pod="openstack/placement-db-sync-fg4x7" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.394080 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "f9fe524c-8ecb-4e39-a1f6-2544aeb74b41" (UID: "f9fe524c-8ecb-4e39-a1f6-2544aeb74b41"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.410543 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.412595 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4lgqt\" (UniqueName: \"kubernetes.io/projected/e83eccf7-20cf-4226-b994-5d00f3cef915-kube-api-access-4lgqt\") pod \"placement-db-sync-fg4x7\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " pod="openstack/placement-db-sync-fg4x7" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.415783 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f9fe524c-8ecb-4e39-a1f6-2544aeb74b41" (UID: "f9fe524c-8ecb-4e39-a1f6-2544aeb74b41"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.452897 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-config-data" (OuterVolumeSpecName: "config-data") pod "f9fe524c-8ecb-4e39-a1f6-2544aeb74b41" (UID: "f9fe524c-8ecb-4e39-a1f6-2544aeb74b41"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.469505 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-dns-svc\") pod \"dnsmasq-dns-7fc6d4ffc7-sq429\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.469582 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-config-data\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.469604 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-ovsdbserver-sb\") pod \"dnsmasq-dns-7fc6d4ffc7-sq429\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.469637 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-dns-swift-storage-0\") pod \"dnsmasq-dns-7fc6d4ffc7-sq429\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.469664 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/93c45ab9-7e10-4217-93fa-5d801fb55c6d-db-sync-config-data\") pod \"barbican-db-sync-4p9mz\" (UID: \"93c45ab9-7e10-4217-93fa-5d801fb55c6d\") " pod="openstack/barbican-db-sync-4p9mz" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.469772 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-ovsdbserver-nb\") pod \"dnsmasq-dns-7fc6d4ffc7-sq429\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.469837 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-scripts\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.469951 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjhkr\" (UniqueName: \"kubernetes.io/projected/93c45ab9-7e10-4217-93fa-5d801fb55c6d-kube-api-access-rjhkr\") pod \"barbican-db-sync-4p9mz\" (UID: \"93c45ab9-7e10-4217-93fa-5d801fb55c6d\") " pod="openstack/barbican-db-sync-4p9mz" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.469979 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " 
pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.469999 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ce9300d-fcb4-43e9-810f-5000109e6799-log-httpd\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.470160 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93c45ab9-7e10-4217-93fa-5d801fb55c6d-combined-ca-bundle\") pod \"barbican-db-sync-4p9mz\" (UID: \"93c45ab9-7e10-4217-93fa-5d801fb55c6d\") " pod="openstack/barbican-db-sync-4p9mz" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.470202 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.470240 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-config\") pod \"dnsmasq-dns-7fc6d4ffc7-sq429\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.470332 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ce9300d-fcb4-43e9-810f-5000109e6799-run-httpd\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.470396 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6t7c\" (UniqueName: \"kubernetes.io/projected/5e872a34-92db-48b1-83b7-adc3dcc940d1-kube-api-access-d6t7c\") pod \"dnsmasq-dns-7fc6d4ffc7-sq429\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.470445 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnpdg\" (UniqueName: \"kubernetes.io/projected/7ce9300d-fcb4-43e9-810f-5000109e6799-kube-api-access-rnpdg\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.470519 4773 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.470538 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fblcd\" (UniqueName: \"kubernetes.io/projected/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-kube-api-access-fblcd\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.470554 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 
12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.470567 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.473450 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93c45ab9-7e10-4217-93fa-5d801fb55c6d-combined-ca-bundle\") pod \"barbican-db-sync-4p9mz\" (UID: \"93c45ab9-7e10-4217-93fa-5d801fb55c6d\") " pod="openstack/barbican-db-sync-4p9mz" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.474660 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/93c45ab9-7e10-4217-93fa-5d801fb55c6d-db-sync-config-data\") pod \"barbican-db-sync-4p9mz\" (UID: \"93c45ab9-7e10-4217-93fa-5d801fb55c6d\") " pod="openstack/barbican-db-sync-4p9mz" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.502004 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjhkr\" (UniqueName: \"kubernetes.io/projected/93c45ab9-7e10-4217-93fa-5d801fb55c6d-kube-api-access-rjhkr\") pod \"barbican-db-sync-4p9mz\" (UID: \"93c45ab9-7e10-4217-93fa-5d801fb55c6d\") " pod="openstack/barbican-db-sync-4p9mz" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.546755 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rqktn" event={"ID":"f9fe524c-8ecb-4e39-a1f6-2544aeb74b41","Type":"ContainerDied","Data":"ba40fa810d9b613dd57ab64a71d22eb9c7e8adc60a4fdee5ba5838f13a0649dd"} Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.546797 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba40fa810d9b613dd57ab64a71d22eb9c7e8adc60a4fdee5ba5838f13a0649dd" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.546859 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-rqktn" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.574876 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.574935 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ce9300d-fcb4-43e9-810f-5000109e6799-log-httpd\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.575058 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.575087 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-config\") pod \"dnsmasq-dns-7fc6d4ffc7-sq429\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.575159 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ce9300d-fcb4-43e9-810f-5000109e6799-run-httpd\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.575190 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6t7c\" (UniqueName: \"kubernetes.io/projected/5e872a34-92db-48b1-83b7-adc3dcc940d1-kube-api-access-d6t7c\") pod \"dnsmasq-dns-7fc6d4ffc7-sq429\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.575211 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnpdg\" (UniqueName: \"kubernetes.io/projected/7ce9300d-fcb4-43e9-810f-5000109e6799-kube-api-access-rnpdg\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.575230 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-dns-svc\") pod \"dnsmasq-dns-7fc6d4ffc7-sq429\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.575276 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-config-data\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.575318 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-ovsdbserver-sb\") pod \"dnsmasq-dns-7fc6d4ffc7-sq429\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.575361 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-dns-swift-storage-0\") pod \"dnsmasq-dns-7fc6d4ffc7-sq429\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.575433 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-ovsdbserver-nb\") pod \"dnsmasq-dns-7fc6d4ffc7-sq429\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.575451 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-scripts\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.576717 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ce9300d-fcb4-43e9-810f-5000109e6799-log-httpd\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.577767 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ce9300d-fcb4-43e9-810f-5000109e6799-run-httpd\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.581273 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-dns-svc\") pod \"dnsmasq-dns-7fc6d4ffc7-sq429\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.581447 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-config\") pod \"dnsmasq-dns-7fc6d4ffc7-sq429\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.582842 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-dns-swift-storage-0\") pod \"dnsmasq-dns-7fc6d4ffc7-sq429\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.582885 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-ovsdbserver-nb\") pod \"dnsmasq-dns-7fc6d4ffc7-sq429\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc 
kubenswrapper[4773]: I0122 12:15:53.599681 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-ovsdbserver-sb\") pod \"dnsmasq-dns-7fc6d4ffc7-sq429\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.607934 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.608002 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-scripts\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.608315 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.608436 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-config-data\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.612926 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6t7c\" (UniqueName: \"kubernetes.io/projected/5e872a34-92db-48b1-83b7-adc3dcc940d1-kube-api-access-d6t7c\") pod \"dnsmasq-dns-7fc6d4ffc7-sq429\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.614015 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnpdg\" (UniqueName: \"kubernetes.io/projected/7ce9300d-fcb4-43e9-810f-5000109e6799-kube-api-access-rnpdg\") pod \"ceilometer-0\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.623014 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-vqtpx" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.632563 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.638723 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-fg4x7" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.660371 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.660891 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-4p9mz" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.685686 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.802831 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-767d96458c-lvflp"] Jan 22 12:15:53 crc kubenswrapper[4773]: I0122 12:15:53.982819 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-vtlvs"] Jan 22 12:15:54 crc kubenswrapper[4773]: W0122 12:15:54.080549 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02d9a5f0_dc27_4a71_a89a_ebd7dc9f3de1.slice/crio-ffe5bf0e4e69f5de240c7149ff95ab31be9bb0058df203ce824465b7bccf2724 WatchSource:0}: Error finding container ffe5bf0e4e69f5de240c7149ff95ab31be9bb0058df203ce824465b7bccf2724: Status 404 returned error can't find the container with id ffe5bf0e4e69f5de240c7149ff95ab31be9bb0058df203ce824465b7bccf2724 Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.240951 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fc6d4ffc7-sq429"] Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.301356 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6f6f8cb849-fgdwh"] Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.302988 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.350777 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-vqtpx"] Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.373582 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f6f8cb849-fgdwh"] Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.408308 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-dns-swift-storage-0\") pod \"dnsmasq-dns-6f6f8cb849-fgdwh\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.408355 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-config\") pod \"dnsmasq-dns-6f6f8cb849-fgdwh\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.408383 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-ovsdbserver-sb\") pod \"dnsmasq-dns-6f6f8cb849-fgdwh\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.408407 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-dns-svc\") pod \"dnsmasq-dns-6f6f8cb849-fgdwh\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.409444 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjxnv\" 
(UniqueName: \"kubernetes.io/projected/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-kube-api-access-wjxnv\") pod \"dnsmasq-dns-6f6f8cb849-fgdwh\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.409603 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-ovsdbserver-nb\") pod \"dnsmasq-dns-6f6f8cb849-fgdwh\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.511081 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjxnv\" (UniqueName: \"kubernetes.io/projected/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-kube-api-access-wjxnv\") pod \"dnsmasq-dns-6f6f8cb849-fgdwh\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.511159 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-ovsdbserver-nb\") pod \"dnsmasq-dns-6f6f8cb849-fgdwh\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.511318 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-dns-swift-storage-0\") pod \"dnsmasq-dns-6f6f8cb849-fgdwh\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.511352 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-config\") pod \"dnsmasq-dns-6f6f8cb849-fgdwh\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.511383 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-ovsdbserver-sb\") pod \"dnsmasq-dns-6f6f8cb849-fgdwh\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.511412 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-dns-svc\") pod \"dnsmasq-dns-6f6f8cb849-fgdwh\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.512638 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-dns-swift-storage-0\") pod \"dnsmasq-dns-6f6f8cb849-fgdwh\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.512691 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-config\") pod \"dnsmasq-dns-6f6f8cb849-fgdwh\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.512727 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-ovsdbserver-sb\") pod \"dnsmasq-dns-6f6f8cb849-fgdwh\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.513535 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-ovsdbserver-nb\") pod \"dnsmasq-dns-6f6f8cb849-fgdwh\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.513982 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-dns-svc\") pod \"dnsmasq-dns-6f6f8cb849-fgdwh\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.532539 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjxnv\" (UniqueName: \"kubernetes.io/projected/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-kube-api-access-wjxnv\") pod \"dnsmasq-dns-6f6f8cb849-fgdwh\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.570325 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-767d96458c-lvflp" event={"ID":"f1959a98-6c40-428c-99aa-5342f8c4adc3","Type":"ContainerStarted","Data":"90d2f60b1ac20ae104b14aee693cc1056130eff36d231af1ef8b393d7dd454ab"} Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.572822 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-vqtpx" event={"ID":"1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5","Type":"ContainerStarted","Data":"656db3c7383c4324e10859c52697dcac61977560cb382dbbc9876229554ef8ac"} Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.574772 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vtlvs" event={"ID":"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1","Type":"ContainerStarted","Data":"ffe5bf0e4e69f5de240c7149ff95ab31be9bb0058df203ce824465b7bccf2724"} Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.601699 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-ld8bk"] Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.765850 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-4p9mz"] Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.770676 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:54 crc kubenswrapper[4773]: W0122 12:15:54.835961 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod93c45ab9_7e10_4217_93fa_5d801fb55c6d.slice/crio-45352a9351ded2c7335de28dc951e32646ccb1dcd868d068bfb4331ff28fb486 WatchSource:0}: Error finding container 45352a9351ded2c7335de28dc951e32646ccb1dcd868d068bfb4331ff28fb486: Status 404 returned error can't find the container with id 45352a9351ded2c7335de28dc951e32646ccb1dcd868d068bfb4331ff28fb486 Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.970005 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fc6d4ffc7-sq429"] Jan 22 12:15:54 crc kubenswrapper[4773]: I0122 12:15:54.994826 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:15:55 crc kubenswrapper[4773]: I0122 12:15:55.019903 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-fg4x7"] Jan 22 12:15:55 crc kubenswrapper[4773]: I0122 12:15:55.206566 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 12:15:55 crc kubenswrapper[4773]: I0122 12:15:55.208033 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 12:15:55 crc kubenswrapper[4773]: I0122 12:15:55.215123 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-tmks6" Jan 22 12:15:55 crc kubenswrapper[4773]: I0122 12:15:55.215474 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Jan 22 12:15:55 crc kubenswrapper[4773]: I0122 12:15:55.216185 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 22 12:15:55 crc kubenswrapper[4773]: I0122 12:15:55.225125 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 12:15:55 crc kubenswrapper[4773]: I0122 12:15:55.543102 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b0598514-17c4-4af5-879b-b93cee40e6d1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:55 crc kubenswrapper[4773]: I0122 12:15:55.543213 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0598514-17c4-4af5-879b-b93cee40e6d1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:55 crc kubenswrapper[4773]: I0122 12:15:55.543267 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:55 crc kubenswrapper[4773]: I0122 12:15:55.543343 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/b0598514-17c4-4af5-879b-b93cee40e6d1-logs\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:55 crc kubenswrapper[4773]: I0122 12:15:55.543384 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0598514-17c4-4af5-879b-b93cee40e6d1-scripts\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:55 crc kubenswrapper[4773]: I0122 12:15:55.543419 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0598514-17c4-4af5-879b-b93cee40e6d1-config-data\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:55 crc kubenswrapper[4773]: I0122 12:15:55.543456 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jz6j\" (UniqueName: \"kubernetes.io/projected/b0598514-17c4-4af5-879b-b93cee40e6d1-kube-api-access-6jz6j\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:55 crc kubenswrapper[4773]: I0122 12:15:55.558420 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 12:15:55 crc kubenswrapper[4773]: I0122 12:15:55.560112 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 12:15:55 crc kubenswrapper[4773]: I0122 12:15:55.562824 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 22 12:15:55 crc kubenswrapper[4773]: I0122 12:15:55.565850 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.069125 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jz6j\" (UniqueName: \"kubernetes.io/projected/b0598514-17c4-4af5-879b-b93cee40e6d1-kube-api-access-6jz6j\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.069396 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b0598514-17c4-4af5-879b-b93cee40e6d1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.069466 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0598514-17c4-4af5-879b-b93cee40e6d1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.069496 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod 
\"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.069536 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b0598514-17c4-4af5-879b-b93cee40e6d1-logs\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.069558 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0598514-17c4-4af5-879b-b93cee40e6d1-scripts\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.069591 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0598514-17c4-4af5-879b-b93cee40e6d1-config-data\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.069771 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.071634 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b0598514-17c4-4af5-879b-b93cee40e6d1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.071677 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b0598514-17c4-4af5-879b-b93cee40e6d1-logs\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.077102 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0598514-17c4-4af5-879b-b93cee40e6d1-config-data\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.094409 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0598514-17c4-4af5-879b-b93cee40e6d1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.103653 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0598514-17c4-4af5-879b-b93cee40e6d1-scripts\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:56 crc 
kubenswrapper[4773]: I0122 12:15:56.127874 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jz6j\" (UniqueName: \"kubernetes.io/projected/b0598514-17c4-4af5-879b-b93cee40e6d1-kube-api-access-6jz6j\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.135153 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f6f8cb849-fgdwh"] Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.176714 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.176837 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d95631d-1cd5-461f-bb82-541344621973-logs\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.176951 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d95631d-1cd5-461f-bb82-541344621973-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.176974 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d95631d-1cd5-461f-bb82-541344621973-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.177007 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfrjr\" (UniqueName: \"kubernetes.io/projected/6d95631d-1cd5-461f-bb82-541344621973-kube-api-access-sfrjr\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.177029 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6d95631d-1cd5-461f-bb82-541344621973-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.177048 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d95631d-1cd5-461f-bb82-541344621973-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.180687 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"7ce9300d-fcb4-43e9-810f-5000109e6799","Type":"ContainerStarted","Data":"20ed79f8166a789fb245b971a14affde6c137759c88d76160fef3496ce5905b3"} Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.182770 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" event={"ID":"5e872a34-92db-48b1-83b7-adc3dcc940d1","Type":"ContainerStarted","Data":"87df1bd5dbd97e8fc5e6153b1ac6979150589d15ea48ede729dc81e8a6749a65"} Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.186351 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vtlvs" event={"ID":"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1","Type":"ContainerStarted","Data":"022567fecc826144588132a7fe6f8b3c18be6d50e4f697d1e29bf495c3931bd5"} Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.192933 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fg4x7" event={"ID":"e83eccf7-20cf-4226-b994-5d00f3cef915","Type":"ContainerStarted","Data":"7f5e0d4b47b581eed1590fb0bb72c95c5a3d7979faebe1f8037d187fc5f89db4"} Jan 22 12:15:56 crc kubenswrapper[4773]: W0122 12:15:56.194434 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5dcbb5f2_81aa_41f4_be2c_9dd41c074f99.slice/crio-5ba715668cd0bb299e779bdb70c92553dfccdc7587463efb80d9b8b89023d1d9 WatchSource:0}: Error finding container 5ba715668cd0bb299e779bdb70c92553dfccdc7587463efb80d9b8b89023d1d9: Status 404 returned error can't find the container with id 5ba715668cd0bb299e779bdb70c92553dfccdc7587463efb80d9b8b89023d1d9 Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.195517 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-ld8bk" event={"ID":"4958bcb5-02cc-4c59-8b85-c386263dd3b8","Type":"ContainerStarted","Data":"63a14ed4e5f6d8891dd2093bdd3446b4e3a1e85748b41926b6f3295f4c72973b"} Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.198424 4773 generic.go:334] "Generic (PLEG): container finished" podID="f1959a98-6c40-428c-99aa-5342f8c4adc3" containerID="fe3d2fea094194142feccc27e365cf18b4fb0d1e35c62611555765c12140b36b" exitCode=0 Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.198589 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-767d96458c-lvflp" event={"ID":"f1959a98-6c40-428c-99aa-5342f8c4adc3","Type":"ContainerDied","Data":"fe3d2fea094194142feccc27e365cf18b4fb0d1e35c62611555765c12140b36b"} Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.204919 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " pod="openstack/glance-default-external-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.228043 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-vtlvs" podStartSLOduration=4.228023752 podStartE2EDuration="4.228023752s" podCreationTimestamp="2026-01-22 12:15:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:15:56.215059514 +0000 UTC m=+1263.793175349" watchObservedRunningTime="2026-01-22 12:15:56.228023752 +0000 UTC m=+1263.806139577" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.243683 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-db-sync-4p9mz" event={"ID":"93c45ab9-7e10-4217-93fa-5d801fb55c6d","Type":"ContainerStarted","Data":"45352a9351ded2c7335de28dc951e32646ccb1dcd868d068bfb4331ff28fb486"} Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.259580 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-vqtpx" event={"ID":"1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5","Type":"ContainerStarted","Data":"83c0def5b56b44cb9f1f01205a511238aa72173c21dd369de734bca93e5a1030"} Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.278729 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sfrjr\" (UniqueName: \"kubernetes.io/projected/6d95631d-1cd5-461f-bb82-541344621973-kube-api-access-sfrjr\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.278880 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6d95631d-1cd5-461f-bb82-541344621973-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.278953 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d95631d-1cd5-461f-bb82-541344621973-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.279705 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6d95631d-1cd5-461f-bb82-541344621973-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.282976 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.283370 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d95631d-1cd5-461f-bb82-541344621973-logs\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.283514 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d95631d-1cd5-461f-bb82-541344621973-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.283569 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d95631d-1cd5-461f-bb82-541344621973-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 
12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.284700 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d95631d-1cd5-461f-bb82-541344621973-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.286540 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.287176 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d95631d-1cd5-461f-bb82-541344621973-logs\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.289647 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d95631d-1cd5-461f-bb82-541344621973-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.300748 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d95631d-1cd5-461f-bb82-541344621973-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.305653 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-vqtpx" podStartSLOduration=4.305627917 podStartE2EDuration="4.305627917s" podCreationTimestamp="2026-01-22 12:15:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:15:56.285170394 +0000 UTC m=+1263.863286219" watchObservedRunningTime="2026-01-22 12:15:56.305627917 +0000 UTC m=+1263.883743742" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.307029 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfrjr\" (UniqueName: \"kubernetes.io/projected/6d95631d-1cd5-461f-bb82-541344621973-kube-api-access-sfrjr\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.333381 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.417503 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.459452 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.642257 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.694942 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58gk2\" (UniqueName: \"kubernetes.io/projected/f1959a98-6c40-428c-99aa-5342f8c4adc3-kube-api-access-58gk2\") pod \"f1959a98-6c40-428c-99aa-5342f8c4adc3\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.695061 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-config\") pod \"f1959a98-6c40-428c-99aa-5342f8c4adc3\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.695106 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-dns-svc\") pod \"f1959a98-6c40-428c-99aa-5342f8c4adc3\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.695176 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-ovsdbserver-sb\") pod \"f1959a98-6c40-428c-99aa-5342f8c4adc3\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.695229 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-ovsdbserver-nb\") pod \"f1959a98-6c40-428c-99aa-5342f8c4adc3\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.695278 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-dns-swift-storage-0\") pod \"f1959a98-6c40-428c-99aa-5342f8c4adc3\" (UID: \"f1959a98-6c40-428c-99aa-5342f8c4adc3\") " Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.709331 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1959a98-6c40-428c-99aa-5342f8c4adc3-kube-api-access-58gk2" (OuterVolumeSpecName: "kube-api-access-58gk2") pod "f1959a98-6c40-428c-99aa-5342f8c4adc3" (UID: "f1959a98-6c40-428c-99aa-5342f8c4adc3"). InnerVolumeSpecName "kube-api-access-58gk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.727047 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f1959a98-6c40-428c-99aa-5342f8c4adc3" (UID: "f1959a98-6c40-428c-99aa-5342f8c4adc3"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.732041 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f1959a98-6c40-428c-99aa-5342f8c4adc3" (UID: "f1959a98-6c40-428c-99aa-5342f8c4adc3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.794998 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-config" (OuterVolumeSpecName: "config") pod "f1959a98-6c40-428c-99aa-5342f8c4adc3" (UID: "f1959a98-6c40-428c-99aa-5342f8c4adc3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.796914 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f1959a98-6c40-428c-99aa-5342f8c4adc3" (UID: "f1959a98-6c40-428c-99aa-5342f8c4adc3"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.797404 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.797455 4773 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.797474 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58gk2\" (UniqueName: \"kubernetes.io/projected/f1959a98-6c40-428c-99aa-5342f8c4adc3-kube-api-access-58gk2\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.797488 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.797499 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.797787 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f1959a98-6c40-428c-99aa-5342f8c4adc3" (UID: "f1959a98-6c40-428c-99aa-5342f8c4adc3"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.875047 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.899442 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f1959a98-6c40-428c-99aa-5342f8c4adc3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:56 crc kubenswrapper[4773]: I0122 12:15:56.960540 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.016031 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.279434 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.318480 4773 generic.go:334] "Generic (PLEG): container finished" podID="5dcbb5f2-81aa-41f4-be2c-9dd41c074f99" containerID="3727c12b7dcc27228baf58bdefa1203496c5c97590da3ec6a1e744ce45b8a029" exitCode=0 Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.318575 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" event={"ID":"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99","Type":"ContainerDied","Data":"3727c12b7dcc27228baf58bdefa1203496c5c97590da3ec6a1e744ce45b8a029"} Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.318610 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" event={"ID":"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99","Type":"ContainerStarted","Data":"5ba715668cd0bb299e779bdb70c92553dfccdc7587463efb80d9b8b89023d1d9"} Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.325077 4773 generic.go:334] "Generic (PLEG): container finished" podID="5e872a34-92db-48b1-83b7-adc3dcc940d1" containerID="03c6ad5bc7d41e701b1a1e0a8b39d671100656faffbd641cb50b476dddc5ef96" exitCode=0 Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.325146 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" event={"ID":"5e872a34-92db-48b1-83b7-adc3dcc940d1","Type":"ContainerDied","Data":"03c6ad5bc7d41e701b1a1e0a8b39d671100656faffbd641cb50b476dddc5ef96"} Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.334046 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-767d96458c-lvflp" event={"ID":"f1959a98-6c40-428c-99aa-5342f8c4adc3","Type":"ContainerDied","Data":"90d2f60b1ac20ae104b14aee693cc1056130eff36d231af1ef8b393d7dd454ab"} Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.334123 4773 scope.go:117] "RemoveContainer" containerID="fe3d2fea094194142feccc27e365cf18b4fb0d1e35c62611555765c12140b36b" Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.334327 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-767d96458c-lvflp" Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.530093 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.577963 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-767d96458c-lvflp"] Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.586941 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-767d96458c-lvflp"] Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.811444 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.845535 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-dns-swift-storage-0\") pod \"5e872a34-92db-48b1-83b7-adc3dcc940d1\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.845665 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-ovsdbserver-nb\") pod \"5e872a34-92db-48b1-83b7-adc3dcc940d1\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.845751 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-dns-svc\") pod \"5e872a34-92db-48b1-83b7-adc3dcc940d1\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.845781 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-config\") pod \"5e872a34-92db-48b1-83b7-adc3dcc940d1\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.845835 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6t7c\" (UniqueName: \"kubernetes.io/projected/5e872a34-92db-48b1-83b7-adc3dcc940d1-kube-api-access-d6t7c\") pod \"5e872a34-92db-48b1-83b7-adc3dcc940d1\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.845874 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-ovsdbserver-sb\") pod \"5e872a34-92db-48b1-83b7-adc3dcc940d1\" (UID: \"5e872a34-92db-48b1-83b7-adc3dcc940d1\") " Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.866540 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e872a34-92db-48b1-83b7-adc3dcc940d1-kube-api-access-d6t7c" (OuterVolumeSpecName: "kube-api-access-d6t7c") pod "5e872a34-92db-48b1-83b7-adc3dcc940d1" (UID: "5e872a34-92db-48b1-83b7-adc3dcc940d1"). InnerVolumeSpecName "kube-api-access-d6t7c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.876251 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5e872a34-92db-48b1-83b7-adc3dcc940d1" (UID: "5e872a34-92db-48b1-83b7-adc3dcc940d1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.881271 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5e872a34-92db-48b1-83b7-adc3dcc940d1" (UID: "5e872a34-92db-48b1-83b7-adc3dcc940d1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.888237 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-config" (OuterVolumeSpecName: "config") pod "5e872a34-92db-48b1-83b7-adc3dcc940d1" (UID: "5e872a34-92db-48b1-83b7-adc3dcc940d1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.893403 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5e872a34-92db-48b1-83b7-adc3dcc940d1" (UID: "5e872a34-92db-48b1-83b7-adc3dcc940d1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.908309 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5e872a34-92db-48b1-83b7-adc3dcc940d1" (UID: "5e872a34-92db-48b1-83b7-adc3dcc940d1"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.950418 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6t7c\" (UniqueName: \"kubernetes.io/projected/5e872a34-92db-48b1-83b7-adc3dcc940d1-kube-api-access-d6t7c\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.950719 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.950731 4773 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.950741 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.950750 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:57 crc kubenswrapper[4773]: I0122 12:15:57.950760 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e872a34-92db-48b1-83b7-adc3dcc940d1-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:15:58 crc kubenswrapper[4773]: I0122 12:15:58.348420 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6d95631d-1cd5-461f-bb82-541344621973","Type":"ContainerStarted","Data":"67a07b08f47ebc05dcc33d4e0bf13156de7a0bae5443f377b61bcab73b452f79"} Jan 22 12:15:58 crc kubenswrapper[4773]: I0122 12:15:58.351277 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b0598514-17c4-4af5-879b-b93cee40e6d1","Type":"ContainerStarted","Data":"bc0a9b82a87786e5c998b076d8c3e7e46bfbe98eb3efaa03a22379184e12181f"} Jan 22 12:15:58 crc kubenswrapper[4773]: I0122 12:15:58.357998 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" event={"ID":"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99","Type":"ContainerStarted","Data":"e03f6de4c0dcdbbb21483a33d8dda8eb301f643f04d7aaa6df94dfd9ec3c8d11"} Jan 22 12:15:58 crc kubenswrapper[4773]: I0122 12:15:58.359194 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:15:58 crc kubenswrapper[4773]: I0122 12:15:58.364500 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" event={"ID":"5e872a34-92db-48b1-83b7-adc3dcc940d1","Type":"ContainerDied","Data":"87df1bd5dbd97e8fc5e6153b1ac6979150589d15ea48ede729dc81e8a6749a65"} Jan 22 12:15:58 crc kubenswrapper[4773]: I0122 12:15:58.364535 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fc6d4ffc7-sq429" Jan 22 12:15:58 crc kubenswrapper[4773]: I0122 12:15:58.364579 4773 scope.go:117] "RemoveContainer" containerID="03c6ad5bc7d41e701b1a1e0a8b39d671100656faffbd641cb50b476dddc5ef96" Jan 22 12:15:58 crc kubenswrapper[4773]: I0122 12:15:58.383359 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" podStartSLOduration=4.383337204 podStartE2EDuration="4.383337204s" podCreationTimestamp="2026-01-22 12:15:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:15:58.376555501 +0000 UTC m=+1265.954671326" watchObservedRunningTime="2026-01-22 12:15:58.383337204 +0000 UTC m=+1265.961453019" Jan 22 12:15:58 crc kubenswrapper[4773]: I0122 12:15:58.492474 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fc6d4ffc7-sq429"] Jan 22 12:15:58 crc kubenswrapper[4773]: I0122 12:15:58.501748 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fc6d4ffc7-sq429"] Jan 22 12:15:58 crc kubenswrapper[4773]: I0122 12:15:58.679160 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e872a34-92db-48b1-83b7-adc3dcc940d1" path="/var/lib/kubelet/pods/5e872a34-92db-48b1-83b7-adc3dcc940d1/volumes" Jan 22 12:15:58 crc kubenswrapper[4773]: I0122 12:15:58.681236 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1959a98-6c40-428c-99aa-5342f8c4adc3" path="/var/lib/kubelet/pods/f1959a98-6c40-428c-99aa-5342f8c4adc3/volumes" Jan 22 12:15:59 crc kubenswrapper[4773]: I0122 12:15:59.431831 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6d95631d-1cd5-461f-bb82-541344621973","Type":"ContainerStarted","Data":"91abf7692787e0f6b27c2a534bf19bf8916cd71be0da7570473523b1c28acafd"} Jan 22 12:15:59 crc kubenswrapper[4773]: I0122 12:15:59.435387 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b0598514-17c4-4af5-879b-b93cee40e6d1","Type":"ContainerStarted","Data":"989908dd66422bae3ea5f3954f52b791fb611e8c99d00833e9706afd190e436d"} Jan 22 12:16:00 crc kubenswrapper[4773]: I0122 12:16:00.450412 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6d95631d-1cd5-461f-bb82-541344621973","Type":"ContainerStarted","Data":"854a7d431a7c49e3c06c6f46c1bdc8b8a489f28e77572d4bd867f5bbc9ed6b6a"} Jan 22 12:16:00 crc kubenswrapper[4773]: I0122 12:16:00.450701 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="6d95631d-1cd5-461f-bb82-541344621973" containerName="glance-log" containerID="cri-o://91abf7692787e0f6b27c2a534bf19bf8916cd71be0da7570473523b1c28acafd" gracePeriod=30 Jan 22 12:16:00 crc kubenswrapper[4773]: I0122 12:16:00.450952 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="6d95631d-1cd5-461f-bb82-541344621973" containerName="glance-httpd" containerID="cri-o://854a7d431a7c49e3c06c6f46c1bdc8b8a489f28e77572d4bd867f5bbc9ed6b6a" gracePeriod=30 Jan 22 12:16:00 crc kubenswrapper[4773]: I0122 12:16:00.457062 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b0598514-17c4-4af5-879b-b93cee40e6d1" 
containerName="glance-log" containerID="cri-o://989908dd66422bae3ea5f3954f52b791fb611e8c99d00833e9706afd190e436d" gracePeriod=30 Jan 22 12:16:00 crc kubenswrapper[4773]: I0122 12:16:00.457251 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b0598514-17c4-4af5-879b-b93cee40e6d1" containerName="glance-httpd" containerID="cri-o://86d7600d73732ee868aaca91d39d7af45d1a50dd038aef87a6069d612d741811" gracePeriod=30 Jan 22 12:16:00 crc kubenswrapper[4773]: I0122 12:16:00.457355 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b0598514-17c4-4af5-879b-b93cee40e6d1","Type":"ContainerStarted","Data":"86d7600d73732ee868aaca91d39d7af45d1a50dd038aef87a6069d612d741811"} Jan 22 12:16:00 crc kubenswrapper[4773]: I0122 12:16:00.505557 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.505531075 podStartE2EDuration="6.505531075s" podCreationTimestamp="2026-01-22 12:15:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:16:00.501732707 +0000 UTC m=+1268.079848552" watchObservedRunningTime="2026-01-22 12:16:00.505531075 +0000 UTC m=+1268.083646910" Jan 22 12:16:00 crc kubenswrapper[4773]: I0122 12:16:00.508504 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.508484019 podStartE2EDuration="6.508484019s" podCreationTimestamp="2026-01-22 12:15:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:16:00.480174463 +0000 UTC m=+1268.058290288" watchObservedRunningTime="2026-01-22 12:16:00.508484019 +0000 UTC m=+1268.086599854" Jan 22 12:16:01 crc kubenswrapper[4773]: I0122 12:16:01.467907 4773 generic.go:334] "Generic (PLEG): container finished" podID="02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1" containerID="022567fecc826144588132a7fe6f8b3c18be6d50e4f697d1e29bf495c3931bd5" exitCode=0 Jan 22 12:16:01 crc kubenswrapper[4773]: I0122 12:16:01.468114 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vtlvs" event={"ID":"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1","Type":"ContainerDied","Data":"022567fecc826144588132a7fe6f8b3c18be6d50e4f697d1e29bf495c3931bd5"} Jan 22 12:16:01 crc kubenswrapper[4773]: I0122 12:16:01.471324 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d95631d-1cd5-461f-bb82-541344621973" containerID="854a7d431a7c49e3c06c6f46c1bdc8b8a489f28e77572d4bd867f5bbc9ed6b6a" exitCode=143 Jan 22 12:16:01 crc kubenswrapper[4773]: I0122 12:16:01.471348 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d95631d-1cd5-461f-bb82-541344621973" containerID="91abf7692787e0f6b27c2a534bf19bf8916cd71be0da7570473523b1c28acafd" exitCode=143 Jan 22 12:16:01 crc kubenswrapper[4773]: I0122 12:16:01.471392 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6d95631d-1cd5-461f-bb82-541344621973","Type":"ContainerDied","Data":"854a7d431a7c49e3c06c6f46c1bdc8b8a489f28e77572d4bd867f5bbc9ed6b6a"} Jan 22 12:16:01 crc kubenswrapper[4773]: I0122 12:16:01.471418 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"6d95631d-1cd5-461f-bb82-541344621973","Type":"ContainerDied","Data":"91abf7692787e0f6b27c2a534bf19bf8916cd71be0da7570473523b1c28acafd"} Jan 22 12:16:01 crc kubenswrapper[4773]: I0122 12:16:01.480097 4773 generic.go:334] "Generic (PLEG): container finished" podID="b0598514-17c4-4af5-879b-b93cee40e6d1" containerID="86d7600d73732ee868aaca91d39d7af45d1a50dd038aef87a6069d612d741811" exitCode=143 Jan 22 12:16:01 crc kubenswrapper[4773]: I0122 12:16:01.480144 4773 generic.go:334] "Generic (PLEG): container finished" podID="b0598514-17c4-4af5-879b-b93cee40e6d1" containerID="989908dd66422bae3ea5f3954f52b791fb611e8c99d00833e9706afd190e436d" exitCode=143 Jan 22 12:16:01 crc kubenswrapper[4773]: I0122 12:16:01.480173 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b0598514-17c4-4af5-879b-b93cee40e6d1","Type":"ContainerDied","Data":"86d7600d73732ee868aaca91d39d7af45d1a50dd038aef87a6069d612d741811"} Jan 22 12:16:01 crc kubenswrapper[4773]: I0122 12:16:01.480212 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b0598514-17c4-4af5-879b-b93cee40e6d1","Type":"ContainerDied","Data":"989908dd66422bae3ea5f3954f52b791fb611e8c99d00833e9706afd190e436d"} Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.089553 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.090244 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-config-data\") pod \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.090315 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-combined-ca-bundle\") pod \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.090353 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-credential-keys\") pod \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.090424 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hmkgr\" (UniqueName: \"kubernetes.io/projected/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-kube-api-access-hmkgr\") pod \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.090452 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-fernet-keys\") pod \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\" (UID: \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.090559 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-scripts\") pod \"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\" (UID: 
\"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1\") " Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.097762 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-kube-api-access-hmkgr" (OuterVolumeSpecName: "kube-api-access-hmkgr") pod "02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1" (UID: "02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1"). InnerVolumeSpecName "kube-api-access-hmkgr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.098224 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1" (UID: "02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.116263 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-scripts" (OuterVolumeSpecName: "scripts") pod "02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1" (UID: "02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.119391 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-config-data" (OuterVolumeSpecName: "config-data") pod "02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1" (UID: "02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.134410 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1" (UID: "02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.192151 4773 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.192191 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.192205 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.192215 4773 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.192226 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hmkgr\" (UniqueName: \"kubernetes.io/projected/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-kube-api-access-hmkgr\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.223565 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1" (UID: "02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.293892 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.507191 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vtlvs" event={"ID":"02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1","Type":"ContainerDied","Data":"ffe5bf0e4e69f5de240c7149ff95ab31be9bb0058df203ce824465b7bccf2724"} Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.507256 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ffe5bf0e4e69f5de240c7149ff95ab31be9bb0058df203ce824465b7bccf2724" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.507351 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-vtlvs" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.579637 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-vtlvs"] Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.588558 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-vtlvs"] Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.716579 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-pjn7v"] Jan 22 12:16:03 crc kubenswrapper[4773]: E0122 12:16:03.717054 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e872a34-92db-48b1-83b7-adc3dcc940d1" containerName="init" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.717075 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e872a34-92db-48b1-83b7-adc3dcc940d1" containerName="init" Jan 22 12:16:03 crc kubenswrapper[4773]: E0122 12:16:03.717094 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1959a98-6c40-428c-99aa-5342f8c4adc3" containerName="init" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.717101 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1959a98-6c40-428c-99aa-5342f8c4adc3" containerName="init" Jan 22 12:16:03 crc kubenswrapper[4773]: E0122 12:16:03.717112 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1" containerName="keystone-bootstrap" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.717120 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1" containerName="keystone-bootstrap" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.717354 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e872a34-92db-48b1-83b7-adc3dcc940d1" containerName="init" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.717380 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1959a98-6c40-428c-99aa-5342f8c4adc3" containerName="init" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.717393 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1" containerName="keystone-bootstrap" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.718066 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.723887 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-pjn7v"] Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.743321 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.743925 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.744014 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.744127 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.744350 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-92k4l" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.910502 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-config-data\") pod \"keystone-bootstrap-pjn7v\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.910580 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-fernet-keys\") pod \"keystone-bootstrap-pjn7v\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.910609 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-combined-ca-bundle\") pod \"keystone-bootstrap-pjn7v\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.910684 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-scripts\") pod \"keystone-bootstrap-pjn7v\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.910805 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5lzk\" (UniqueName: \"kubernetes.io/projected/209de0ff-2316-479c-8ff1-62e1ed260807-kube-api-access-f5lzk\") pod \"keystone-bootstrap-pjn7v\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:03 crc kubenswrapper[4773]: I0122 12:16:03.910851 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-credential-keys\") pod \"keystone-bootstrap-pjn7v\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:04 crc kubenswrapper[4773]: I0122 12:16:04.012887 4773 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-scripts\") pod \"keystone-bootstrap-pjn7v\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:04 crc kubenswrapper[4773]: I0122 12:16:04.012956 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5lzk\" (UniqueName: \"kubernetes.io/projected/209de0ff-2316-479c-8ff1-62e1ed260807-kube-api-access-f5lzk\") pod \"keystone-bootstrap-pjn7v\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:04 crc kubenswrapper[4773]: I0122 12:16:04.013004 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-credential-keys\") pod \"keystone-bootstrap-pjn7v\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:04 crc kubenswrapper[4773]: I0122 12:16:04.013116 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-config-data\") pod \"keystone-bootstrap-pjn7v\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:04 crc kubenswrapper[4773]: I0122 12:16:04.013148 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-fernet-keys\") pod \"keystone-bootstrap-pjn7v\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:04 crc kubenswrapper[4773]: I0122 12:16:04.013173 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-combined-ca-bundle\") pod \"keystone-bootstrap-pjn7v\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:04 crc kubenswrapper[4773]: I0122 12:16:04.020321 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-credential-keys\") pod \"keystone-bootstrap-pjn7v\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:04 crc kubenswrapper[4773]: I0122 12:16:04.020799 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-config-data\") pod \"keystone-bootstrap-pjn7v\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:04 crc kubenswrapper[4773]: I0122 12:16:04.020812 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-fernet-keys\") pod \"keystone-bootstrap-pjn7v\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:04 crc kubenswrapper[4773]: I0122 12:16:04.021411 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-combined-ca-bundle\") pod \"keystone-bootstrap-pjn7v\" (UID: 
\"209de0ff-2316-479c-8ff1-62e1ed260807\") " pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:04 crc kubenswrapper[4773]: I0122 12:16:04.023556 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-scripts\") pod \"keystone-bootstrap-pjn7v\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:04 crc kubenswrapper[4773]: I0122 12:16:04.030416 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5lzk\" (UniqueName: \"kubernetes.io/projected/209de0ff-2316-479c-8ff1-62e1ed260807-kube-api-access-f5lzk\") pod \"keystone-bootstrap-pjn7v\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:04 crc kubenswrapper[4773]: I0122 12:16:04.035068 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:04 crc kubenswrapper[4773]: I0122 12:16:04.675582 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1" path="/var/lib/kubelet/pods/02d9a5f0-dc27-4a71-a89a-ebd7dc9f3de1/volumes" Jan 22 12:16:04 crc kubenswrapper[4773]: I0122 12:16:04.772580 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:16:04 crc kubenswrapper[4773]: I0122 12:16:04.850131 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8db84466c-mm9ts"] Jan 22 12:16:04 crc kubenswrapper[4773]: I0122 12:16:04.850433 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8db84466c-mm9ts" podUID="c9b10ad1-cb79-4231-b540-04a494438839" containerName="dnsmasq-dns" containerID="cri-o://9f58fa38a9d9aa771343f7e1bb26057f5ba9c615f90418c8ff99eab8a04440e2" gracePeriod=10 Jan 22 12:16:05 crc kubenswrapper[4773]: I0122 12:16:05.526550 4773 generic.go:334] "Generic (PLEG): container finished" podID="c9b10ad1-cb79-4231-b540-04a494438839" containerID="9f58fa38a9d9aa771343f7e1bb26057f5ba9c615f90418c8ff99eab8a04440e2" exitCode=0 Jan 22 12:16:05 crc kubenswrapper[4773]: I0122 12:16:05.526619 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8db84466c-mm9ts" event={"ID":"c9b10ad1-cb79-4231-b540-04a494438839","Type":"ContainerDied","Data":"9f58fa38a9d9aa771343f7e1bb26057f5ba9c615f90418c8ff99eab8a04440e2"} Jan 22 12:16:07 crc kubenswrapper[4773]: I0122 12:16:07.864074 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8db84466c-mm9ts" podUID="c9b10ad1-cb79-4231-b540-04a494438839" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: connect: connection refused" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.508764 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.511747 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.574567 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b0598514-17c4-4af5-879b-b93cee40e6d1","Type":"ContainerDied","Data":"bc0a9b82a87786e5c998b076d8c3e7e46bfbe98eb3efaa03a22379184e12181f"} Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.574621 4773 scope.go:117] "RemoveContainer" containerID="86d7600d73732ee868aaca91d39d7af45d1a50dd038aef87a6069d612d741811" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.574747 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.578760 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6d95631d-1cd5-461f-bb82-541344621973","Type":"ContainerDied","Data":"67a07b08f47ebc05dcc33d4e0bf13156de7a0bae5443f377b61bcab73b452f79"} Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.578830 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.673617 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b0598514-17c4-4af5-879b-b93cee40e6d1-httpd-run\") pod \"b0598514-17c4-4af5-879b-b93cee40e6d1\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.673663 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0598514-17c4-4af5-879b-b93cee40e6d1-config-data\") pod \"b0598514-17c4-4af5-879b-b93cee40e6d1\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.673734 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"6d95631d-1cd5-461f-bb82-541344621973\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.673763 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"b0598514-17c4-4af5-879b-b93cee40e6d1\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.673788 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d95631d-1cd5-461f-bb82-541344621973-logs\") pod \"6d95631d-1cd5-461f-bb82-541344621973\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.673804 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b0598514-17c4-4af5-879b-b93cee40e6d1-logs\") pod \"b0598514-17c4-4af5-879b-b93cee40e6d1\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.673828 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0598514-17c4-4af5-879b-b93cee40e6d1-combined-ca-bundle\") pod 
\"b0598514-17c4-4af5-879b-b93cee40e6d1\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.673906 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jz6j\" (UniqueName: \"kubernetes.io/projected/b0598514-17c4-4af5-879b-b93cee40e6d1-kube-api-access-6jz6j\") pod \"b0598514-17c4-4af5-879b-b93cee40e6d1\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.673931 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d95631d-1cd5-461f-bb82-541344621973-combined-ca-bundle\") pod \"6d95631d-1cd5-461f-bb82-541344621973\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.673946 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0598514-17c4-4af5-879b-b93cee40e6d1-scripts\") pod \"b0598514-17c4-4af5-879b-b93cee40e6d1\" (UID: \"b0598514-17c4-4af5-879b-b93cee40e6d1\") " Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.673985 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sfrjr\" (UniqueName: \"kubernetes.io/projected/6d95631d-1cd5-461f-bb82-541344621973-kube-api-access-sfrjr\") pod \"6d95631d-1cd5-461f-bb82-541344621973\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.674017 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6d95631d-1cd5-461f-bb82-541344621973-httpd-run\") pod \"6d95631d-1cd5-461f-bb82-541344621973\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.674820 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0598514-17c4-4af5-879b-b93cee40e6d1-logs" (OuterVolumeSpecName: "logs") pod "b0598514-17c4-4af5-879b-b93cee40e6d1" (UID: "b0598514-17c4-4af5-879b-b93cee40e6d1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.675697 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d95631d-1cd5-461f-bb82-541344621973-config-data\") pod \"6d95631d-1cd5-461f-bb82-541344621973\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.675746 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d95631d-1cd5-461f-bb82-541344621973-scripts\") pod \"6d95631d-1cd5-461f-bb82-541344621973\" (UID: \"6d95631d-1cd5-461f-bb82-541344621973\") " Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.676009 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b0598514-17c4-4af5-879b-b93cee40e6d1-logs\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.676039 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d95631d-1cd5-461f-bb82-541344621973-logs" (OuterVolumeSpecName: "logs") pod "6d95631d-1cd5-461f-bb82-541344621973" (UID: "6d95631d-1cd5-461f-bb82-541344621973"). 
InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.676277 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0598514-17c4-4af5-879b-b93cee40e6d1-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b0598514-17c4-4af5-879b-b93cee40e6d1" (UID: "b0598514-17c4-4af5-879b-b93cee40e6d1"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.678384 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d95631d-1cd5-461f-bb82-541344621973-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "6d95631d-1cd5-461f-bb82-541344621973" (UID: "6d95631d-1cd5-461f-bb82-541344621973"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.682731 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d95631d-1cd5-461f-bb82-541344621973-scripts" (OuterVolumeSpecName: "scripts") pod "6d95631d-1cd5-461f-bb82-541344621973" (UID: "6d95631d-1cd5-461f-bb82-541344621973"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.683658 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0598514-17c4-4af5-879b-b93cee40e6d1-scripts" (OuterVolumeSpecName: "scripts") pod "b0598514-17c4-4af5-879b-b93cee40e6d1" (UID: "b0598514-17c4-4af5-879b-b93cee40e6d1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.683786 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "b0598514-17c4-4af5-879b-b93cee40e6d1" (UID: "b0598514-17c4-4af5-879b-b93cee40e6d1"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.683825 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "6d95631d-1cd5-461f-bb82-541344621973" (UID: "6d95631d-1cd5-461f-bb82-541344621973"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.686486 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d95631d-1cd5-461f-bb82-541344621973-kube-api-access-sfrjr" (OuterVolumeSpecName: "kube-api-access-sfrjr") pod "6d95631d-1cd5-461f-bb82-541344621973" (UID: "6d95631d-1cd5-461f-bb82-541344621973"). InnerVolumeSpecName "kube-api-access-sfrjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.691206 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0598514-17c4-4af5-879b-b93cee40e6d1-kube-api-access-6jz6j" (OuterVolumeSpecName: "kube-api-access-6jz6j") pod "b0598514-17c4-4af5-879b-b93cee40e6d1" (UID: "b0598514-17c4-4af5-879b-b93cee40e6d1"). InnerVolumeSpecName "kube-api-access-6jz6j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.704921 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d95631d-1cd5-461f-bb82-541344621973-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6d95631d-1cd5-461f-bb82-541344621973" (UID: "6d95631d-1cd5-461f-bb82-541344621973"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.710027 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0598514-17c4-4af5-879b-b93cee40e6d1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b0598514-17c4-4af5-879b-b93cee40e6d1" (UID: "b0598514-17c4-4af5-879b-b93cee40e6d1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.727973 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d95631d-1cd5-461f-bb82-541344621973-config-data" (OuterVolumeSpecName: "config-data") pod "6d95631d-1cd5-461f-bb82-541344621973" (UID: "6d95631d-1cd5-461f-bb82-541344621973"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.734567 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0598514-17c4-4af5-879b-b93cee40e6d1-config-data" (OuterVolumeSpecName: "config-data") pod "b0598514-17c4-4af5-879b-b93cee40e6d1" (UID: "b0598514-17c4-4af5-879b-b93cee40e6d1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.779482 4773 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b0598514-17c4-4af5-879b-b93cee40e6d1-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.779528 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0598514-17c4-4af5-879b-b93cee40e6d1-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.779570 4773 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.779591 4773 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.779605 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6d95631d-1cd5-461f-bb82-541344621973-logs\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.779616 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0598514-17c4-4af5-879b-b93cee40e6d1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.779633 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jz6j\" (UniqueName: 
\"kubernetes.io/projected/b0598514-17c4-4af5-879b-b93cee40e6d1-kube-api-access-6jz6j\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.779647 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0598514-17c4-4af5-879b-b93cee40e6d1-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.779660 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d95631d-1cd5-461f-bb82-541344621973-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.779672 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sfrjr\" (UniqueName: \"kubernetes.io/projected/6d95631d-1cd5-461f-bb82-541344621973-kube-api-access-sfrjr\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.779685 4773 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6d95631d-1cd5-461f-bb82-541344621973-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.779699 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d95631d-1cd5-461f-bb82-541344621973-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.779710 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d95631d-1cd5-461f-bb82-541344621973-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.798594 4773 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.799539 4773 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.880393 4773 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.880433 4773 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.921777 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.947538 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.957363 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.968078 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.976454 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 12:16:09 crc kubenswrapper[4773]: E0122 12:16:09.977031 4773 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="b0598514-17c4-4af5-879b-b93cee40e6d1" containerName="glance-httpd" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.977046 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0598514-17c4-4af5-879b-b93cee40e6d1" containerName="glance-httpd" Jan 22 12:16:09 crc kubenswrapper[4773]: E0122 12:16:09.977071 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d95631d-1cd5-461f-bb82-541344621973" containerName="glance-httpd" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.977079 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d95631d-1cd5-461f-bb82-541344621973" containerName="glance-httpd" Jan 22 12:16:09 crc kubenswrapper[4773]: E0122 12:16:09.977095 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0598514-17c4-4af5-879b-b93cee40e6d1" containerName="glance-log" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.977103 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0598514-17c4-4af5-879b-b93cee40e6d1" containerName="glance-log" Jan 22 12:16:09 crc kubenswrapper[4773]: E0122 12:16:09.977116 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d95631d-1cd5-461f-bb82-541344621973" containerName="glance-log" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.977123 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d95631d-1cd5-461f-bb82-541344621973" containerName="glance-log" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.977535 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d95631d-1cd5-461f-bb82-541344621973" containerName="glance-log" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.977560 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0598514-17c4-4af5-879b-b93cee40e6d1" containerName="glance-httpd" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.977571 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d95631d-1cd5-461f-bb82-541344621973" containerName="glance-httpd" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.977593 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0598514-17c4-4af5-879b-b93cee40e6d1" containerName="glance-log" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.978741 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.984422 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.984766 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.985059 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.989080 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-tmks6" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.989711 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.991496 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.999048 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Jan 22 12:16:09 crc kubenswrapper[4773]: I0122 12:16:09.999357 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.001176 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.021515 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.083160 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.083211 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.083251 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-scripts\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.083321 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.083346 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvxtb\" (UniqueName: \"kubernetes.io/projected/66b1ac35-e977-43f0-8538-fb7da5bafde0-kube-api-access-tvxtb\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.083510 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c5afcb43-61a0-47ad-923e-ca320d6ca49f-logs\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.083552 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " 
pod="openstack/glance-default-external-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.083630 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.083668 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.083692 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/66b1ac35-e977-43f0-8538-fb7da5bafde0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.083716 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.083959 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.083989 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c5afcb43-61a0-47ad-923e-ca320d6ca49f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.084077 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66b1ac35-e977-43f0-8538-fb7da5bafde0-logs\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.084139 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wv5gm\" (UniqueName: \"kubernetes.io/projected/c5afcb43-61a0-47ad-923e-ca320d6ca49f-kube-api-access-wv5gm\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.084185 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-config-data\") pod 
\"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.186010 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66b1ac35-e977-43f0-8538-fb7da5bafde0-logs\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.186494 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wv5gm\" (UniqueName: \"kubernetes.io/projected/c5afcb43-61a0-47ad-923e-ca320d6ca49f-kube-api-access-wv5gm\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.186545 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-config-data\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.186572 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.186605 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.186648 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-scripts\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.186671 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.186695 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvxtb\" (UniqueName: \"kubernetes.io/projected/66b1ac35-e977-43f0-8538-fb7da5bafde0-kube-api-access-tvxtb\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.186848 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c5afcb43-61a0-47ad-923e-ca320d6ca49f-logs\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " 
pod="openstack/glance-default-external-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.186953 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.187234 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.187437 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.187484 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.187512 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/66b1ac35-e977-43f0-8538-fb7da5bafde0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.187534 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.187559 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.187581 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c5afcb43-61a0-47ad-923e-ca320d6ca49f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0" Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.194697 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" 
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.248181 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66b1ac35-e977-43f0-8538-fb7da5bafde0-logs\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0"
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.251647 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-scripts\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.254880 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-config-data\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.269739 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c5afcb43-61a0-47ad-923e-ca320d6ca49f-logs\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.270429 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/66b1ac35-e977-43f0-8538-fb7da5bafde0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0"
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.270481 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c5afcb43-61a0-47ad-923e-ca320d6ca49f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.270564 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0"
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.272662 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wv5gm\" (UniqueName: \"kubernetes.io/projected/c5afcb43-61a0-47ad-923e-ca320d6ca49f-kube-api-access-wv5gm\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.272724 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0"
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.275060 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.279363 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvxtb\" (UniqueName: \"kubernetes.io/projected/66b1ac35-e977-43f0-8538-fb7da5bafde0-kube-api-access-tvxtb\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0"
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.281883 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.283014 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0"
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.289828 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0"
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.297199 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") " pod="openstack/glance-default-internal-api-0"
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.300715 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.547266 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.547389 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.670255 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d95631d-1cd5-461f-bb82-541344621973" path="/var/lib/kubelet/pods/6d95631d-1cd5-461f-bb82-541344621973/volumes"
Jan 22 12:16:10 crc kubenswrapper[4773]: I0122 12:16:10.675005 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0598514-17c4-4af5-879b-b93cee40e6d1" path="/var/lib/kubelet/pods/b0598514-17c4-4af5-879b-b93cee40e6d1/volumes"
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.596923 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8db84466c-mm9ts"
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.629559 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5hmf\" (UniqueName: \"kubernetes.io/projected/c9b10ad1-cb79-4231-b540-04a494438839-kube-api-access-z5hmf\") pod \"c9b10ad1-cb79-4231-b540-04a494438839\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") "
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.629641 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-config\") pod \"c9b10ad1-cb79-4231-b540-04a494438839\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") "
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.629730 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-dns-swift-storage-0\") pod \"c9b10ad1-cb79-4231-b540-04a494438839\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") "
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.631612 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-dns-svc\") pod \"c9b10ad1-cb79-4231-b540-04a494438839\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") "
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.631659 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-ovsdbserver-nb\") pod \"c9b10ad1-cb79-4231-b540-04a494438839\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") "
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.631710 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-ovsdbserver-sb\") pod \"c9b10ad1-cb79-4231-b540-04a494438839\" (UID: \"c9b10ad1-cb79-4231-b540-04a494438839\") "
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.657625 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9b10ad1-cb79-4231-b540-04a494438839-kube-api-access-z5hmf" (OuterVolumeSpecName: "kube-api-access-z5hmf") pod "c9b10ad1-cb79-4231-b540-04a494438839" (UID: "c9b10ad1-cb79-4231-b540-04a494438839"). InnerVolumeSpecName "kube-api-access-z5hmf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.683899 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c9b10ad1-cb79-4231-b540-04a494438839" (UID: "c9b10ad1-cb79-4231-b540-04a494438839"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.684078 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8db84466c-mm9ts" event={"ID":"c9b10ad1-cb79-4231-b540-04a494438839","Type":"ContainerDied","Data":"e4bd37843ee1c25b9a1e1ed82481b0e74d42ec2dbb547eb3fdbe204cbc33cff3"}
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.684195 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8db84466c-mm9ts"
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.684522 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-config" (OuterVolumeSpecName: "config") pod "c9b10ad1-cb79-4231-b540-04a494438839" (UID: "c9b10ad1-cb79-4231-b540-04a494438839"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.692027 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c9b10ad1-cb79-4231-b540-04a494438839" (UID: "c9b10ad1-cb79-4231-b540-04a494438839"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.693086 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c9b10ad1-cb79-4231-b540-04a494438839" (UID: "c9b10ad1-cb79-4231-b540-04a494438839"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.700882 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c9b10ad1-cb79-4231-b540-04a494438839" (UID: "c9b10ad1-cb79-4231-b540-04a494438839"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.734468 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-config\") on node \"crc\" DevicePath \"\""
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.734509 4773 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.734523 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.734534 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.734546 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c9b10ad1-cb79-4231-b540-04a494438839-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.734556 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5hmf\" (UniqueName: \"kubernetes.io/projected/c9b10ad1-cb79-4231-b540-04a494438839-kube-api-access-z5hmf\") on node \"crc\" DevicePath \"\""
Jan 22 12:16:17 crc kubenswrapper[4773]: I0122 12:16:17.863159 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8db84466c-mm9ts" podUID="c9b10ad1-cb79-4231-b540-04a494438839" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: i/o timeout"
Jan 22 12:16:18 crc kubenswrapper[4773]: I0122 12:16:18.027383 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8db84466c-mm9ts"]
Jan 22 12:16:18 crc kubenswrapper[4773]: I0122 12:16:18.036393 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8db84466c-mm9ts"]
Jan 22 12:16:18 crc kubenswrapper[4773]: E0122 12:16:18.672710 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49"
Jan 22 12:16:18 crc kubenswrapper[4773]: E0122 12:16:18.673987 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rmw2g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-ld8bk_openstack(4958bcb5-02cc-4c59-8b85-c386263dd3b8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 22 12:16:18 crc kubenswrapper[4773]: I0122 12:16:18.673036 4773 scope.go:117] "RemoveContainer" containerID="989908dd66422bae3ea5f3954f52b791fb611e8c99d00833e9706afd190e436d"
Jan 22 12:16:18 crc kubenswrapper[4773]: E0122 12:16:18.675636 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-ld8bk" podUID="4958bcb5-02cc-4c59-8b85-c386263dd3b8"
Jan 22 12:16:18 crc kubenswrapper[4773]: I0122 12:16:18.682842 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9b10ad1-cb79-4231-b540-04a494438839" path="/var/lib/kubelet/pods/c9b10ad1-cb79-4231-b540-04a494438839/volumes"
Jan 22 12:16:18 crc kubenswrapper[4773]: E0122 12:16:18.703535 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49\\\"\"" pod="openstack/cinder-db-sync-ld8bk" podUID="4958bcb5-02cc-4c59-8b85-c386263dd3b8"
Jan 22 12:16:18 crc kubenswrapper[4773]: I0122 12:16:18.945163 4773 scope.go:117] "RemoveContainer" containerID="854a7d431a7c49e3c06c6f46c1bdc8b8a489f28e77572d4bd867f5bbc9ed6b6a"
Jan 22 12:16:18 crc kubenswrapper[4773]: I0122 12:16:18.983365 4773 scope.go:117] "RemoveContainer" containerID="91abf7692787e0f6b27c2a534bf19bf8916cd71be0da7570473523b1c28acafd"
Jan 22 12:16:19 crc kubenswrapper[4773]: I0122 12:16:19.032304 4773 scope.go:117] "RemoveContainer" containerID="9f58fa38a9d9aa771343f7e1bb26057f5ba9c615f90418c8ff99eab8a04440e2"
Jan 22 12:16:19 crc kubenswrapper[4773]: I0122 12:16:19.062944 4773 scope.go:117] "RemoveContainer" containerID="d2c64ed08bd7d2c55f11bd50f0b6739a7c972c8d23e4edffd7b73003cc73a03c"
Jan 22 12:16:19 crc kubenswrapper[4773]: I0122 12:16:19.310224 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 22 12:16:19 crc kubenswrapper[4773]: W0122 12:16:19.317214 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66b1ac35_e977_43f0_8538_fb7da5bafde0.slice/crio-a40e1ae31ca9ceacefb2b5069d33728f1efea6e27518d03d884ab8453179bfea WatchSource:0}: Error finding container a40e1ae31ca9ceacefb2b5069d33728f1efea6e27518d03d884ab8453179bfea: Status 404 returned error can't find the container with id a40e1ae31ca9ceacefb2b5069d33728f1efea6e27518d03d884ab8453179bfea
Jan 22 12:16:19 crc kubenswrapper[4773]: I0122 12:16:19.326934 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-pjn7v"]
Jan 22 12:16:19 crc kubenswrapper[4773]: I0122 12:16:19.441809 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 22 12:16:19 crc kubenswrapper[4773]: W0122 12:16:19.457458 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc5afcb43_61a0_47ad_923e_ca320d6ca49f.slice/crio-226252d5dc51b11698c2f0c7c9c92dd73a7fe7d37ac97d38307ddbd6d85b67d2 WatchSource:0}: Error finding container 226252d5dc51b11698c2f0c7c9c92dd73a7fe7d37ac97d38307ddbd6d85b67d2: Status 404 returned error can't find the container with id 226252d5dc51b11698c2f0c7c9c92dd73a7fe7d37ac97d38307ddbd6d85b67d2
Jan 22 12:16:19 crc kubenswrapper[4773]: I0122 12:16:19.708609 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-4p9mz" event={"ID":"93c45ab9-7e10-4217-93fa-5d801fb55c6d","Type":"ContainerStarted","Data":"c3e7bf2d8e9482874620abd54b6917871f15ea49d5e6938c1ed6c6f4ead38926"} Jan 22 12:16:19 crc kubenswrapper[4773]: I0122 12:16:19.710533 4773 generic.go:334] "Generic (PLEG): container finished" podID="1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5" containerID="83c0def5b56b44cb9f1f01205a511238aa72173c21dd369de734bca93e5a1030" exitCode=0 Jan 22 12:16:19 crc kubenswrapper[4773]: I0122 12:16:19.710587 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-vqtpx" event={"ID":"1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5","Type":"ContainerDied","Data":"83c0def5b56b44cb9f1f01205a511238aa72173c21dd369de734bca93e5a1030"} Jan 22 12:16:19 crc kubenswrapper[4773]: I0122 12:16:19.712746 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"66b1ac35-e977-43f0-8538-fb7da5bafde0","Type":"ContainerStarted","Data":"a40e1ae31ca9ceacefb2b5069d33728f1efea6e27518d03d884ab8453179bfea"} Jan 22 12:16:19 crc kubenswrapper[4773]: I0122 12:16:19.714947 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-pjn7v" event={"ID":"209de0ff-2316-479c-8ff1-62e1ed260807","Type":"ContainerStarted","Data":"92e115034ee29042f3b09c0140b70b158d673295708a85c55b272e8fff093d15"} Jan 22 12:16:19 crc kubenswrapper[4773]: I0122 12:16:19.714972 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-pjn7v" event={"ID":"209de0ff-2316-479c-8ff1-62e1ed260807","Type":"ContainerStarted","Data":"cb66227881baef735f0f20253e436d78e5462584ee52b1a6f49cde1cf7fa0c31"} Jan 22 12:16:19 crc kubenswrapper[4773]: I0122 12:16:19.718847 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c5afcb43-61a0-47ad-923e-ca320d6ca49f","Type":"ContainerStarted","Data":"226252d5dc51b11698c2f0c7c9c92dd73a7fe7d37ac97d38307ddbd6d85b67d2"} Jan 22 12:16:19 crc kubenswrapper[4773]: I0122 12:16:19.720765 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7ce9300d-fcb4-43e9-810f-5000109e6799","Type":"ContainerStarted","Data":"44593c6222305a57348670a7516853ed2d97360e051ca06265b02058e455a08f"} Jan 22 12:16:19 crc kubenswrapper[4773]: I0122 12:16:19.722063 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fg4x7" event={"ID":"e83eccf7-20cf-4226-b994-5d00f3cef915","Type":"ContainerStarted","Data":"00074010fbe5c8eb219e7350840b0f486f659b0e42b97f83273dec4f5a642383"} Jan 22 12:16:19 crc kubenswrapper[4773]: I0122 12:16:19.734365 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-4p9mz" podStartSLOduration=2.898897329 podStartE2EDuration="26.734342402s" podCreationTimestamp="2026-01-22 12:15:53 +0000 UTC" firstStartedPulling="2026-01-22 12:15:54.841796417 +0000 UTC m=+1262.419912242" lastFinishedPulling="2026-01-22 12:16:18.67724149 +0000 UTC m=+1286.255357315" observedRunningTime="2026-01-22 12:16:19.729530416 +0000 UTC m=+1287.307646271" watchObservedRunningTime="2026-01-22 12:16:19.734342402 +0000 UTC m=+1287.312458227" Jan 22 12:16:19 crc kubenswrapper[4773]: I0122 12:16:19.761336 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-pjn7v" 
podStartSLOduration=16.76131463 podStartE2EDuration="16.76131463s" podCreationTimestamp="2026-01-22 12:16:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:16:19.747789815 +0000 UTC m=+1287.325905640" watchObservedRunningTime="2026-01-22 12:16:19.76131463 +0000 UTC m=+1287.339430455" Jan 22 12:16:19 crc kubenswrapper[4773]: I0122 12:16:19.777428 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-fg4x7" podStartSLOduration=3.156775169 podStartE2EDuration="26.777404788s" podCreationTimestamp="2026-01-22 12:15:53 +0000 UTC" firstStartedPulling="2026-01-22 12:15:55.015342676 +0000 UTC m=+1262.593458501" lastFinishedPulling="2026-01-22 12:16:18.635972295 +0000 UTC m=+1286.214088120" observedRunningTime="2026-01-22 12:16:19.773222869 +0000 UTC m=+1287.351338694" watchObservedRunningTime="2026-01-22 12:16:19.777404788 +0000 UTC m=+1287.355520613" Jan 22 12:16:20 crc kubenswrapper[4773]: I0122 12:16:20.737870 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"66b1ac35-e977-43f0-8538-fb7da5bafde0","Type":"ContainerStarted","Data":"8b7e3bc24eebd0a78ec08ceea26182613e0d300d43dd831e2e3b571928f96d66"} Jan 22 12:16:20 crc kubenswrapper[4773]: I0122 12:16:20.742109 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c5afcb43-61a0-47ad-923e-ca320d6ca49f","Type":"ContainerStarted","Data":"62a809250ea715b7545720b8c98b1d4eacbcabc63db058498f2e5801c049400f"} Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.392583 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-vqtpx" Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.443810 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5-combined-ca-bundle\") pod \"1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5\" (UID: \"1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5\") " Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.443878 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-plpxb\" (UniqueName: \"kubernetes.io/projected/1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5-kube-api-access-plpxb\") pod \"1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5\" (UID: \"1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5\") " Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.443968 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5-config\") pod \"1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5\" (UID: \"1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5\") " Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.451402 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5-kube-api-access-plpxb" (OuterVolumeSpecName: "kube-api-access-plpxb") pod "1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5" (UID: "1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5"). InnerVolumeSpecName "kube-api-access-plpxb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.483048 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5-config" (OuterVolumeSpecName: "config") pod "1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5" (UID: "1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.502719 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5" (UID: "1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.545905 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.545940 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-plpxb\" (UniqueName: \"kubernetes.io/projected/1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5-kube-api-access-plpxb\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.545955 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.753300 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-vqtpx" event={"ID":"1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5","Type":"ContainerDied","Data":"656db3c7383c4324e10859c52697dcac61977560cb382dbbc9876229554ef8ac"} Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.753357 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="656db3c7383c4324e10859c52697dcac61977560cb382dbbc9876229554ef8ac" Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.753440 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-vqtpx" Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.768734 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"66b1ac35-e977-43f0-8538-fb7da5bafde0","Type":"ContainerStarted","Data":"37b3c172cbc4ff835a36f59bab0eae5c4789ae4e7e6df666e420d804dfcde4db"} Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.775648 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c5afcb43-61a0-47ad-923e-ca320d6ca49f","Type":"ContainerStarted","Data":"e0924ddb10c7e40d89685c54ed6aeb7d7a8edc28249c54bea1999a2b871f9f89"} Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.778408 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7ce9300d-fcb4-43e9-810f-5000109e6799","Type":"ContainerStarted","Data":"a40f43b556ac9db86063b83ae6174312496308bcc17641ac80e35dc0afa3a783"} Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.805218 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=12.805198262 podStartE2EDuration="12.805198262s" podCreationTimestamp="2026-01-22 12:16:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:16:21.801580149 +0000 UTC m=+1289.379695994" watchObservedRunningTime="2026-01-22 12:16:21.805198262 +0000 UTC m=+1289.383314087" Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.972458 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=12.972431773 podStartE2EDuration="12.972431773s" podCreationTimestamp="2026-01-22 12:16:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:16:21.841770873 +0000 UTC m=+1289.419886698" watchObservedRunningTime="2026-01-22 12:16:21.972431773 +0000 UTC m=+1289.550547598" Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.976257 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-685444497c-dbxfd"] Jan 22 12:16:21 crc kubenswrapper[4773]: E0122 12:16:21.976730 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5" containerName="neutron-db-sync" Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.976755 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5" containerName="neutron-db-sync" Jan 22 12:16:21 crc kubenswrapper[4773]: E0122 12:16:21.976783 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9b10ad1-cb79-4231-b540-04a494438839" containerName="init" Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.976793 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9b10ad1-cb79-4231-b540-04a494438839" containerName="init" Jan 22 12:16:21 crc kubenswrapper[4773]: E0122 12:16:21.976807 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9b10ad1-cb79-4231-b540-04a494438839" containerName="dnsmasq-dns" Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.976815 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9b10ad1-cb79-4231-b540-04a494438839" containerName="dnsmasq-dns" Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.977034 4773 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5" containerName="neutron-db-sync" Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.977055 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9b10ad1-cb79-4231-b540-04a494438839" containerName="dnsmasq-dns" Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.978134 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:21 crc kubenswrapper[4773]: I0122 12:16:21.994706 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-685444497c-dbxfd"] Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.064345 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-649m2\" (UniqueName: \"kubernetes.io/projected/86846e17-f5a8-47a9-8a11-29fcb798e052-kube-api-access-649m2\") pod \"dnsmasq-dns-685444497c-dbxfd\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.064543 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-dns-svc\") pod \"dnsmasq-dns-685444497c-dbxfd\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.064635 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-config\") pod \"dnsmasq-dns-685444497c-dbxfd\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.064667 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-ovsdbserver-nb\") pod \"dnsmasq-dns-685444497c-dbxfd\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.064697 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-ovsdbserver-sb\") pod \"dnsmasq-dns-685444497c-dbxfd\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.064751 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-dns-swift-storage-0\") pod \"dnsmasq-dns-685444497c-dbxfd\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.106766 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-55bdb45cbd-kgx4q"] Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.108446 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.113251 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.113451 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.113611 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.115408 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-hsdl7" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.122696 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-55bdb45cbd-kgx4q"] Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.166963 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-ovndb-tls-certs\") pod \"neutron-55bdb45cbd-kgx4q\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.167084 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-combined-ca-bundle\") pod \"neutron-55bdb45cbd-kgx4q\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.167161 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-dns-svc\") pod \"dnsmasq-dns-685444497c-dbxfd\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.167221 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-httpd-config\") pod \"neutron-55bdb45cbd-kgx4q\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.167325 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-config\") pod \"dnsmasq-dns-685444497c-dbxfd\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.167359 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbgjk\" (UniqueName: \"kubernetes.io/projected/a3132a41-c2bd-4cbb-b319-25982363decb-kube-api-access-sbgjk\") pod \"neutron-55bdb45cbd-kgx4q\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.167397 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-ovsdbserver-nb\") pod \"dnsmasq-dns-685444497c-dbxfd\" (UID: 
\"86846e17-f5a8-47a9-8a11-29fcb798e052\") " pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.167442 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-ovsdbserver-sb\") pod \"dnsmasq-dns-685444497c-dbxfd\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.167511 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-dns-swift-storage-0\") pod \"dnsmasq-dns-685444497c-dbxfd\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.167568 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-649m2\" (UniqueName: \"kubernetes.io/projected/86846e17-f5a8-47a9-8a11-29fcb798e052-kube-api-access-649m2\") pod \"dnsmasq-dns-685444497c-dbxfd\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.167601 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-config\") pod \"neutron-55bdb45cbd-kgx4q\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.174955 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-dns-svc\") pod \"dnsmasq-dns-685444497c-dbxfd\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.176003 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-config\") pod \"dnsmasq-dns-685444497c-dbxfd\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.176573 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-ovsdbserver-nb\") pod \"dnsmasq-dns-685444497c-dbxfd\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.177124 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-ovsdbserver-sb\") pod \"dnsmasq-dns-685444497c-dbxfd\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.177719 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-dns-swift-storage-0\") pod \"dnsmasq-dns-685444497c-dbxfd\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 
12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.197908 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-649m2\" (UniqueName: \"kubernetes.io/projected/86846e17-f5a8-47a9-8a11-29fcb798e052-kube-api-access-649m2\") pod \"dnsmasq-dns-685444497c-dbxfd\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.269454 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-config\") pod \"neutron-55bdb45cbd-kgx4q\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.269509 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-ovndb-tls-certs\") pod \"neutron-55bdb45cbd-kgx4q\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.269542 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-combined-ca-bundle\") pod \"neutron-55bdb45cbd-kgx4q\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.269588 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-httpd-config\") pod \"neutron-55bdb45cbd-kgx4q\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.269639 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbgjk\" (UniqueName: \"kubernetes.io/projected/a3132a41-c2bd-4cbb-b319-25982363decb-kube-api-access-sbgjk\") pod \"neutron-55bdb45cbd-kgx4q\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.275882 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-httpd-config\") pod \"neutron-55bdb45cbd-kgx4q\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.277239 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-config\") pod \"neutron-55bdb45cbd-kgx4q\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.278826 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-ovndb-tls-certs\") pod \"neutron-55bdb45cbd-kgx4q\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.279755 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-combined-ca-bundle\") pod \"neutron-55bdb45cbd-kgx4q\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.289343 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbgjk\" (UniqueName: \"kubernetes.io/projected/a3132a41-c2bd-4cbb-b319-25982363decb-kube-api-access-sbgjk\") pod \"neutron-55bdb45cbd-kgx4q\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.308633 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.427963 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.645953 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-685444497c-dbxfd"] Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.788583 4773 generic.go:334] "Generic (PLEG): container finished" podID="e83eccf7-20cf-4226-b994-5d00f3cef915" containerID="00074010fbe5c8eb219e7350840b0f486f659b0e42b97f83273dec4f5a642383" exitCode=0 Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.788965 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fg4x7" event={"ID":"e83eccf7-20cf-4226-b994-5d00f3cef915","Type":"ContainerDied","Data":"00074010fbe5c8eb219e7350840b0f486f659b0e42b97f83273dec4f5a642383"} Jan 22 12:16:22 crc kubenswrapper[4773]: I0122 12:16:22.805564 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-685444497c-dbxfd" event={"ID":"86846e17-f5a8-47a9-8a11-29fcb798e052","Type":"ContainerStarted","Data":"e5def578050211e9a322cc7a45e087530558db3456aecafb7a1b954b58c73576"} Jan 22 12:16:23 crc kubenswrapper[4773]: I0122 12:16:23.148637 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-55bdb45cbd-kgx4q"] Jan 22 12:16:23 crc kubenswrapper[4773]: W0122 12:16:23.154761 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3132a41_c2bd_4cbb_b319_25982363decb.slice/crio-8fae5ebcb3266c7cdd0041a297774d2cb2a3f340d7159b66ff0e6c4eedc41233 WatchSource:0}: Error finding container 8fae5ebcb3266c7cdd0041a297774d2cb2a3f340d7159b66ff0e6c4eedc41233: Status 404 returned error can't find the container with id 8fae5ebcb3266c7cdd0041a297774d2cb2a3f340d7159b66ff0e6c4eedc41233 Jan 22 12:16:23 crc kubenswrapper[4773]: I0122 12:16:23.862010 4773 generic.go:334] "Generic (PLEG): container finished" podID="209de0ff-2316-479c-8ff1-62e1ed260807" containerID="92e115034ee29042f3b09c0140b70b158d673295708a85c55b272e8fff093d15" exitCode=0 Jan 22 12:16:23 crc kubenswrapper[4773]: I0122 12:16:23.862566 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-pjn7v" event={"ID":"209de0ff-2316-479c-8ff1-62e1ed260807","Type":"ContainerDied","Data":"92e115034ee29042f3b09c0140b70b158d673295708a85c55b272e8fff093d15"} Jan 22 12:16:23 crc kubenswrapper[4773]: I0122 12:16:23.867591 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55bdb45cbd-kgx4q" 
event={"ID":"a3132a41-c2bd-4cbb-b319-25982363decb","Type":"ContainerStarted","Data":"c053b77bbc8f7fc214ae24aaf290772192e2756cd5ebe3bc8c9599a136cd5b5a"} Jan 22 12:16:23 crc kubenswrapper[4773]: I0122 12:16:23.867636 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55bdb45cbd-kgx4q" event={"ID":"a3132a41-c2bd-4cbb-b319-25982363decb","Type":"ContainerStarted","Data":"f07f56c5958231bdfb779ff9ba539a12ed4d74e6933ec48919e9aa5eef979d6c"} Jan 22 12:16:23 crc kubenswrapper[4773]: I0122 12:16:23.867646 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55bdb45cbd-kgx4q" event={"ID":"a3132a41-c2bd-4cbb-b319-25982363decb","Type":"ContainerStarted","Data":"8fae5ebcb3266c7cdd0041a297774d2cb2a3f340d7159b66ff0e6c4eedc41233"} Jan 22 12:16:23 crc kubenswrapper[4773]: I0122 12:16:23.868098 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:16:23 crc kubenswrapper[4773]: I0122 12:16:23.870521 4773 generic.go:334] "Generic (PLEG): container finished" podID="86846e17-f5a8-47a9-8a11-29fcb798e052" containerID="93d19dd8502242005929e4fbf6b2618a144a4495b3be5cb5ae3bf349abb6bc65" exitCode=0 Jan 22 12:16:23 crc kubenswrapper[4773]: I0122 12:16:23.870568 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-685444497c-dbxfd" event={"ID":"86846e17-f5a8-47a9-8a11-29fcb798e052","Type":"ContainerDied","Data":"93d19dd8502242005929e4fbf6b2618a144a4495b3be5cb5ae3bf349abb6bc65"} Jan 22 12:16:23 crc kubenswrapper[4773]: I0122 12:16:23.873187 4773 generic.go:334] "Generic (PLEG): container finished" podID="93c45ab9-7e10-4217-93fa-5d801fb55c6d" containerID="c3e7bf2d8e9482874620abd54b6917871f15ea49d5e6938c1ed6c6f4ead38926" exitCode=0 Jan 22 12:16:23 crc kubenswrapper[4773]: I0122 12:16:23.873487 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-4p9mz" event={"ID":"93c45ab9-7e10-4217-93fa-5d801fb55c6d","Type":"ContainerDied","Data":"c3e7bf2d8e9482874620abd54b6917871f15ea49d5e6938c1ed6c6f4ead38926"} Jan 22 12:16:23 crc kubenswrapper[4773]: I0122 12:16:23.918031 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-55bdb45cbd-kgx4q" podStartSLOduration=1.918012496 podStartE2EDuration="1.918012496s" podCreationTimestamp="2026-01-22 12:16:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:16:23.9072537 +0000 UTC m=+1291.485369535" watchObservedRunningTime="2026-01-22 12:16:23.918012496 +0000 UTC m=+1291.496128321" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.343523 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6f7559d9d9-kljj4"] Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.345617 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.355770 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.358181 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.368191 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6f7559d9d9-kljj4"] Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.389337 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-fg4x7" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.422061 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e83eccf7-20cf-4226-b994-5d00f3cef915-config-data\") pod \"e83eccf7-20cf-4226-b994-5d00f3cef915\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.422107 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e83eccf7-20cf-4226-b994-5d00f3cef915-combined-ca-bundle\") pod \"e83eccf7-20cf-4226-b994-5d00f3cef915\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.422142 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e83eccf7-20cf-4226-b994-5d00f3cef915-scripts\") pod \"e83eccf7-20cf-4226-b994-5d00f3cef915\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.422185 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4lgqt\" (UniqueName: \"kubernetes.io/projected/e83eccf7-20cf-4226-b994-5d00f3cef915-kube-api-access-4lgqt\") pod \"e83eccf7-20cf-4226-b994-5d00f3cef915\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.422211 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e83eccf7-20cf-4226-b994-5d00f3cef915-logs\") pod \"e83eccf7-20cf-4226-b994-5d00f3cef915\" (UID: \"e83eccf7-20cf-4226-b994-5d00f3cef915\") " Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.422516 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-internal-tls-certs\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.422597 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-httpd-config\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.422624 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-combined-ca-bundle\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.422652 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-config\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.422681 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-ovndb-tls-certs\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.422716 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chh2t\" (UniqueName: \"kubernetes.io/projected/dfbe6420-884f-4bc0-acf3-a518df4580d9-kube-api-access-chh2t\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.422771 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-public-tls-certs\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.424464 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e83eccf7-20cf-4226-b994-5d00f3cef915-logs" (OuterVolumeSpecName: "logs") pod "e83eccf7-20cf-4226-b994-5d00f3cef915" (UID: "e83eccf7-20cf-4226-b994-5d00f3cef915"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.430128 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e83eccf7-20cf-4226-b994-5d00f3cef915-kube-api-access-4lgqt" (OuterVolumeSpecName: "kube-api-access-4lgqt") pod "e83eccf7-20cf-4226-b994-5d00f3cef915" (UID: "e83eccf7-20cf-4226-b994-5d00f3cef915"). InnerVolumeSpecName "kube-api-access-4lgqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.444383 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e83eccf7-20cf-4226-b994-5d00f3cef915-scripts" (OuterVolumeSpecName: "scripts") pod "e83eccf7-20cf-4226-b994-5d00f3cef915" (UID: "e83eccf7-20cf-4226-b994-5d00f3cef915"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.473484 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e83eccf7-20cf-4226-b994-5d00f3cef915-config-data" (OuterVolumeSpecName: "config-data") pod "e83eccf7-20cf-4226-b994-5d00f3cef915" (UID: "e83eccf7-20cf-4226-b994-5d00f3cef915"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.477046 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e83eccf7-20cf-4226-b994-5d00f3cef915-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e83eccf7-20cf-4226-b994-5d00f3cef915" (UID: "e83eccf7-20cf-4226-b994-5d00f3cef915"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.523924 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-public-tls-certs\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.524028 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-internal-tls-certs\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.524064 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-httpd-config\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.524087 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-combined-ca-bundle\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.524111 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-config\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.524136 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-ovndb-tls-certs\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.524161 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chh2t\" (UniqueName: \"kubernetes.io/projected/dfbe6420-884f-4bc0-acf3-a518df4580d9-kube-api-access-chh2t\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.524210 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e83eccf7-20cf-4226-b994-5d00f3cef915-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.524221 4773 reconciler_common.go:293] "Volume detached for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e83eccf7-20cf-4226-b994-5d00f3cef915-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.524233 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e83eccf7-20cf-4226-b994-5d00f3cef915-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.524242 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4lgqt\" (UniqueName: \"kubernetes.io/projected/e83eccf7-20cf-4226-b994-5d00f3cef915-kube-api-access-4lgqt\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.524251 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e83eccf7-20cf-4226-b994-5d00f3cef915-logs\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.529094 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-combined-ca-bundle\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.529297 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-httpd-config\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.530670 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-ovndb-tls-certs\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.530699 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-public-tls-certs\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.531223 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-internal-tls-certs\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.532371 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-config\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.552314 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chh2t\" (UniqueName: \"kubernetes.io/projected/dfbe6420-884f-4bc0-acf3-a518df4580d9-kube-api-access-chh2t\") pod \"neutron-6f7559d9d9-kljj4\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 
Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.708274 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6f7559d9d9-kljj4"
Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.898899 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-fg4x7"
Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.899011 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-fg4x7" event={"ID":"e83eccf7-20cf-4226-b994-5d00f3cef915","Type":"ContainerDied","Data":"7f5e0d4b47b581eed1590fb0bb72c95c5a3d7979faebe1f8037d187fc5f89db4"}
Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.899296 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f5e0d4b47b581eed1590fb0bb72c95c5a3d7979faebe1f8037d187fc5f89db4"
Jan 22 12:16:24 crc kubenswrapper[4773]: I0122 12:16:24.901081 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-685444497c-dbxfd" event={"ID":"86846e17-f5a8-47a9-8a11-29fcb798e052","Type":"ContainerStarted","Data":"2fdf64fdfb5b62e8a7db7c7f6a781548fab524967bb99f85c9bf601f99e10e9d"}
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.006813 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-685444497c-dbxfd" podStartSLOduration=4.00678858 podStartE2EDuration="4.00678858s" podCreationTimestamp="2026-01-22 12:16:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:16:24.924488217 +0000 UTC m=+1292.502604042" watchObservedRunningTime="2026-01-22 12:16:25.00678858 +0000 UTC m=+1292.584904405"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.009982 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-77688c4666-rzbr7"]
Jan 22 12:16:25 crc kubenswrapper[4773]: E0122 12:16:25.010454 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e83eccf7-20cf-4226-b994-5d00f3cef915" containerName="placement-db-sync"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.010476 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="e83eccf7-20cf-4226-b994-5d00f3cef915" containerName="placement-db-sync"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.010700 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="e83eccf7-20cf-4226-b994-5d00f3cef915" containerName="placement-db-sync"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.012275 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-77688c4666-rzbr7"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.021104 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.021431 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.021472 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.021717 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-js8m7"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.015581 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.045117 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-77688c4666-rzbr7"]
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.139327 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-internal-tls-certs\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.139419 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-config-data\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.139447 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/878a6ce0-f293-4690-9049-c90155c56ff3-logs\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.139500 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-combined-ca-bundle\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.139529 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-st7c6\" (UniqueName: \"kubernetes.io/projected/878a6ce0-f293-4690-9049-c90155c56ff3-kube-api-access-st7c6\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.139559 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-public-tls-certs\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.139600 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-scripts\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.242271 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-internal-tls-certs\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.242397 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-config-data\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.242456 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/878a6ce0-f293-4690-9049-c90155c56ff3-logs\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.242536 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-combined-ca-bundle\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.242559 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-st7c6\" (UniqueName: \"kubernetes.io/projected/878a6ce0-f293-4690-9049-c90155c56ff3-kube-api-access-st7c6\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.242626 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-public-tls-certs\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.242679 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-scripts\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7"
Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.244732 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/878a6ce0-f293-4690-9049-c90155c56ff3-logs\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7"
\"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-combined-ca-bundle\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7" Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.260358 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-scripts\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7" Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.261344 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-internal-tls-certs\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7" Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.272120 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-public-tls-certs\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7" Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.279526 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-st7c6\" (UniqueName: \"kubernetes.io/projected/878a6ce0-f293-4690-9049-c90155c56ff3-kube-api-access-st7c6\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7" Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.282848 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-config-data\") pod \"placement-77688c4666-rzbr7\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " pod="openstack/placement-77688c4666-rzbr7" Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.340792 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6f7559d9d9-kljj4"] Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.356959 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-77688c4666-rzbr7" Jan 22 12:16:25 crc kubenswrapper[4773]: I0122 12:16:25.914232 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.729401 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-4p9mz" Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.772737 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.834258 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93c45ab9-7e10-4217-93fa-5d801fb55c6d-combined-ca-bundle\") pod \"93c45ab9-7e10-4217-93fa-5d801fb55c6d\" (UID: \"93c45ab9-7e10-4217-93fa-5d801fb55c6d\") " Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.836070 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjhkr\" (UniqueName: \"kubernetes.io/projected/93c45ab9-7e10-4217-93fa-5d801fb55c6d-kube-api-access-rjhkr\") pod \"93c45ab9-7e10-4217-93fa-5d801fb55c6d\" (UID: \"93c45ab9-7e10-4217-93fa-5d801fb55c6d\") " Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.837625 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/93c45ab9-7e10-4217-93fa-5d801fb55c6d-db-sync-config-data\") pod \"93c45ab9-7e10-4217-93fa-5d801fb55c6d\" (UID: \"93c45ab9-7e10-4217-93fa-5d801fb55c6d\") " Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.844499 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93c45ab9-7e10-4217-93fa-5d801fb55c6d-kube-api-access-rjhkr" (OuterVolumeSpecName: "kube-api-access-rjhkr") pod "93c45ab9-7e10-4217-93fa-5d801fb55c6d" (UID: "93c45ab9-7e10-4217-93fa-5d801fb55c6d"). InnerVolumeSpecName "kube-api-access-rjhkr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.845125 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93c45ab9-7e10-4217-93fa-5d801fb55c6d-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "93c45ab9-7e10-4217-93fa-5d801fb55c6d" (UID: "93c45ab9-7e10-4217-93fa-5d801fb55c6d"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.899109 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93c45ab9-7e10-4217-93fa-5d801fb55c6d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "93c45ab9-7e10-4217-93fa-5d801fb55c6d" (UID: "93c45ab9-7e10-4217-93fa-5d801fb55c6d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.899273 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-77688c4666-rzbr7"] Jan 22 12:16:29 crc kubenswrapper[4773]: W0122 12:16:29.904053 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod878a6ce0_f293_4690_9049_c90155c56ff3.slice/crio-9ba9b8d0dfb5620da2a7deab5d6a462091a33d2acd252964575c7a3ceda5b006 WatchSource:0}: Error finding container 9ba9b8d0dfb5620da2a7deab5d6a462091a33d2acd252964575c7a3ceda5b006: Status 404 returned error can't find the container with id 9ba9b8d0dfb5620da2a7deab5d6a462091a33d2acd252964575c7a3ceda5b006 Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.940935 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-credential-keys\") pod \"209de0ff-2316-479c-8ff1-62e1ed260807\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.941076 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-config-data\") pod \"209de0ff-2316-479c-8ff1-62e1ed260807\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.941162 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5lzk\" (UniqueName: \"kubernetes.io/projected/209de0ff-2316-479c-8ff1-62e1ed260807-kube-api-access-f5lzk\") pod \"209de0ff-2316-479c-8ff1-62e1ed260807\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.941275 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-combined-ca-bundle\") pod \"209de0ff-2316-479c-8ff1-62e1ed260807\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.941325 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-scripts\") pod \"209de0ff-2316-479c-8ff1-62e1ed260807\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.941398 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-fernet-keys\") pod \"209de0ff-2316-479c-8ff1-62e1ed260807\" (UID: \"209de0ff-2316-479c-8ff1-62e1ed260807\") " Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.942711 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93c45ab9-7e10-4217-93fa-5d801fb55c6d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.942748 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjhkr\" (UniqueName: \"kubernetes.io/projected/93c45ab9-7e10-4217-93fa-5d801fb55c6d-kube-api-access-rjhkr\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.942766 4773 reconciler_common.go:293] "Volume detached for volume 
\"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/93c45ab9-7e10-4217-93fa-5d801fb55c6d-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.948424 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "209de0ff-2316-479c-8ff1-62e1ed260807" (UID: "209de0ff-2316-479c-8ff1-62e1ed260807"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.948508 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-scripts" (OuterVolumeSpecName: "scripts") pod "209de0ff-2316-479c-8ff1-62e1ed260807" (UID: "209de0ff-2316-479c-8ff1-62e1ed260807"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.949861 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "209de0ff-2316-479c-8ff1-62e1ed260807" (UID: "209de0ff-2316-479c-8ff1-62e1ed260807"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.950244 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/209de0ff-2316-479c-8ff1-62e1ed260807-kube-api-access-f5lzk" (OuterVolumeSpecName: "kube-api-access-f5lzk") pod "209de0ff-2316-479c-8ff1-62e1ed260807" (UID: "209de0ff-2316-479c-8ff1-62e1ed260807"). InnerVolumeSpecName "kube-api-access-f5lzk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.956836 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f7559d9d9-kljj4" event={"ID":"dfbe6420-884f-4bc0-acf3-a518df4580d9","Type":"ContainerStarted","Data":"25d89a3b3bf70ad872eb445f5cf323a49760461d01cfed204a7ee6e53d5bb535"} Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.956935 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f7559d9d9-kljj4" event={"ID":"dfbe6420-884f-4bc0-acf3-a518df4580d9","Type":"ContainerStarted","Data":"5c00b20e125090526df9ad81f3ee67ff8245895e620956ac6e276d63146bdabf"} Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.959690 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-4p9mz" Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.959699 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-4p9mz" event={"ID":"93c45ab9-7e10-4217-93fa-5d801fb55c6d","Type":"ContainerDied","Data":"45352a9351ded2c7335de28dc951e32646ccb1dcd868d068bfb4331ff28fb486"} Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.959772 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45352a9351ded2c7335de28dc951e32646ccb1dcd868d068bfb4331ff28fb486" Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.969371 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "209de0ff-2316-479c-8ff1-62e1ed260807" (UID: "209de0ff-2316-479c-8ff1-62e1ed260807"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.970755 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7ce9300d-fcb4-43e9-810f-5000109e6799","Type":"ContainerStarted","Data":"d50beff7b691717d8c6f0ffe613cc839800206419ad9ec33400f194d69d7f41e"} Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.974438 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-pjn7v" event={"ID":"209de0ff-2316-479c-8ff1-62e1ed260807","Type":"ContainerDied","Data":"cb66227881baef735f0f20253e436d78e5462584ee52b1a6f49cde1cf7fa0c31"} Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.974688 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb66227881baef735f0f20253e436d78e5462584ee52b1a6f49cde1cf7fa0c31" Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.974814 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-pjn7v" Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.976640 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-77688c4666-rzbr7" event={"ID":"878a6ce0-f293-4690-9049-c90155c56ff3","Type":"ContainerStarted","Data":"9ba9b8d0dfb5620da2a7deab5d6a462091a33d2acd252964575c7a3ceda5b006"} Jan 22 12:16:29 crc kubenswrapper[4773]: I0122 12:16:29.989544 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-config-data" (OuterVolumeSpecName: "config-data") pod "209de0ff-2316-479c-8ff1-62e1ed260807" (UID: "209de0ff-2316-479c-8ff1-62e1ed260807"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.044352 4773 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.044388 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.044398 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5lzk\" (UniqueName: \"kubernetes.io/projected/209de0ff-2316-479c-8ff1-62e1ed260807-kube-api-access-f5lzk\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.044408 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.044420 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.044429 4773 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/209de0ff-2316-479c-8ff1-62e1ed260807-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.547464 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.547802 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.547815 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.547825 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.606054 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.606168 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.606805 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.612631 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.936967 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-585c9c99b7-xwz6v"] Jan 22 12:16:30 crc kubenswrapper[4773]: E0122 12:16:30.938546 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93c45ab9-7e10-4217-93fa-5d801fb55c6d" containerName="barbican-db-sync" Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.939118 4773 
Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.939118 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="93c45ab9-7e10-4217-93fa-5d801fb55c6d" containerName="barbican-db-sync"
Jan 22 12:16:30 crc kubenswrapper[4773]: E0122 12:16:30.939219 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="209de0ff-2316-479c-8ff1-62e1ed260807" containerName="keystone-bootstrap"
Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.939234 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="209de0ff-2316-479c-8ff1-62e1ed260807" containerName="keystone-bootstrap"
Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.939602 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="209de0ff-2316-479c-8ff1-62e1ed260807" containerName="keystone-bootstrap"
Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.939676 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="93c45ab9-7e10-4217-93fa-5d801fb55c6d" containerName="barbican-db-sync"
Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.940900 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-585c9c99b7-xwz6v"
Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.943602 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-92k4l"
Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.946024 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.946416 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc"
Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.946483 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.946916 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.955719 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-585c9c99b7-xwz6v"]
Jan 22 12:16:30 crc kubenswrapper[4773]: I0122 12:16:30.956971 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.022327 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-77688c4666-rzbr7" event={"ID":"878a6ce0-f293-4690-9049-c90155c56ff3","Type":"ContainerStarted","Data":"bca2d75d3d19f7bde38ac5639321578fd5fe1d01e28a264520eae5ab44fb18dd"}
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.022384 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-77688c4666-rzbr7" event={"ID":"878a6ce0-f293-4690-9049-c90155c56ff3","Type":"ContainerStarted","Data":"ed05a9e5edc313b45c9c66ec916cf3d4f450ad7afe2d3bf8f73e940333547c9c"}
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.024213 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-77688c4666-rzbr7"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.024264 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-77688c4666-rzbr7"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.028221 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f7559d9d9-kljj4" event={"ID":"dfbe6420-884f-4bc0-acf3-a518df4580d9","Type":"ContainerStarted","Data":"041e1a8a7930cad7131da3d75b95ab2b629ba8232a60301999e4dad3d131e952"}
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.028347 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.029056 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.029096 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.029113 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.029335 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6f7559d9d9-kljj4"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.080785 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-scripts\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.080846 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jccs\" (UniqueName: \"kubernetes.io/projected/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-kube-api-access-9jccs\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.080894 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-fernet-keys\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.080959 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-combined-ca-bundle\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.080985 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-credential-keys\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.081037 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-public-tls-certs\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.081144 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-internal-tls-certs\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.081177 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-config-data\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.107883 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-77688c4666-rzbr7" podStartSLOduration=7.107856035 podStartE2EDuration="7.107856035s" podCreationTimestamp="2026-01-22 12:16:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:16:31.094863249 +0000 UTC m=+1298.672979094" watchObservedRunningTime="2026-01-22 12:16:31.107856035 +0000 UTC m=+1298.685971870"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.184818 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-combined-ca-bundle\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.184927 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-credential-keys\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.185061 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-public-tls-certs\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.185091 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-internal-tls-certs\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v"
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.185154 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-config-data\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v"
pod="openstack/keystone-585c9c99b7-xwz6v" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.185540 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jccs\" (UniqueName: \"kubernetes.io/projected/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-kube-api-access-9jccs\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.185601 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-fernet-keys\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.189494 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-76bfbd897-mgd28"] Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.191658 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-76bfbd897-mgd28" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.199663 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.199953 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-dlqk2" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.200129 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.206222 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-combined-ca-bundle\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.211877 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-public-tls-certs\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.212459 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-internal-tls-certs\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.212502 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-76bfbd897-mgd28"] Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.217869 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-scripts\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.227474 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-credential-keys\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.230737 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-fernet-keys\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.231823 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6f7559d9d9-kljj4" podStartSLOduration=7.231804159 podStartE2EDuration="7.231804159s" podCreationTimestamp="2026-01-22 12:16:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:16:31.151590994 +0000 UTC m=+1298.729706829" watchObservedRunningTime="2026-01-22 12:16:31.231804159 +0000 UTC m=+1298.809919984" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.232750 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-config-data\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.288428 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7c4ea182-f9b7-4895-872a-592fa6972eaa-config-data-custom\") pod \"barbican-worker-76bfbd897-mgd28\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " pod="openstack/barbican-worker-76bfbd897-mgd28" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.288512 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c4ea182-f9b7-4895-872a-592fa6972eaa-logs\") pod \"barbican-worker-76bfbd897-mgd28\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " pod="openstack/barbican-worker-76bfbd897-mgd28" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.288577 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c4ea182-f9b7-4895-872a-592fa6972eaa-config-data\") pod \"barbican-worker-76bfbd897-mgd28\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " pod="openstack/barbican-worker-76bfbd897-mgd28" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.288619 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmc8c\" (UniqueName: \"kubernetes.io/projected/7c4ea182-f9b7-4895-872a-592fa6972eaa-kube-api-access-lmc8c\") pod \"barbican-worker-76bfbd897-mgd28\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " pod="openstack/barbican-worker-76bfbd897-mgd28" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.288654 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c4ea182-f9b7-4895-872a-592fa6972eaa-combined-ca-bundle\") pod \"barbican-worker-76bfbd897-mgd28\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " 
pod="openstack/barbican-worker-76bfbd897-mgd28" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.288956 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jccs\" (UniqueName: \"kubernetes.io/projected/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-kube-api-access-9jccs\") pod \"keystone-585c9c99b7-xwz6v\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " pod="openstack/keystone-585c9c99b7-xwz6v" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.329858 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-585c9c99b7-xwz6v" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.331678 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-7df9c7dc4-7qqhx"] Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.340022 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.351637 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.369278 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7df9c7dc4-7qqhx"] Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.395278 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7c4ea182-f9b7-4895-872a-592fa6972eaa-config-data-custom\") pod \"barbican-worker-76bfbd897-mgd28\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " pod="openstack/barbican-worker-76bfbd897-mgd28" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.395404 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c4ea182-f9b7-4895-872a-592fa6972eaa-logs\") pod \"barbican-worker-76bfbd897-mgd28\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " pod="openstack/barbican-worker-76bfbd897-mgd28" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.395506 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c4ea182-f9b7-4895-872a-592fa6972eaa-config-data\") pod \"barbican-worker-76bfbd897-mgd28\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " pod="openstack/barbican-worker-76bfbd897-mgd28" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.395578 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmc8c\" (UniqueName: \"kubernetes.io/projected/7c4ea182-f9b7-4895-872a-592fa6972eaa-kube-api-access-lmc8c\") pod \"barbican-worker-76bfbd897-mgd28\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " pod="openstack/barbican-worker-76bfbd897-mgd28" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.395749 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c4ea182-f9b7-4895-872a-592fa6972eaa-combined-ca-bundle\") pod \"barbican-worker-76bfbd897-mgd28\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " pod="openstack/barbican-worker-76bfbd897-mgd28" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.425698 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-685444497c-dbxfd"] Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.426005 4773 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-685444497c-dbxfd" podUID="86846e17-f5a8-47a9-8a11-29fcb798e052" containerName="dnsmasq-dns" containerID="cri-o://2fdf64fdfb5b62e8a7db7c7f6a781548fab524967bb99f85c9bf601f99e10e9d" gracePeriod=10 Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.438723 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.479465 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c4ea182-f9b7-4895-872a-592fa6972eaa-config-data\") pod \"barbican-worker-76bfbd897-mgd28\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " pod="openstack/barbican-worker-76bfbd897-mgd28" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.484851 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c4ea182-f9b7-4895-872a-592fa6972eaa-logs\") pod \"barbican-worker-76bfbd897-mgd28\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " pod="openstack/barbican-worker-76bfbd897-mgd28" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.492553 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-66cdd4b5b5-89x6f"] Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.499184 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7c4ea182-f9b7-4895-872a-592fa6972eaa-config-data-custom\") pod \"barbican-worker-76bfbd897-mgd28\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " pod="openstack/barbican-worker-76bfbd897-mgd28" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.501253 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-config-data\") pod \"barbican-keystone-listener-7df9c7dc4-7qqhx\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.501302 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-config-data-custom\") pod \"barbican-keystone-listener-7df9c7dc4-7qqhx\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.501570 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-logs\") pod \"barbican-keystone-listener-7df9c7dc4-7qqhx\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.501622 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-combined-ca-bundle\") pod \"barbican-keystone-listener-7df9c7dc4-7qqhx\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 
12:16:31.501673 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5llx7\" (UniqueName: \"kubernetes.io/projected/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-kube-api-access-5llx7\") pod \"barbican-keystone-listener-7df9c7dc4-7qqhx\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.517688 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c4ea182-f9b7-4895-872a-592fa6972eaa-combined-ca-bundle\") pod \"barbican-worker-76bfbd897-mgd28\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " pod="openstack/barbican-worker-76bfbd897-mgd28" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.518255 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmc8c\" (UniqueName: \"kubernetes.io/projected/7c4ea182-f9b7-4895-872a-592fa6972eaa-kube-api-access-lmc8c\") pod \"barbican-worker-76bfbd897-mgd28\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " pod="openstack/barbican-worker-76bfbd897-mgd28" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.521064 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.574390 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66cdd4b5b5-89x6f"] Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.611449 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-config\") pod \"dnsmasq-dns-66cdd4b5b5-89x6f\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.625786 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-ovsdbserver-nb\") pod \"dnsmasq-dns-66cdd4b5b5-89x6f\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.625989 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-config-data\") pod \"barbican-keystone-listener-7df9c7dc4-7qqhx\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.626094 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-config-data-custom\") pod \"barbican-keystone-listener-7df9c7dc4-7qqhx\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.626525 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-ovsdbserver-sb\") pod \"dnsmasq-dns-66cdd4b5b5-89x6f\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " 
pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.626632 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-dns-svc\") pod \"dnsmasq-dns-66cdd4b5b5-89x6f\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.626761 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-logs\") pod \"barbican-keystone-listener-7df9c7dc4-7qqhx\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.626826 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-combined-ca-bundle\") pod \"barbican-keystone-listener-7df9c7dc4-7qqhx\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.626898 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-dns-swift-storage-0\") pod \"dnsmasq-dns-66cdd4b5b5-89x6f\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.626952 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5llx7\" (UniqueName: \"kubernetes.io/projected/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-kube-api-access-5llx7\") pod \"barbican-keystone-listener-7df9c7dc4-7qqhx\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.627047 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7z88\" (UniqueName: \"kubernetes.io/projected/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-kube-api-access-g7z88\") pod \"dnsmasq-dns-66cdd4b5b5-89x6f\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.628002 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-logs\") pod \"barbican-keystone-listener-7df9c7dc4-7qqhx\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.645314 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-config-data-custom\") pod \"barbican-keystone-listener-7df9c7dc4-7qqhx\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.652580 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-combined-ca-bundle\") pod \"barbican-keystone-listener-7df9c7dc4-7qqhx\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.667662 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-config-data\") pod \"barbican-keystone-listener-7df9c7dc4-7qqhx\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.672825 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5llx7\" (UniqueName: \"kubernetes.io/projected/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-kube-api-access-5llx7\") pod \"barbican-keystone-listener-7df9c7dc4-7qqhx\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.933941 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-config\") pod \"dnsmasq-dns-66cdd4b5b5-89x6f\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.934033 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-ovsdbserver-nb\") pod \"dnsmasq-dns-66cdd4b5b5-89x6f\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.934080 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-ovsdbserver-sb\") pod \"dnsmasq-dns-66cdd4b5b5-89x6f\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.934116 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-dns-svc\") pod \"dnsmasq-dns-66cdd4b5b5-89x6f\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.934165 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-dns-swift-storage-0\") pod \"dnsmasq-dns-66cdd4b5b5-89x6f\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.934203 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7z88\" (UniqueName: \"kubernetes.io/projected/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-kube-api-access-g7z88\") pod \"dnsmasq-dns-66cdd4b5b5-89x6f\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.935655 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5b45bc49b8-vq989"] 
Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.936155 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-config\") pod \"dnsmasq-dns-66cdd4b5b5-89x6f\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.937649 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-ovsdbserver-nb\") pod \"dnsmasq-dns-66cdd4b5b5-89x6f\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.938138 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-dns-svc\") pod \"dnsmasq-dns-66cdd4b5b5-89x6f\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.939001 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-ovsdbserver-sb\") pod \"dnsmasq-dns-66cdd4b5b5-89x6f\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:31 crc kubenswrapper[4773]: I0122 12:16:31.939339 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-dns-swift-storage-0\") pod \"dnsmasq-dns-66cdd4b5b5-89x6f\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.055455 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.074347 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-76bfbd897-mgd28" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.080905 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.088688 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7z88\" (UniqueName: \"kubernetes.io/projected/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-kube-api-access-g7z88\") pod \"dnsmasq-dns-66cdd4b5b5-89x6f\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.130341 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.150761 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-7d56c9f5dc-qd9zs"] Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.153999 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.155456 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.177230 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-85878465f6-ss54r"] Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.183563 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/286ffdd7-6466-4f26-8d94-76a84d97490f-combined-ca-bundle\") pod \"barbican-api-5b45bc49b8-vq989\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.183647 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/286ffdd7-6466-4f26-8d94-76a84d97490f-config-data\") pod \"barbican-api-5b45bc49b8-vq989\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.183702 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/286ffdd7-6466-4f26-8d94-76a84d97490f-config-data-custom\") pod \"barbican-api-5b45bc49b8-vq989\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.183742 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2fv7\" (UniqueName: \"kubernetes.io/projected/286ffdd7-6466-4f26-8d94-76a84d97490f-kube-api-access-x2fv7\") pod \"barbican-api-5b45bc49b8-vq989\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.183780 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/286ffdd7-6466-4f26-8d94-76a84d97490f-logs\") pod \"barbican-api-5b45bc49b8-vq989\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.190040 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-85878465f6-ss54r" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.230065 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7d56c9f5dc-qd9zs"] Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.263561 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-85878465f6-ss54r"] Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.286920 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/885b1538-0800-47c6-bedd-284b67cd08ca-config-data-custom\") pod \"barbican-keystone-listener-85878465f6-ss54r\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") " pod="openstack/barbican-keystone-listener-85878465f6-ss54r" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.287038 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7m22r\" (UniqueName: \"kubernetes.io/projected/885b1538-0800-47c6-bedd-284b67cd08ca-kube-api-access-7m22r\") pod \"barbican-keystone-listener-85878465f6-ss54r\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") " pod="openstack/barbican-keystone-listener-85878465f6-ss54r" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.287091 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-config-data-custom\") pod \"barbican-worker-7d56c9f5dc-qd9zs\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") " pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.287119 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-logs\") pod \"barbican-worker-7d56c9f5dc-qd9zs\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") " pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.287182 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/286ffdd7-6466-4f26-8d94-76a84d97490f-combined-ca-bundle\") pod \"barbican-api-5b45bc49b8-vq989\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.287205 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-combined-ca-bundle\") pod \"barbican-worker-7d56c9f5dc-qd9zs\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") " pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.287230 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/286ffdd7-6466-4f26-8d94-76a84d97490f-config-data\") pod \"barbican-api-5b45bc49b8-vq989\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.287314 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/885b1538-0800-47c6-bedd-284b67cd08ca-config-data\") pod \"barbican-keystone-listener-85878465f6-ss54r\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") " pod="openstack/barbican-keystone-listener-85878465f6-ss54r" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.287342 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/286ffdd7-6466-4f26-8d94-76a84d97490f-config-data-custom\") pod \"barbican-api-5b45bc49b8-vq989\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.287387 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2fv7\" (UniqueName: \"kubernetes.io/projected/286ffdd7-6466-4f26-8d94-76a84d97490f-kube-api-access-x2fv7\") pod \"barbican-api-5b45bc49b8-vq989\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.287418 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-config-data\") pod \"barbican-worker-7d56c9f5dc-qd9zs\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") " pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.287447 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/286ffdd7-6466-4f26-8d94-76a84d97490f-logs\") pod \"barbican-api-5b45bc49b8-vq989\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.287473 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8cdh\" (UniqueName: \"kubernetes.io/projected/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-kube-api-access-x8cdh\") pod \"barbican-worker-7d56c9f5dc-qd9zs\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") " pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.287504 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/885b1538-0800-47c6-bedd-284b67cd08ca-combined-ca-bundle\") pod \"barbican-keystone-listener-85878465f6-ss54r\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") " pod="openstack/barbican-keystone-listener-85878465f6-ss54r" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.287544 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/885b1538-0800-47c6-bedd-284b67cd08ca-logs\") pod \"barbican-keystone-listener-85878465f6-ss54r\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") " pod="openstack/barbican-keystone-listener-85878465f6-ss54r" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.302988 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/286ffdd7-6466-4f26-8d94-76a84d97490f-combined-ca-bundle\") pod \"barbican-api-5b45bc49b8-vq989\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 
12:16:32.304389 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5b45bc49b8-vq989"] Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.308117 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/286ffdd7-6466-4f26-8d94-76a84d97490f-logs\") pod \"barbican-api-5b45bc49b8-vq989\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.310499 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-685444497c-dbxfd" podUID="86846e17-f5a8-47a9-8a11-29fcb798e052" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.151:5353: connect: connection refused" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.316115 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/286ffdd7-6466-4f26-8d94-76a84d97490f-config-data-custom\") pod \"barbican-api-5b45bc49b8-vq989\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.325703 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/286ffdd7-6466-4f26-8d94-76a84d97490f-config-data\") pod \"barbican-api-5b45bc49b8-vq989\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.351702 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2fv7\" (UniqueName: \"kubernetes.io/projected/286ffdd7-6466-4f26-8d94-76a84d97490f-kube-api-access-x2fv7\") pod \"barbican-api-5b45bc49b8-vq989\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.359270 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-76746cdbc6-gbqml"] Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.361632 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:32 crc kubenswrapper[4773]: E0122 12:16:32.367423 4773 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod86846e17_f5a8_47a9_8a11_29fcb798e052.slice/crio-conmon-2fdf64fdfb5b62e8a7db7c7f6a781548fab524967bb99f85c9bf601f99e10e9d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod86846e17_f5a8_47a9_8a11_29fcb798e052.slice/crio-2fdf64fdfb5b62e8a7db7c7f6a781548fab524967bb99f85c9bf601f99e10e9d.scope\": RecentStats: unable to find data in memory cache]" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.370541 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-76746cdbc6-gbqml"] Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.389359 4773 generic.go:334] "Generic (PLEG): container finished" podID="86846e17-f5a8-47a9-8a11-29fcb798e052" containerID="2fdf64fdfb5b62e8a7db7c7f6a781548fab524967bb99f85c9bf601f99e10e9d" exitCode=0 Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.390470 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-685444497c-dbxfd" event={"ID":"86846e17-f5a8-47a9-8a11-29fcb798e052","Type":"ContainerDied","Data":"2fdf64fdfb5b62e8a7db7c7f6a781548fab524967bb99f85c9bf601f99e10e9d"} Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.393196 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/885b1538-0800-47c6-bedd-284b67cd08ca-config-data\") pod \"barbican-keystone-listener-85878465f6-ss54r\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") " pod="openstack/barbican-keystone-listener-85878465f6-ss54r" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.393254 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-config-data\") pod \"barbican-worker-7d56c9f5dc-qd9zs\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") " pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.393296 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8cdh\" (UniqueName: \"kubernetes.io/projected/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-kube-api-access-x8cdh\") pod \"barbican-worker-7d56c9f5dc-qd9zs\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") " pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.393326 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/885b1538-0800-47c6-bedd-284b67cd08ca-combined-ca-bundle\") pod \"barbican-keystone-listener-85878465f6-ss54r\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") " pod="openstack/barbican-keystone-listener-85878465f6-ss54r" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.393355 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/885b1538-0800-47c6-bedd-284b67cd08ca-logs\") pod \"barbican-keystone-listener-85878465f6-ss54r\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") " pod="openstack/barbican-keystone-listener-85878465f6-ss54r" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 
12:16:32.393386 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/885b1538-0800-47c6-bedd-284b67cd08ca-config-data-custom\") pod \"barbican-keystone-listener-85878465f6-ss54r\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") " pod="openstack/barbican-keystone-listener-85878465f6-ss54r" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.393427 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7m22r\" (UniqueName: \"kubernetes.io/projected/885b1538-0800-47c6-bedd-284b67cd08ca-kube-api-access-7m22r\") pod \"barbican-keystone-listener-85878465f6-ss54r\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") " pod="openstack/barbican-keystone-listener-85878465f6-ss54r" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.393467 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-config-data-custom\") pod \"barbican-worker-7d56c9f5dc-qd9zs\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") " pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.393500 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-logs\") pod \"barbican-worker-7d56c9f5dc-qd9zs\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") " pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.393537 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-combined-ca-bundle\") pod \"barbican-worker-7d56c9f5dc-qd9zs\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") " pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.395793 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/885b1538-0800-47c6-bedd-284b67cd08ca-logs\") pod \"barbican-keystone-listener-85878465f6-ss54r\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") " pod="openstack/barbican-keystone-listener-85878465f6-ss54r" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.399322 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-combined-ca-bundle\") pod \"barbican-worker-7d56c9f5dc-qd9zs\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") " pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.399776 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-logs\") pod \"barbican-worker-7d56c9f5dc-qd9zs\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") " pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.408105 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-config-data\") pod \"barbican-worker-7d56c9f5dc-qd9zs\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") " pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" Jan 22 12:16:32 crc 
kubenswrapper[4773]: I0122 12:16:32.421442 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/885b1538-0800-47c6-bedd-284b67cd08ca-combined-ca-bundle\") pod \"barbican-keystone-listener-85878465f6-ss54r\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") " pod="openstack/barbican-keystone-listener-85878465f6-ss54r" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.421543 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-config-data-custom\") pod \"barbican-worker-7d56c9f5dc-qd9zs\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") " pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.421951 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/885b1538-0800-47c6-bedd-284b67cd08ca-config-data\") pod \"barbican-keystone-listener-85878465f6-ss54r\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") " pod="openstack/barbican-keystone-listener-85878465f6-ss54r" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.437033 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7m22r\" (UniqueName: \"kubernetes.io/projected/885b1538-0800-47c6-bedd-284b67cd08ca-kube-api-access-7m22r\") pod \"barbican-keystone-listener-85878465f6-ss54r\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") " pod="openstack/barbican-keystone-listener-85878465f6-ss54r" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.437606 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/885b1538-0800-47c6-bedd-284b67cd08ca-config-data-custom\") pod \"barbican-keystone-listener-85878465f6-ss54r\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") " pod="openstack/barbican-keystone-listener-85878465f6-ss54r" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.462942 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8cdh\" (UniqueName: \"kubernetes.io/projected/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-kube-api-access-x8cdh\") pod \"barbican-worker-7d56c9f5dc-qd9zs\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") " pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.486881 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.503201 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5dd744cf-1206-46a3-8af5-c18012bc1461-config-data\") pod \"barbican-api-76746cdbc6-gbqml\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.503258 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5jzz\" (UniqueName: \"kubernetes.io/projected/5dd744cf-1206-46a3-8af5-c18012bc1461-kube-api-access-h5jzz\") pod \"barbican-api-76746cdbc6-gbqml\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.503421 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dd744cf-1206-46a3-8af5-c18012bc1461-combined-ca-bundle\") pod \"barbican-api-76746cdbc6-gbqml\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.503483 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5dd744cf-1206-46a3-8af5-c18012bc1461-config-data-custom\") pod \"barbican-api-76746cdbc6-gbqml\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.503534 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5dd744cf-1206-46a3-8af5-c18012bc1461-logs\") pod \"barbican-api-76746cdbc6-gbqml\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.552470 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.556733 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-85878465f6-ss54r" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.613213 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5dd744cf-1206-46a3-8af5-c18012bc1461-config-data\") pod \"barbican-api-76746cdbc6-gbqml\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.613317 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5jzz\" (UniqueName: \"kubernetes.io/projected/5dd744cf-1206-46a3-8af5-c18012bc1461-kube-api-access-h5jzz\") pod \"barbican-api-76746cdbc6-gbqml\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.613461 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dd744cf-1206-46a3-8af5-c18012bc1461-combined-ca-bundle\") pod \"barbican-api-76746cdbc6-gbqml\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.616163 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5dd744cf-1206-46a3-8af5-c18012bc1461-config-data-custom\") pod \"barbican-api-76746cdbc6-gbqml\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.616262 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5dd744cf-1206-46a3-8af5-c18012bc1461-logs\") pod \"barbican-api-76746cdbc6-gbqml\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.618132 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5dd744cf-1206-46a3-8af5-c18012bc1461-logs\") pod \"barbican-api-76746cdbc6-gbqml\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.634540 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5dd744cf-1206-46a3-8af5-c18012bc1461-config-data-custom\") pod \"barbican-api-76746cdbc6-gbqml\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.639367 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-585c9c99b7-xwz6v"] Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.644686 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dd744cf-1206-46a3-8af5-c18012bc1461-combined-ca-bundle\") pod \"barbican-api-76746cdbc6-gbqml\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.656761 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/5dd744cf-1206-46a3-8af5-c18012bc1461-config-data\") pod \"barbican-api-76746cdbc6-gbqml\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.669000 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5jzz\" (UniqueName: \"kubernetes.io/projected/5dd744cf-1206-46a3-8af5-c18012bc1461-kube-api-access-h5jzz\") pod \"barbican-api-76746cdbc6-gbqml\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.709355 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:32 crc kubenswrapper[4773]: W0122 12:16:32.751890 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b99e28b_4c7c_4ec2_83b3_05658c9dd0ef.slice/crio-d10fd517c03fe43c512120d217a0c4745c2bbc39da2079ad25d1d0a3d62406b2 WatchSource:0}: Error finding container d10fd517c03fe43c512120d217a0c4745c2bbc39da2079ad25d1d0a3d62406b2: Status 404 returned error can't find the container with id d10fd517c03fe43c512120d217a0c4745c2bbc39da2079ad25d1d0a3d62406b2 Jan 22 12:16:32 crc kubenswrapper[4773]: I0122 12:16:32.967501 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.050564 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-dns-swift-storage-0\") pod \"86846e17-f5a8-47a9-8a11-29fcb798e052\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.050768 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-ovsdbserver-sb\") pod \"86846e17-f5a8-47a9-8a11-29fcb798e052\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.050968 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-dns-svc\") pod \"86846e17-f5a8-47a9-8a11-29fcb798e052\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.051030 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-ovsdbserver-nb\") pod \"86846e17-f5a8-47a9-8a11-29fcb798e052\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.051120 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-649m2\" (UniqueName: \"kubernetes.io/projected/86846e17-f5a8-47a9-8a11-29fcb798e052-kube-api-access-649m2\") pod \"86846e17-f5a8-47a9-8a11-29fcb798e052\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.051161 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-config\") pod 
\"86846e17-f5a8-47a9-8a11-29fcb798e052\" (UID: \"86846e17-f5a8-47a9-8a11-29fcb798e052\") " Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.108347 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86846e17-f5a8-47a9-8a11-29fcb798e052-kube-api-access-649m2" (OuterVolumeSpecName: "kube-api-access-649m2") pod "86846e17-f5a8-47a9-8a11-29fcb798e052" (UID: "86846e17-f5a8-47a9-8a11-29fcb798e052"). InnerVolumeSpecName "kube-api-access-649m2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.155144 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-649m2\" (UniqueName: \"kubernetes.io/projected/86846e17-f5a8-47a9-8a11-29fcb798e052-kube-api-access-649m2\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.176290 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-config" (OuterVolumeSpecName: "config") pod "86846e17-f5a8-47a9-8a11-29fcb798e052" (UID: "86846e17-f5a8-47a9-8a11-29fcb798e052"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.188847 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "86846e17-f5a8-47a9-8a11-29fcb798e052" (UID: "86846e17-f5a8-47a9-8a11-29fcb798e052"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.189534 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "86846e17-f5a8-47a9-8a11-29fcb798e052" (UID: "86846e17-f5a8-47a9-8a11-29fcb798e052"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.209004 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "86846e17-f5a8-47a9-8a11-29fcb798e052" (UID: "86846e17-f5a8-47a9-8a11-29fcb798e052"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.282288 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.282356 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.282368 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.282381 4773 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.388611 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "86846e17-f5a8-47a9-8a11-29fcb798e052" (UID: "86846e17-f5a8-47a9-8a11-29fcb798e052"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.435614 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-585c9c99b7-xwz6v" event={"ID":"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef","Type":"ContainerStarted","Data":"d10fd517c03fe43c512120d217a0c4745c2bbc39da2079ad25d1d0a3d62406b2"} Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.454442 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-685444497c-dbxfd" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.468758 4773 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.468835 4773 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.470273 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-685444497c-dbxfd" event={"ID":"86846e17-f5a8-47a9-8a11-29fcb798e052","Type":"ContainerDied","Data":"e5def578050211e9a322cc7a45e087530558db3456aecafb7a1b954b58c73576"} Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.470356 4773 scope.go:117] "RemoveContainer" containerID="2fdf64fdfb5b62e8a7db7c7f6a781548fab524967bb99f85c9bf601f99e10e9d" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.470929 4773 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.470982 4773 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.502964 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/86846e17-f5a8-47a9-8a11-29fcb798e052-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.574840 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-685444497c-dbxfd"] Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.610990 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-685444497c-dbxfd"] Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.642708 4773 scope.go:117] "RemoveContainer" containerID="93d19dd8502242005929e4fbf6b2618a144a4495b3be5cb5ae3bf349abb6bc65" Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.713177 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66cdd4b5b5-89x6f"] Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.871245 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-76bfbd897-mgd28"] Jan 22 12:16:33 crc kubenswrapper[4773]: I0122 12:16:33.882325 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5b45bc49b8-vq989"] Jan 22 12:16:34 crc kubenswrapper[4773]: I0122 12:16:34.154750 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7df9c7dc4-7qqhx"] Jan 22 12:16:34 crc kubenswrapper[4773]: I0122 12:16:34.170633 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7d56c9f5dc-qd9zs"] Jan 22 12:16:34 crc kubenswrapper[4773]: I0122 12:16:34.334006 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-76746cdbc6-gbqml"] Jan 22 12:16:34 crc kubenswrapper[4773]: I0122 12:16:34.388528 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-85878465f6-ss54r"] Jan 22 12:16:34 crc kubenswrapper[4773]: I0122 12:16:34.493829 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-85878465f6-ss54r" event={"ID":"885b1538-0800-47c6-bedd-284b67cd08ca","Type":"ContainerStarted","Data":"3ca8e34c1aece56e84463dab57c063eb49ccf046d864db450b64d581f5177856"} Jan 22 12:16:34 crc kubenswrapper[4773]: I0122 12:16:34.502538 4773 generic.go:334] "Generic (PLEG): container finished" 
podID="3af85fdc-c159-4eb5-92dd-bf1ef790a5fa" containerID="3413e819aefbdd88c96605e9c48eef0f8d140caea90e150ffb78ac99c8775407" exitCode=0 Jan 22 12:16:34 crc kubenswrapper[4773]: I0122 12:16:34.502675 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" event={"ID":"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa","Type":"ContainerDied","Data":"3413e819aefbdd88c96605e9c48eef0f8d140caea90e150ffb78ac99c8775407"} Jan 22 12:16:34 crc kubenswrapper[4773]: I0122 12:16:34.502716 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" event={"ID":"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa","Type":"ContainerStarted","Data":"798d4fe494f5dce1062f4f89710bf956259a4604f18be3d8b3e0b209c557d755"} Jan 22 12:16:34 crc kubenswrapper[4773]: I0122 12:16:34.643368 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-76746cdbc6-gbqml" event={"ID":"5dd744cf-1206-46a3-8af5-c18012bc1461","Type":"ContainerStarted","Data":"43af7f4401465846d94a89c525bc8a9584deabc96a10d3ab372cd4fc220cc7e4"} Jan 22 12:16:34 crc kubenswrapper[4773]: I0122 12:16:34.703523 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86846e17-f5a8-47a9-8a11-29fcb798e052" path="/var/lib/kubelet/pods/86846e17-f5a8-47a9-8a11-29fcb798e052/volumes" Jan 22 12:16:34 crc kubenswrapper[4773]: I0122 12:16:34.706551 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-585c9c99b7-xwz6v" event={"ID":"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef","Type":"ContainerStarted","Data":"31c8f654fc7fe97b41b77b38806f5d2076b073ea03d352dc5919cdfee9ad2038"} Jan 22 12:16:34 crc kubenswrapper[4773]: I0122 12:16:34.706695 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-585c9c99b7-xwz6v" Jan 22 12:16:34 crc kubenswrapper[4773]: I0122 12:16:34.706937 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" event={"ID":"a5dc7991-ffde-4ef2-9668-e07d7c4aa614","Type":"ContainerStarted","Data":"d4bbf41adb87935d04b3119f36e9161d98c445515e323bebefc418d38c64e3ff"} Jan 22 12:16:34 crc kubenswrapper[4773]: I0122 12:16:34.710552 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-76bfbd897-mgd28" event={"ID":"7c4ea182-f9b7-4895-872a-592fa6972eaa","Type":"ContainerStarted","Data":"ea6d66eb9421cec8b1349f9262fa23d70602ffdcac9df3512d620ef06decd7c9"} Jan 22 12:16:34 crc kubenswrapper[4773]: I0122 12:16:34.710800 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" event={"ID":"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a","Type":"ContainerStarted","Data":"cb83c421cd9737c8cc2cc60d7601bf50b44bffd3d1f79c424106d8877cd6322f"} Jan 22 12:16:34 crc kubenswrapper[4773]: I0122 12:16:34.722491 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-585c9c99b7-xwz6v" podStartSLOduration=4.722468113 podStartE2EDuration="4.722468113s" podCreationTimestamp="2026-01-22 12:16:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:16:34.710766664 +0000 UTC m=+1302.288882509" watchObservedRunningTime="2026-01-22 12:16:34.722468113 +0000 UTC m=+1302.300583938" Jan 22 12:16:34 crc kubenswrapper[4773]: I0122 12:16:34.771031 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b45bc49b8-vq989" 
event={"ID":"286ffdd7-6466-4f26-8d94-76a84d97490f","Type":"ContainerStarted","Data":"59107cfd52159399988845ccee01c06ece3d653f7b4ecc2ea96e3f1f5efe00ee"} Jan 22 12:16:34 crc kubenswrapper[4773]: I0122 12:16:34.771102 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b45bc49b8-vq989" event={"ID":"286ffdd7-6466-4f26-8d94-76a84d97490f","Type":"ContainerStarted","Data":"164f242b00f11cb833b3d43cf5c09527788a7780f41c6f6c9da8a8060c396c9e"} Jan 22 12:16:35 crc kubenswrapper[4773]: I0122 12:16:35.812677 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b45bc49b8-vq989" event={"ID":"286ffdd7-6466-4f26-8d94-76a84d97490f","Type":"ContainerStarted","Data":"fe9e42300bbe2a7e0d5dd9f65adec2cd28de2ba547557f8d50d3198a5190c6e2"} Jan 22 12:16:35 crc kubenswrapper[4773]: I0122 12:16:35.814342 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:35 crc kubenswrapper[4773]: I0122 12:16:35.814376 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:35 crc kubenswrapper[4773]: I0122 12:16:35.840391 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" event={"ID":"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa","Type":"ContainerStarted","Data":"380677fd1c336ba88e8eeed7c645b7a6151fb3e230f08ceea3c7a50b00a2f40f"} Jan 22 12:16:35 crc kubenswrapper[4773]: I0122 12:16:35.841781 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:35 crc kubenswrapper[4773]: I0122 12:16:35.854222 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-76746cdbc6-gbqml" event={"ID":"5dd744cf-1206-46a3-8af5-c18012bc1461","Type":"ContainerStarted","Data":"8b5b57dda84416c13cf815f4dbd5e41110823d9055dd647ae57e2bcc91185d7d"} Jan 22 12:16:35 crc kubenswrapper[4773]: I0122 12:16:35.854278 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-76746cdbc6-gbqml" event={"ID":"5dd744cf-1206-46a3-8af5-c18012bc1461","Type":"ContainerStarted","Data":"937aa35331c96c0ea6173b73a30dbf047b384131919760e1cf16dd1298060f4a"} Jan 22 12:16:35 crc kubenswrapper[4773]: I0122 12:16:35.854377 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:35 crc kubenswrapper[4773]: I0122 12:16:35.855027 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:35 crc kubenswrapper[4773]: I0122 12:16:35.862933 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5b45bc49b8-vq989" podStartSLOduration=4.862906322 podStartE2EDuration="4.862906322s" podCreationTimestamp="2026-01-22 12:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:16:35.843799525 +0000 UTC m=+1303.421915360" watchObservedRunningTime="2026-01-22 12:16:35.862906322 +0000 UTC m=+1303.441022147" Jan 22 12:16:35 crc kubenswrapper[4773]: I0122 12:16:35.912485 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" podStartSLOduration=4.912440324 podStartE2EDuration="4.912440324s" podCreationTimestamp="2026-01-22 12:16:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:16:35.898820401 +0000 UTC m=+1303.476936236" watchObservedRunningTime="2026-01-22 12:16:35.912440324 +0000 UTC m=+1303.490556149" Jan 22 12:16:35 crc kubenswrapper[4773]: I0122 12:16:35.939900 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-76746cdbc6-gbqml" podStartSLOduration=3.9398786660000003 podStartE2EDuration="3.939878666s" podCreationTimestamp="2026-01-22 12:16:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:16:35.933877337 +0000 UTC m=+1303.511993152" watchObservedRunningTime="2026-01-22 12:16:35.939878666 +0000 UTC m=+1303.517994491" Jan 22 12:16:36 crc kubenswrapper[4773]: I0122 12:16:36.312393 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 22 12:16:36 crc kubenswrapper[4773]: I0122 12:16:36.312516 4773 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 12:16:36 crc kubenswrapper[4773]: I0122 12:16:36.414599 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 22 12:16:36 crc kubenswrapper[4773]: I0122 12:16:36.673241 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 22 12:16:36 crc kubenswrapper[4773]: I0122 12:16:36.673289 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 22 12:16:36 crc kubenswrapper[4773]: I0122 12:16:36.875494 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-ld8bk" event={"ID":"4958bcb5-02cc-4c59-8b85-c386263dd3b8","Type":"ContainerStarted","Data":"b3bc996c5fce4a346e5c10f4bff70bbba9478b888eca409f8c86b9e8ed609774"} Jan 22 12:16:36 crc kubenswrapper[4773]: I0122 12:16:36.924066 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-ld8bk" podStartSLOduration=5.077349589 podStartE2EDuration="44.924034511s" podCreationTimestamp="2026-01-22 12:15:52 +0000 UTC" firstStartedPulling="2026-01-22 12:15:54.60854705 +0000 UTC m=+1262.186662875" lastFinishedPulling="2026-01-22 12:16:34.455231982 +0000 UTC m=+1302.033347797" observedRunningTime="2026-01-22 12:16:36.90371983 +0000 UTC m=+1304.481835655" watchObservedRunningTime="2026-01-22 12:16:36.924034511 +0000 UTC m=+1304.502150336" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.027603 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5b45bc49b8-vq989"] Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.062807 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-67f94f9664-cd544"] Jan 22 12:16:37 crc kubenswrapper[4773]: E0122 12:16:37.063209 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86846e17-f5a8-47a9-8a11-29fcb798e052" containerName="init" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.063233 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="86846e17-f5a8-47a9-8a11-29fcb798e052" containerName="init" Jan 22 12:16:37 crc kubenswrapper[4773]: E0122 12:16:37.063272 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86846e17-f5a8-47a9-8a11-29fcb798e052" containerName="dnsmasq-dns" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 
12:16:37.063282 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="86846e17-f5a8-47a9-8a11-29fcb798e052" containerName="dnsmasq-dns" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.063464 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="86846e17-f5a8-47a9-8a11-29fcb798e052" containerName="dnsmasq-dns" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.064649 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.078010 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.078919 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.095360 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-67f94f9664-cd544"] Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.204637 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-config-data\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.204696 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-logs\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.204825 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8bq2\" (UniqueName: \"kubernetes.io/projected/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-kube-api-access-j8bq2\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.204869 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-internal-tls-certs\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.204900 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-public-tls-certs\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.204929 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-combined-ca-bundle\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.204976 4773 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-config-data-custom\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.308646 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8bq2\" (UniqueName: \"kubernetes.io/projected/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-kube-api-access-j8bq2\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.308754 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-internal-tls-certs\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.308802 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-public-tls-certs\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.308843 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-combined-ca-bundle\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.308905 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-config-data-custom\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.309078 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-config-data\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.309100 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-logs\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.309798 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-logs\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.327552 4773 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-internal-tls-certs\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.329479 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-public-tls-certs\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.333249 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-combined-ca-bundle\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.334481 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-config-data-custom\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.335041 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-config-data\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.339831 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8bq2\" (UniqueName: \"kubernetes.io/projected/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-kube-api-access-j8bq2\") pod \"barbican-api-67f94f9664-cd544\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") " pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:37 crc kubenswrapper[4773]: I0122 12:16:37.391342 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:38 crc kubenswrapper[4773]: I0122 12:16:38.934572 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" event={"ID":"a5dc7991-ffde-4ef2-9668-e07d7c4aa614","Type":"ContainerStarted","Data":"7e617c6963bb509b84049d20e6f222d4d651113da5d06d4b6d6a4307baa6b527"} Jan 22 12:16:38 crc kubenswrapper[4773]: I0122 12:16:38.935005 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-67f94f9664-cd544"] Jan 22 12:16:38 crc kubenswrapper[4773]: W0122 12:16:38.945400 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62b80aa9_cd54_4a8a_aea4_0fcbe7a4d94e.slice/crio-d3893ebe0b69c21f423ee65d072031e25359fc316ec91e348695754111d8b041 WatchSource:0}: Error finding container d3893ebe0b69c21f423ee65d072031e25359fc316ec91e348695754111d8b041: Status 404 returned error can't find the container with id d3893ebe0b69c21f423ee65d072031e25359fc316ec91e348695754111d8b041 Jan 22 12:16:38 crc kubenswrapper[4773]: I0122 12:16:38.946936 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-85878465f6-ss54r" event={"ID":"885b1538-0800-47c6-bedd-284b67cd08ca","Type":"ContainerStarted","Data":"28e3f404590e3687f8e87e4072e952d98473da9c97f81c395fa0f7741ae8ab58"} Jan 22 12:16:38 crc kubenswrapper[4773]: I0122 12:16:38.949926 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5b45bc49b8-vq989" podUID="286ffdd7-6466-4f26-8d94-76a84d97490f" containerName="barbican-api-log" containerID="cri-o://59107cfd52159399988845ccee01c06ece3d653f7b4ecc2ea96e3f1f5efe00ee" gracePeriod=30 Jan 22 12:16:38 crc kubenswrapper[4773]: I0122 12:16:38.950183 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" event={"ID":"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a","Type":"ContainerStarted","Data":"0b78a302284ae6361ce3497090c8271c599effd0064f02890c0ccdec52d89429"} Jan 22 12:16:38 crc kubenswrapper[4773]: I0122 12:16:38.950531 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5b45bc49b8-vq989" podUID="286ffdd7-6466-4f26-8d94-76a84d97490f" containerName="barbican-api" containerID="cri-o://fe9e42300bbe2a7e0d5dd9f65adec2cd28de2ba547557f8d50d3198a5190c6e2" gracePeriod=30 Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.613856 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.779537 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2fv7\" (UniqueName: \"kubernetes.io/projected/286ffdd7-6466-4f26-8d94-76a84d97490f-kube-api-access-x2fv7\") pod \"286ffdd7-6466-4f26-8d94-76a84d97490f\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.779667 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/286ffdd7-6466-4f26-8d94-76a84d97490f-config-data\") pod \"286ffdd7-6466-4f26-8d94-76a84d97490f\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.779781 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/286ffdd7-6466-4f26-8d94-76a84d97490f-logs\") pod \"286ffdd7-6466-4f26-8d94-76a84d97490f\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.779813 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/286ffdd7-6466-4f26-8d94-76a84d97490f-combined-ca-bundle\") pod \"286ffdd7-6466-4f26-8d94-76a84d97490f\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.779843 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/286ffdd7-6466-4f26-8d94-76a84d97490f-config-data-custom\") pod \"286ffdd7-6466-4f26-8d94-76a84d97490f\" (UID: \"286ffdd7-6466-4f26-8d94-76a84d97490f\") " Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.781592 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/286ffdd7-6466-4f26-8d94-76a84d97490f-logs" (OuterVolumeSpecName: "logs") pod "286ffdd7-6466-4f26-8d94-76a84d97490f" (UID: "286ffdd7-6466-4f26-8d94-76a84d97490f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.785551 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/286ffdd7-6466-4f26-8d94-76a84d97490f-kube-api-access-x2fv7" (OuterVolumeSpecName: "kube-api-access-x2fv7") pod "286ffdd7-6466-4f26-8d94-76a84d97490f" (UID: "286ffdd7-6466-4f26-8d94-76a84d97490f"). InnerVolumeSpecName "kube-api-access-x2fv7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.803636 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/286ffdd7-6466-4f26-8d94-76a84d97490f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "286ffdd7-6466-4f26-8d94-76a84d97490f" (UID: "286ffdd7-6466-4f26-8d94-76a84d97490f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.865444 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/286ffdd7-6466-4f26-8d94-76a84d97490f-config-data" (OuterVolumeSpecName: "config-data") pod "286ffdd7-6466-4f26-8d94-76a84d97490f" (UID: "286ffdd7-6466-4f26-8d94-76a84d97490f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.871754 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/286ffdd7-6466-4f26-8d94-76a84d97490f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "286ffdd7-6466-4f26-8d94-76a84d97490f" (UID: "286ffdd7-6466-4f26-8d94-76a84d97490f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.883151 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2fv7\" (UniqueName: \"kubernetes.io/projected/286ffdd7-6466-4f26-8d94-76a84d97490f-kube-api-access-x2fv7\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.883189 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/286ffdd7-6466-4f26-8d94-76a84d97490f-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.883201 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/286ffdd7-6466-4f26-8d94-76a84d97490f-logs\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.883216 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/286ffdd7-6466-4f26-8d94-76a84d97490f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.883230 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/286ffdd7-6466-4f26-8d94-76a84d97490f-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.961779 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" event={"ID":"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a","Type":"ContainerStarted","Data":"8bbcbdbd880d4b84550ff53526ebb7d1c4d1b044cf62d3e78710cb2168597c9d"} Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.965184 4773 generic.go:334] "Generic (PLEG): container finished" podID="286ffdd7-6466-4f26-8d94-76a84d97490f" containerID="fe9e42300bbe2a7e0d5dd9f65adec2cd28de2ba547557f8d50d3198a5190c6e2" exitCode=0 Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.965223 4773 generic.go:334] "Generic (PLEG): container finished" podID="286ffdd7-6466-4f26-8d94-76a84d97490f" containerID="59107cfd52159399988845ccee01c06ece3d653f7b4ecc2ea96e3f1f5efe00ee" exitCode=143 Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.965271 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b45bc49b8-vq989" event={"ID":"286ffdd7-6466-4f26-8d94-76a84d97490f","Type":"ContainerDied","Data":"fe9e42300bbe2a7e0d5dd9f65adec2cd28de2ba547557f8d50d3198a5190c6e2"} Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.965324 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b45bc49b8-vq989" event={"ID":"286ffdd7-6466-4f26-8d94-76a84d97490f","Type":"ContainerDied","Data":"59107cfd52159399988845ccee01c06ece3d653f7b4ecc2ea96e3f1f5efe00ee"} Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.965339 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b45bc49b8-vq989" 
event={"ID":"286ffdd7-6466-4f26-8d94-76a84d97490f","Type":"ContainerDied","Data":"164f242b00f11cb833b3d43cf5c09527788a7780f41c6f6c9da8a8060c396c9e"} Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.965359 4773 scope.go:117] "RemoveContainer" containerID="fe9e42300bbe2a7e0d5dd9f65adec2cd28de2ba547557f8d50d3198a5190c6e2" Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.965509 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5b45bc49b8-vq989" Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.976177 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-67f94f9664-cd544" event={"ID":"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e","Type":"ContainerStarted","Data":"de6c3ff5cc24752d11a5ecf283955ba10ea0171066338d4427fd50840f65e7fb"} Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.976220 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-67f94f9664-cd544" event={"ID":"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e","Type":"ContainerStarted","Data":"2c623358850ff2ff54ddcf8841a4f6e0e66ef622633842e8bc1dabec1720076d"} Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.976231 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-67f94f9664-cd544" event={"ID":"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e","Type":"ContainerStarted","Data":"d3893ebe0b69c21f423ee65d072031e25359fc316ec91e348695754111d8b041"} Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.977074 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.977104 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.981796 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" event={"ID":"a5dc7991-ffde-4ef2-9668-e07d7c4aa614","Type":"ContainerStarted","Data":"4624405b99d73b299a8828478704896ad836a6a31e3b351117d416ec3a4599ec"} Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.987136 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-85878465f6-ss54r" event={"ID":"885b1538-0800-47c6-bedd-284b67cd08ca","Type":"ContainerStarted","Data":"1ac97d3fbc05cf7bafee02365ba6e8072b0190e37bc6dc6a85511de15808e82e"} Jan 22 12:16:39 crc kubenswrapper[4773]: I0122 12:16:39.998828 4773 scope.go:117] "RemoveContainer" containerID="59107cfd52159399988845ccee01c06ece3d653f7b4ecc2ea96e3f1f5efe00ee" Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.004432 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-76bfbd897-mgd28" event={"ID":"7c4ea182-f9b7-4895-872a-592fa6972eaa","Type":"ContainerStarted","Data":"3c0d596b0fa1a47f1e8f30b97cd1ba68aeab9cc91180fcd1b6c4cbc6892bfc4e"} Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.004487 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-76bfbd897-mgd28" event={"ID":"7c4ea182-f9b7-4895-872a-592fa6972eaa","Type":"ContainerStarted","Data":"4d2be8053b6427fe67d8fef3b415ac1ed506237e31a8229383f6fddeed7dd137"} Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.013478 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" podStartSLOduration=4.762079559 podStartE2EDuration="9.013454636s" 
podCreationTimestamp="2026-01-22 12:16:31 +0000 UTC" firstStartedPulling="2026-01-22 12:16:34.192374913 +0000 UTC m=+1301.770490738" lastFinishedPulling="2026-01-22 12:16:38.44374999 +0000 UTC m=+1306.021865815" observedRunningTime="2026-01-22 12:16:39.990614274 +0000 UTC m=+1307.568730099" watchObservedRunningTime="2026-01-22 12:16:40.013454636 +0000 UTC m=+1307.591570461" Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.020367 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5b45bc49b8-vq989"] Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.057370 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5b45bc49b8-vq989"] Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.061195 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" podStartSLOduration=4.829214497 podStartE2EDuration="9.061168918s" podCreationTimestamp="2026-01-22 12:16:31 +0000 UTC" firstStartedPulling="2026-01-22 12:16:34.207773656 +0000 UTC m=+1301.785889471" lastFinishedPulling="2026-01-22 12:16:38.439728057 +0000 UTC m=+1306.017843892" observedRunningTime="2026-01-22 12:16:40.038720046 +0000 UTC m=+1307.616835871" watchObservedRunningTime="2026-01-22 12:16:40.061168918 +0000 UTC m=+1307.639284753" Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.091363 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-76bfbd897-mgd28"] Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.094263 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-85878465f6-ss54r" podStartSLOduration=5.108181127 podStartE2EDuration="9.094242107s" podCreationTimestamp="2026-01-22 12:16:31 +0000 UTC" firstStartedPulling="2026-01-22 12:16:34.453674018 +0000 UTC m=+1302.031789843" lastFinishedPulling="2026-01-22 12:16:38.439734998 +0000 UTC m=+1306.017850823" observedRunningTime="2026-01-22 12:16:40.080018517 +0000 UTC m=+1307.658134342" watchObservedRunningTime="2026-01-22 12:16:40.094242107 +0000 UTC m=+1307.672357932" Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.102310 4773 scope.go:117] "RemoveContainer" containerID="fe9e42300bbe2a7e0d5dd9f65adec2cd28de2ba547557f8d50d3198a5190c6e2" Jan 22 12:16:40 crc kubenswrapper[4773]: E0122 12:16:40.114249 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe9e42300bbe2a7e0d5dd9f65adec2cd28de2ba547557f8d50d3198a5190c6e2\": container with ID starting with fe9e42300bbe2a7e0d5dd9f65adec2cd28de2ba547557f8d50d3198a5190c6e2 not found: ID does not exist" containerID="fe9e42300bbe2a7e0d5dd9f65adec2cd28de2ba547557f8d50d3198a5190c6e2" Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.114550 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe9e42300bbe2a7e0d5dd9f65adec2cd28de2ba547557f8d50d3198a5190c6e2"} err="failed to get container status \"fe9e42300bbe2a7e0d5dd9f65adec2cd28de2ba547557f8d50d3198a5190c6e2\": rpc error: code = NotFound desc = could not find container \"fe9e42300bbe2a7e0d5dd9f65adec2cd28de2ba547557f8d50d3198a5190c6e2\": container with ID starting with fe9e42300bbe2a7e0d5dd9f65adec2cd28de2ba547557f8d50d3198a5190c6e2 not found: ID does not exist" Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.114627 4773 scope.go:117] "RemoveContainer" 
containerID="59107cfd52159399988845ccee01c06ece3d653f7b4ecc2ea96e3f1f5efe00ee" Jan 22 12:16:40 crc kubenswrapper[4773]: E0122 12:16:40.124357 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59107cfd52159399988845ccee01c06ece3d653f7b4ecc2ea96e3f1f5efe00ee\": container with ID starting with 59107cfd52159399988845ccee01c06ece3d653f7b4ecc2ea96e3f1f5efe00ee not found: ID does not exist" containerID="59107cfd52159399988845ccee01c06ece3d653f7b4ecc2ea96e3f1f5efe00ee" Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.124673 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59107cfd52159399988845ccee01c06ece3d653f7b4ecc2ea96e3f1f5efe00ee"} err="failed to get container status \"59107cfd52159399988845ccee01c06ece3d653f7b4ecc2ea96e3f1f5efe00ee\": rpc error: code = NotFound desc = could not find container \"59107cfd52159399988845ccee01c06ece3d653f7b4ecc2ea96e3f1f5efe00ee\": container with ID starting with 59107cfd52159399988845ccee01c06ece3d653f7b4ecc2ea96e3f1f5efe00ee not found: ID does not exist" Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.124806 4773 scope.go:117] "RemoveContainer" containerID="fe9e42300bbe2a7e0d5dd9f65adec2cd28de2ba547557f8d50d3198a5190c6e2" Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.124472 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-7df9c7dc4-7qqhx"] Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.125721 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe9e42300bbe2a7e0d5dd9f65adec2cd28de2ba547557f8d50d3198a5190c6e2"} err="failed to get container status \"fe9e42300bbe2a7e0d5dd9f65adec2cd28de2ba547557f8d50d3198a5190c6e2\": rpc error: code = NotFound desc = could not find container \"fe9e42300bbe2a7e0d5dd9f65adec2cd28de2ba547557f8d50d3198a5190c6e2\": container with ID starting with fe9e42300bbe2a7e0d5dd9f65adec2cd28de2ba547557f8d50d3198a5190c6e2 not found: ID does not exist" Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.125771 4773 scope.go:117] "RemoveContainer" containerID="59107cfd52159399988845ccee01c06ece3d653f7b4ecc2ea96e3f1f5efe00ee" Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.126239 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-67f94f9664-cd544" podStartSLOduration=3.126219186 podStartE2EDuration="3.126219186s" podCreationTimestamp="2026-01-22 12:16:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:16:40.110207106 +0000 UTC m=+1307.688322921" watchObservedRunningTime="2026-01-22 12:16:40.126219186 +0000 UTC m=+1307.704335011" Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.126447 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59107cfd52159399988845ccee01c06ece3d653f7b4ecc2ea96e3f1f5efe00ee"} err="failed to get container status \"59107cfd52159399988845ccee01c06ece3d653f7b4ecc2ea96e3f1f5efe00ee\": rpc error: code = NotFound desc = could not find container \"59107cfd52159399988845ccee01c06ece3d653f7b4ecc2ea96e3f1f5efe00ee\": container with ID starting with 59107cfd52159399988845ccee01c06ece3d653f7b4ecc2ea96e3f1f5efe00ee not found: ID does not exist" Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.140597 4773 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/barbican-worker-76bfbd897-mgd28" podStartSLOduration=4.64791285 podStartE2EDuration="9.14058103s" podCreationTimestamp="2026-01-22 12:16:31 +0000 UTC" firstStartedPulling="2026-01-22 12:16:33.94822645 +0000 UTC m=+1301.526342275" lastFinishedPulling="2026-01-22 12:16:38.44089463 +0000 UTC m=+1306.019010455" observedRunningTime="2026-01-22 12:16:40.135367013 +0000 UTC m=+1307.713482838" watchObservedRunningTime="2026-01-22 12:16:40.14058103 +0000 UTC m=+1307.718696855" Jan 22 12:16:40 crc kubenswrapper[4773]: I0122 12:16:40.669261 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="286ffdd7-6466-4f26-8d94-76a84d97490f" path="/var/lib/kubelet/pods/286ffdd7-6466-4f26-8d94-76a84d97490f/volumes" Jan 22 12:16:42 crc kubenswrapper[4773]: I0122 12:16:42.027189 4773 generic.go:334] "Generic (PLEG): container finished" podID="4958bcb5-02cc-4c59-8b85-c386263dd3b8" containerID="b3bc996c5fce4a346e5c10f4bff70bbba9478b888eca409f8c86b9e8ed609774" exitCode=0 Jan 22 12:16:42 crc kubenswrapper[4773]: I0122 12:16:42.027313 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-ld8bk" event={"ID":"4958bcb5-02cc-4c59-8b85-c386263dd3b8","Type":"ContainerDied","Data":"b3bc996c5fce4a346e5c10f4bff70bbba9478b888eca409f8c86b9e8ed609774"} Jan 22 12:16:42 crc kubenswrapper[4773]: I0122 12:16:42.028087 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-76bfbd897-mgd28" podUID="7c4ea182-f9b7-4895-872a-592fa6972eaa" containerName="barbican-worker-log" containerID="cri-o://4d2be8053b6427fe67d8fef3b415ac1ed506237e31a8229383f6fddeed7dd137" gracePeriod=30 Jan 22 12:16:42 crc kubenswrapper[4773]: I0122 12:16:42.028136 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-76bfbd897-mgd28" podUID="7c4ea182-f9b7-4895-872a-592fa6972eaa" containerName="barbican-worker" containerID="cri-o://3c0d596b0fa1a47f1e8f30b97cd1ba68aeab9cc91180fcd1b6c4cbc6892bfc4e" gracePeriod=30 Jan 22 12:16:42 crc kubenswrapper[4773]: I0122 12:16:42.028587 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" podUID="2d2e9071-d1a4-4bb4-bb10-304d76c0f96a" containerName="barbican-keystone-listener-log" containerID="cri-o://0b78a302284ae6361ce3497090c8271c599effd0064f02890c0ccdec52d89429" gracePeriod=30 Jan 22 12:16:42 crc kubenswrapper[4773]: I0122 12:16:42.028669 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" podUID="2d2e9071-d1a4-4bb4-bb10-304d76c0f96a" containerName="barbican-keystone-listener" containerID="cri-o://8bbcbdbd880d4b84550ff53526ebb7d1c4d1b044cf62d3e78710cb2168597c9d" gracePeriod=30 Jan 22 12:16:42 crc kubenswrapper[4773]: I0122 12:16:42.158835 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:42 crc kubenswrapper[4773]: I0122 12:16:42.233811 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f6f8cb849-fgdwh"] Jan 22 12:16:42 crc kubenswrapper[4773]: I0122 12:16:42.234106 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" podUID="5dcbb5f2-81aa-41f4-be2c-9dd41c074f99" containerName="dnsmasq-dns" containerID="cri-o://e03f6de4c0dcdbbb21483a33d8dda8eb301f643f04d7aaa6df94dfd9ec3c8d11" gracePeriod=10 Jan 22 12:16:43 
crc kubenswrapper[4773]: I0122 12:16:43.054842 4773 generic.go:334] "Generic (PLEG): container finished" podID="5dcbb5f2-81aa-41f4-be2c-9dd41c074f99" containerID="e03f6de4c0dcdbbb21483a33d8dda8eb301f643f04d7aaa6df94dfd9ec3c8d11" exitCode=0 Jan 22 12:16:43 crc kubenswrapper[4773]: I0122 12:16:43.054878 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" event={"ID":"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99","Type":"ContainerDied","Data":"e03f6de4c0dcdbbb21483a33d8dda8eb301f643f04d7aaa6df94dfd9ec3c8d11"} Jan 22 12:16:43 crc kubenswrapper[4773]: I0122 12:16:43.059991 4773 generic.go:334] "Generic (PLEG): container finished" podID="7c4ea182-f9b7-4895-872a-592fa6972eaa" containerID="3c0d596b0fa1a47f1e8f30b97cd1ba68aeab9cc91180fcd1b6c4cbc6892bfc4e" exitCode=0 Jan 22 12:16:43 crc kubenswrapper[4773]: I0122 12:16:43.060015 4773 generic.go:334] "Generic (PLEG): container finished" podID="7c4ea182-f9b7-4895-872a-592fa6972eaa" containerID="4d2be8053b6427fe67d8fef3b415ac1ed506237e31a8229383f6fddeed7dd137" exitCode=143 Jan 22 12:16:43 crc kubenswrapper[4773]: I0122 12:16:43.060059 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-76bfbd897-mgd28" event={"ID":"7c4ea182-f9b7-4895-872a-592fa6972eaa","Type":"ContainerDied","Data":"3c0d596b0fa1a47f1e8f30b97cd1ba68aeab9cc91180fcd1b6c4cbc6892bfc4e"} Jan 22 12:16:43 crc kubenswrapper[4773]: I0122 12:16:43.060087 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-76bfbd897-mgd28" event={"ID":"7c4ea182-f9b7-4895-872a-592fa6972eaa","Type":"ContainerDied","Data":"4d2be8053b6427fe67d8fef3b415ac1ed506237e31a8229383f6fddeed7dd137"} Jan 22 12:16:43 crc kubenswrapper[4773]: I0122 12:16:43.061893 4773 generic.go:334] "Generic (PLEG): container finished" podID="2d2e9071-d1a4-4bb4-bb10-304d76c0f96a" containerID="8bbcbdbd880d4b84550ff53526ebb7d1c4d1b044cf62d3e78710cb2168597c9d" exitCode=0 Jan 22 12:16:43 crc kubenswrapper[4773]: I0122 12:16:43.061914 4773 generic.go:334] "Generic (PLEG): container finished" podID="2d2e9071-d1a4-4bb4-bb10-304d76c0f96a" containerID="0b78a302284ae6361ce3497090c8271c599effd0064f02890c0ccdec52d89429" exitCode=143 Jan 22 12:16:43 crc kubenswrapper[4773]: I0122 12:16:43.062058 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" event={"ID":"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a","Type":"ContainerDied","Data":"8bbcbdbd880d4b84550ff53526ebb7d1c4d1b044cf62d3e78710cb2168597c9d"} Jan 22 12:16:43 crc kubenswrapper[4773]: I0122 12:16:43.062077 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" event={"ID":"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a","Type":"ContainerDied","Data":"0b78a302284ae6361ce3497090c8271c599effd0064f02890c0ccdec52d89429"} Jan 22 12:16:44 crc kubenswrapper[4773]: I0122 12:16:44.340587 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:44 crc kubenswrapper[4773]: I0122 12:16:44.362915 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:44 crc kubenswrapper[4773]: I0122 12:16:44.772311 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" podUID="5dcbb5f2-81aa-41f4-be2c-9dd41c074f99" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.145:5353: connect: connection refused" 
Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.469826 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.469876 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.556277 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-76bfbd897-mgd28" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.658550 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4958bcb5-02cc-4c59-8b85-c386263dd3b8-etc-machine-id\") pod \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.658610 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-ovsdbserver-sb\") pod \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.658647 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-ovsdbserver-nb\") pod \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.658685 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c4ea182-f9b7-4895-872a-592fa6972eaa-logs\") pod \"7c4ea182-f9b7-4895-872a-592fa6972eaa\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.658728 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c4ea182-f9b7-4895-872a-592fa6972eaa-combined-ca-bundle\") pod \"7c4ea182-f9b7-4895-872a-592fa6972eaa\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.658777 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-config\") pod \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.658823 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rmw2g\" (UniqueName: \"kubernetes.io/projected/4958bcb5-02cc-4c59-8b85-c386263dd3b8-kube-api-access-rmw2g\") pod \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.658846 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-scripts\") pod \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.658868 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/7c4ea182-f9b7-4895-872a-592fa6972eaa-config-data-custom\") pod \"7c4ea182-f9b7-4895-872a-592fa6972eaa\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.658910 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-config-data\") pod \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.658958 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmc8c\" (UniqueName: \"kubernetes.io/projected/7c4ea182-f9b7-4895-872a-592fa6972eaa-kube-api-access-lmc8c\") pod \"7c4ea182-f9b7-4895-872a-592fa6972eaa\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.658980 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-dns-svc\") pod \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.659014 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-db-sync-config-data\") pod \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.659044 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjxnv\" (UniqueName: \"kubernetes.io/projected/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-kube-api-access-wjxnv\") pod \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.659080 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c4ea182-f9b7-4895-872a-592fa6972eaa-config-data\") pod \"7c4ea182-f9b7-4895-872a-592fa6972eaa\" (UID: \"7c4ea182-f9b7-4895-872a-592fa6972eaa\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.659120 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-combined-ca-bundle\") pod \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\" (UID: \"4958bcb5-02cc-4c59-8b85-c386263dd3b8\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.659145 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-dns-swift-storage-0\") pod \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\" (UID: \"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.659163 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4958bcb5-02cc-4c59-8b85-c386263dd3b8-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "4958bcb5-02cc-4c59-8b85-c386263dd3b8" (UID: "4958bcb5-02cc-4c59-8b85-c386263dd3b8"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.659891 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c4ea182-f9b7-4895-872a-592fa6972eaa-logs" (OuterVolumeSpecName: "logs") pod "7c4ea182-f9b7-4895-872a-592fa6972eaa" (UID: "7c4ea182-f9b7-4895-872a-592fa6972eaa"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.667833 4773 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4958bcb5-02cc-4c59-8b85-c386263dd3b8-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.667862 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7c4ea182-f9b7-4895-872a-592fa6972eaa-logs\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.667895 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-scripts" (OuterVolumeSpecName: "scripts") pod "4958bcb5-02cc-4c59-8b85-c386263dd3b8" (UID: "4958bcb5-02cc-4c59-8b85-c386263dd3b8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.668058 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4958bcb5-02cc-4c59-8b85-c386263dd3b8-kube-api-access-rmw2g" (OuterVolumeSpecName: "kube-api-access-rmw2g") pod "4958bcb5-02cc-4c59-8b85-c386263dd3b8" (UID: "4958bcb5-02cc-4c59-8b85-c386263dd3b8"). InnerVolumeSpecName "kube-api-access-rmw2g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.671720 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c4ea182-f9b7-4895-872a-592fa6972eaa-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7c4ea182-f9b7-4895-872a-592fa6972eaa" (UID: "7c4ea182-f9b7-4895-872a-592fa6972eaa"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.672237 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "4958bcb5-02cc-4c59-8b85-c386263dd3b8" (UID: "4958bcb5-02cc-4c59-8b85-c386263dd3b8"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.679954 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-kube-api-access-wjxnv" (OuterVolumeSpecName: "kube-api-access-wjxnv") pod "5dcbb5f2-81aa-41f4-be2c-9dd41c074f99" (UID: "5dcbb5f2-81aa-41f4-be2c-9dd41c074f99"). InnerVolumeSpecName "kube-api-access-wjxnv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.688888 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c4ea182-f9b7-4895-872a-592fa6972eaa-kube-api-access-lmc8c" (OuterVolumeSpecName: "kube-api-access-lmc8c") pod "7c4ea182-f9b7-4895-872a-592fa6972eaa" (UID: "7c4ea182-f9b7-4895-872a-592fa6972eaa"). InnerVolumeSpecName "kube-api-access-lmc8c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.737825 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5dcbb5f2-81aa-41f4-be2c-9dd41c074f99" (UID: "5dcbb5f2-81aa-41f4-be2c-9dd41c074f99"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.738409 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c4ea182-f9b7-4895-872a-592fa6972eaa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c4ea182-f9b7-4895-872a-592fa6972eaa" (UID: "7c4ea182-f9b7-4895-872a-592fa6972eaa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.741589 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4958bcb5-02cc-4c59-8b85-c386263dd3b8" (UID: "4958bcb5-02cc-4c59-8b85-c386263dd3b8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.749046 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5dcbb5f2-81aa-41f4-be2c-9dd41c074f99" (UID: "5dcbb5f2-81aa-41f4-be2c-9dd41c074f99"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.752576 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5dcbb5f2-81aa-41f4-be2c-9dd41c074f99" (UID: "5dcbb5f2-81aa-41f4-be2c-9dd41c074f99"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.755488 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c4ea182-f9b7-4895-872a-592fa6972eaa-config-data" (OuterVolumeSpecName: "config-data") pod "7c4ea182-f9b7-4895-872a-592fa6972eaa" (UID: "7c4ea182-f9b7-4895-872a-592fa6972eaa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.755817 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5dcbb5f2-81aa-41f4-be2c-9dd41c074f99" (UID: "5dcbb5f2-81aa-41f4-be2c-9dd41c074f99"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.759013 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-config" (OuterVolumeSpecName: "config") pod "5dcbb5f2-81aa-41f4-be2c-9dd41c074f99" (UID: "5dcbb5f2-81aa-41f4-be2c-9dd41c074f99"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.768590 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-config-data" (OuterVolumeSpecName: "config-data") pod "4958bcb5-02cc-4c59-8b85-c386263dd3b8" (UID: "4958bcb5-02cc-4c59-8b85-c386263dd3b8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.771672 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rmw2g\" (UniqueName: \"kubernetes.io/projected/4958bcb5-02cc-4c59-8b85-c386263dd3b8-kube-api-access-rmw2g\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.772001 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.772120 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7c4ea182-f9b7-4895-872a-592fa6972eaa-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.772222 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.772307 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmc8c\" (UniqueName: \"kubernetes.io/projected/7c4ea182-f9b7-4895-872a-592fa6972eaa-kube-api-access-lmc8c\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.772385 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.772444 4773 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.772507 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjxnv\" (UniqueName: \"kubernetes.io/projected/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-kube-api-access-wjxnv\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.772570 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c4ea182-f9b7-4895-872a-592fa6972eaa-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.772634 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/4958bcb5-02cc-4c59-8b85-c386263dd3b8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.772718 4773 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.772820 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.772953 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.773036 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c4ea182-f9b7-4895-872a-592fa6972eaa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.773121 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.861904 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.976566 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5llx7\" (UniqueName: \"kubernetes.io/projected/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-kube-api-access-5llx7\") pod \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.976636 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-config-data\") pod \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.976686 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-config-data-custom\") pod \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.976737 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-combined-ca-bundle\") pod \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.976786 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-logs\") pod \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\" (UID: \"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a\") " Jan 22 12:16:46 crc kubenswrapper[4773]: I0122 12:16:46.977637 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded 
for volume "kubernetes.io/empty-dir/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-logs" (OuterVolumeSpecName: "logs") pod "2d2e9071-d1a4-4bb4-bb10-304d76c0f96a" (UID: "2d2e9071-d1a4-4bb4-bb10-304d76c0f96a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.007113 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2d2e9071-d1a4-4bb4-bb10-304d76c0f96a" (UID: "2d2e9071-d1a4-4bb4-bb10-304d76c0f96a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.008523 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-kube-api-access-5llx7" (OuterVolumeSpecName: "kube-api-access-5llx7") pod "2d2e9071-d1a4-4bb4-bb10-304d76c0f96a" (UID: "2d2e9071-d1a4-4bb4-bb10-304d76c0f96a"). InnerVolumeSpecName "kube-api-access-5llx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.029592 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2d2e9071-d1a4-4bb4-bb10-304d76c0f96a" (UID: "2d2e9071-d1a4-4bb4-bb10-304d76c0f96a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.040247 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-config-data" (OuterVolumeSpecName: "config-data") pod "2d2e9071-d1a4-4bb4-bb10-304d76c0f96a" (UID: "2d2e9071-d1a4-4bb4-bb10-304d76c0f96a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.080831 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5llx7\" (UniqueName: \"kubernetes.io/projected/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-kube-api-access-5llx7\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.080889 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.080903 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.080915 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.080928 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a-logs\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.109586 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" event={"ID":"2d2e9071-d1a4-4bb4-bb10-304d76c0f96a","Type":"ContainerDied","Data":"cb83c421cd9737c8cc2cc60d7601bf50b44bffd3d1f79c424106d8877cd6322f"} Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.109661 4773 scope.go:117] "RemoveContainer" containerID="8bbcbdbd880d4b84550ff53526ebb7d1c4d1b044cf62d3e78710cb2168597c9d" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.109825 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7df9c7dc4-7qqhx" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.117163 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-ld8bk" event={"ID":"4958bcb5-02cc-4c59-8b85-c386263dd3b8","Type":"ContainerDied","Data":"63a14ed4e5f6d8891dd2093bdd3446b4e3a1e85748b41926b6f3295f4c72973b"} Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.117229 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="63a14ed4e5f6d8891dd2093bdd3446b4e3a1e85748b41926b6f3295f4c72973b" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.117245 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-ld8bk" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.121442 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" event={"ID":"5dcbb5f2-81aa-41f4-be2c-9dd41c074f99","Type":"ContainerDied","Data":"5ba715668cd0bb299e779bdb70c92553dfccdc7587463efb80d9b8b89023d1d9"} Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.121588 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f6f8cb849-fgdwh" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.134802 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7ce9300d-fcb4-43e9-810f-5000109e6799","Type":"ContainerStarted","Data":"2c79230e5a448011a105ad38c073954408f90104f9b558f93e9e046d37bbfd1a"} Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.134931 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7ce9300d-fcb4-43e9-810f-5000109e6799" containerName="ceilometer-central-agent" containerID="cri-o://44593c6222305a57348670a7516853ed2d97360e051ca06265b02058e455a08f" gracePeriod=30 Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.134990 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7ce9300d-fcb4-43e9-810f-5000109e6799" containerName="proxy-httpd" containerID="cri-o://2c79230e5a448011a105ad38c073954408f90104f9b558f93e9e046d37bbfd1a" gracePeriod=30 Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.135091 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7ce9300d-fcb4-43e9-810f-5000109e6799" containerName="sg-core" containerID="cri-o://d50beff7b691717d8c6f0ffe613cc839800206419ad9ec33400f194d69d7f41e" gracePeriod=30 Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.135132 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.135175 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7ce9300d-fcb4-43e9-810f-5000109e6799" containerName="ceilometer-notification-agent" containerID="cri-o://a40f43b556ac9db86063b83ae6174312496308bcc17641ac80e35dc0afa3a783" gracePeriod=30 Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.149972 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-76bfbd897-mgd28" event={"ID":"7c4ea182-f9b7-4895-872a-592fa6972eaa","Type":"ContainerDied","Data":"ea6d66eb9421cec8b1349f9262fa23d70602ffdcac9df3512d620ef06decd7c9"} Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.150126 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-76bfbd897-mgd28" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.175256 4773 scope.go:117] "RemoveContainer" containerID="0b78a302284ae6361ce3497090c8271c599effd0064f02890c0ccdec52d89429" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.181728 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.856748589 podStartE2EDuration="54.181688639s" podCreationTimestamp="2026-01-22 12:15:53 +0000 UTC" firstStartedPulling="2026-01-22 12:15:54.99412205 +0000 UTC m=+1262.572237875" lastFinishedPulling="2026-01-22 12:16:46.3190621 +0000 UTC m=+1313.897177925" observedRunningTime="2026-01-22 12:16:47.175510045 +0000 UTC m=+1314.753625890" watchObservedRunningTime="2026-01-22 12:16:47.181688639 +0000 UTC m=+1314.759804464" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.195171 4773 scope.go:117] "RemoveContainer" containerID="e03f6de4c0dcdbbb21483a33d8dda8eb301f643f04d7aaa6df94dfd9ec3c8d11" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.215539 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f6f8cb849-fgdwh"] Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.221046 4773 scope.go:117] "RemoveContainer" containerID="3727c12b7dcc27228baf58bdefa1203496c5c97590da3ec6a1e744ce45b8a029" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.231188 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6f6f8cb849-fgdwh"] Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.240222 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-76bfbd897-mgd28"] Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.248481 4773 scope.go:117] "RemoveContainer" containerID="3c0d596b0fa1a47f1e8f30b97cd1ba68aeab9cc91180fcd1b6c4cbc6892bfc4e" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.248636 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-76bfbd897-mgd28"] Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.257182 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-7df9c7dc4-7qqhx"] Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.269207 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-7df9c7dc4-7qqhx"] Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.294526 4773 scope.go:117] "RemoveContainer" containerID="4d2be8053b6427fe67d8fef3b415ac1ed506237e31a8229383f6fddeed7dd137" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.898473 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 12:16:47 crc kubenswrapper[4773]: E0122 12:16:47.898966 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="286ffdd7-6466-4f26-8d94-76a84d97490f" containerName="barbican-api-log" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.898986 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="286ffdd7-6466-4f26-8d94-76a84d97490f" containerName="barbican-api-log" Jan 22 12:16:47 crc kubenswrapper[4773]: E0122 12:16:47.899006 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c4ea182-f9b7-4895-872a-592fa6972eaa" containerName="barbican-worker" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.899013 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c4ea182-f9b7-4895-872a-592fa6972eaa" containerName="barbican-worker" Jan 22 
12:16:47 crc kubenswrapper[4773]: E0122 12:16:47.899027 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dcbb5f2-81aa-41f4-be2c-9dd41c074f99" containerName="init" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.899036 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dcbb5f2-81aa-41f4-be2c-9dd41c074f99" containerName="init" Jan 22 12:16:47 crc kubenswrapper[4773]: E0122 12:16:47.899050 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dcbb5f2-81aa-41f4-be2c-9dd41c074f99" containerName="dnsmasq-dns" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.899058 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dcbb5f2-81aa-41f4-be2c-9dd41c074f99" containerName="dnsmasq-dns" Jan 22 12:16:47 crc kubenswrapper[4773]: E0122 12:16:47.899073 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c4ea182-f9b7-4895-872a-592fa6972eaa" containerName="barbican-worker-log" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.899080 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c4ea182-f9b7-4895-872a-592fa6972eaa" containerName="barbican-worker-log" Jan 22 12:16:47 crc kubenswrapper[4773]: E0122 12:16:47.899097 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4958bcb5-02cc-4c59-8b85-c386263dd3b8" containerName="cinder-db-sync" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.899105 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="4958bcb5-02cc-4c59-8b85-c386263dd3b8" containerName="cinder-db-sync" Jan 22 12:16:47 crc kubenswrapper[4773]: E0122 12:16:47.899119 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d2e9071-d1a4-4bb4-bb10-304d76c0f96a" containerName="barbican-keystone-listener" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.899127 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d2e9071-d1a4-4bb4-bb10-304d76c0f96a" containerName="barbican-keystone-listener" Jan 22 12:16:47 crc kubenswrapper[4773]: E0122 12:16:47.899146 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="286ffdd7-6466-4f26-8d94-76a84d97490f" containerName="barbican-api" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.899153 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="286ffdd7-6466-4f26-8d94-76a84d97490f" containerName="barbican-api" Jan 22 12:16:47 crc kubenswrapper[4773]: E0122 12:16:47.899171 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d2e9071-d1a4-4bb4-bb10-304d76c0f96a" containerName="barbican-keystone-listener-log" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.899178 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d2e9071-d1a4-4bb4-bb10-304d76c0f96a" containerName="barbican-keystone-listener-log" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.908370 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c4ea182-f9b7-4895-872a-592fa6972eaa" containerName="barbican-worker" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.908419 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c4ea182-f9b7-4895-872a-592fa6972eaa" containerName="barbican-worker-log" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.908433 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="286ffdd7-6466-4f26-8d94-76a84d97490f" containerName="barbican-api" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.908447 4773 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="286ffdd7-6466-4f26-8d94-76a84d97490f" containerName="barbican-api-log" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.908460 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="4958bcb5-02cc-4c59-8b85-c386263dd3b8" containerName="cinder-db-sync" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.908472 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d2e9071-d1a4-4bb4-bb10-304d76c0f96a" containerName="barbican-keystone-listener" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.908497 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5dcbb5f2-81aa-41f4-be2c-9dd41c074f99" containerName="dnsmasq-dns" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.908522 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d2e9071-d1a4-4bb4-bb10-304d76c0f96a" containerName="barbican-keystone-listener-log" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.910003 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.915081 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.915308 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.915357 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-xzrrd" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.915664 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 22 12:16:47 crc kubenswrapper[4773]: I0122 12:16:47.916970 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.001205 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " pod="openstack/cinder-scheduler-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.001538 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " pod="openstack/cinder-scheduler-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.001621 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-scripts\") pod \"cinder-scheduler-0\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " pod="openstack/cinder-scheduler-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.001648 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvbvl\" (UniqueName: \"kubernetes.io/projected/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-kube-api-access-bvbvl\") pod \"cinder-scheduler-0\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " pod="openstack/cinder-scheduler-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.001671 4773 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-config-data\") pod \"cinder-scheduler-0\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " pod="openstack/cinder-scheduler-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.001687 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " pod="openstack/cinder-scheduler-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.009967 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75dbb546bf-xw2mz"] Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.011600 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.052817 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75dbb546bf-xw2mz"] Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.103333 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-ovsdbserver-sb\") pod \"dnsmasq-dns-75dbb546bf-xw2mz\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.103377 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-dns-swift-storage-0\") pod \"dnsmasq-dns-75dbb546bf-xw2mz\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.103410 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-scripts\") pod \"cinder-scheduler-0\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " pod="openstack/cinder-scheduler-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.103432 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-dns-svc\") pod \"dnsmasq-dns-75dbb546bf-xw2mz\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.103460 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9vvc\" (UniqueName: \"kubernetes.io/projected/68b61a4c-4e01-4588-877f-91017ce2df24-kube-api-access-k9vvc\") pod \"dnsmasq-dns-75dbb546bf-xw2mz\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.103477 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvbvl\" (UniqueName: \"kubernetes.io/projected/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-kube-api-access-bvbvl\") pod \"cinder-scheduler-0\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " 
pod="openstack/cinder-scheduler-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.103498 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-ovsdbserver-nb\") pod \"dnsmasq-dns-75dbb546bf-xw2mz\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.103514 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-config-data\") pod \"cinder-scheduler-0\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " pod="openstack/cinder-scheduler-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.103527 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " pod="openstack/cinder-scheduler-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.103570 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-config\") pod \"dnsmasq-dns-75dbb546bf-xw2mz\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.103590 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " pod="openstack/cinder-scheduler-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.103653 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " pod="openstack/cinder-scheduler-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.110398 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " pod="openstack/cinder-scheduler-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.129644 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " pod="openstack/cinder-scheduler-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.129729 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-scripts\") pod \"cinder-scheduler-0\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " pod="openstack/cinder-scheduler-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.132354 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-config-data\") pod \"cinder-scheduler-0\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " pod="openstack/cinder-scheduler-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.136974 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " pod="openstack/cinder-scheduler-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.145996 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvbvl\" (UniqueName: \"kubernetes.io/projected/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-kube-api-access-bvbvl\") pod \"cinder-scheduler-0\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " pod="openstack/cinder-scheduler-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.204919 4773 generic.go:334] "Generic (PLEG): container finished" podID="7ce9300d-fcb4-43e9-810f-5000109e6799" containerID="2c79230e5a448011a105ad38c073954408f90104f9b558f93e9e046d37bbfd1a" exitCode=0 Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.204963 4773 generic.go:334] "Generic (PLEG): container finished" podID="7ce9300d-fcb4-43e9-810f-5000109e6799" containerID="d50beff7b691717d8c6f0ffe613cc839800206419ad9ec33400f194d69d7f41e" exitCode=2 Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.204974 4773 generic.go:334] "Generic (PLEG): container finished" podID="7ce9300d-fcb4-43e9-810f-5000109e6799" containerID="44593c6222305a57348670a7516853ed2d97360e051ca06265b02058e455a08f" exitCode=0 Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.205052 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7ce9300d-fcb4-43e9-810f-5000109e6799","Type":"ContainerDied","Data":"2c79230e5a448011a105ad38c073954408f90104f9b558f93e9e046d37bbfd1a"} Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.205086 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7ce9300d-fcb4-43e9-810f-5000109e6799","Type":"ContainerDied","Data":"d50beff7b691717d8c6f0ffe613cc839800206419ad9ec33400f194d69d7f41e"} Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.205098 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7ce9300d-fcb4-43e9-810f-5000109e6799","Type":"ContainerDied","Data":"44593c6222305a57348670a7516853ed2d97360e051ca06265b02058e455a08f"} Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.206455 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-config\") pod \"dnsmasq-dns-75dbb546bf-xw2mz\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.206597 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-ovsdbserver-sb\") pod \"dnsmasq-dns-75dbb546bf-xw2mz\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.206625 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-dns-swift-storage-0\") pod \"dnsmasq-dns-75dbb546bf-xw2mz\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.206664 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-dns-svc\") pod \"dnsmasq-dns-75dbb546bf-xw2mz\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.206695 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9vvc\" (UniqueName: \"kubernetes.io/projected/68b61a4c-4e01-4588-877f-91017ce2df24-kube-api-access-k9vvc\") pod \"dnsmasq-dns-75dbb546bf-xw2mz\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.206739 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-ovsdbserver-nb\") pod \"dnsmasq-dns-75dbb546bf-xw2mz\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.207816 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-ovsdbserver-nb\") pod \"dnsmasq-dns-75dbb546bf-xw2mz\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.208479 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-config\") pod \"dnsmasq-dns-75dbb546bf-xw2mz\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.209097 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-ovsdbserver-sb\") pod \"dnsmasq-dns-75dbb546bf-xw2mz\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.209550 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-dns-svc\") pod \"dnsmasq-dns-75dbb546bf-xw2mz\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.214397 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-dns-swift-storage-0\") pod \"dnsmasq-dns-75dbb546bf-xw2mz\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.236368 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.272792 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9vvc\" (UniqueName: \"kubernetes.io/projected/68b61a4c-4e01-4588-877f-91017ce2df24-kube-api-access-k9vvc\") pod \"dnsmasq-dns-75dbb546bf-xw2mz\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.331394 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.495427 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.505837 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.518214 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.547914 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.621787 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-config-data\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.621836 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-config-data-custom\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.621910 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-scripts\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.621938 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8hlv\" (UniqueName: \"kubernetes.io/projected/174a4c39-5867-48de-a5a7-972ba37ead2e-kube-api-access-j8hlv\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.622017 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/174a4c39-5867-48de-a5a7-972ba37ead2e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.622059 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc 
kubenswrapper[4773]: I0122 12:16:48.622109 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/174a4c39-5867-48de-a5a7-972ba37ead2e-logs\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.711034 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d2e9071-d1a4-4bb4-bb10-304d76c0f96a" path="/var/lib/kubelet/pods/2d2e9071-d1a4-4bb4-bb10-304d76c0f96a/volumes" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.713270 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5dcbb5f2-81aa-41f4-be2c-9dd41c074f99" path="/var/lib/kubelet/pods/5dcbb5f2-81aa-41f4-be2c-9dd41c074f99/volumes" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.714252 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c4ea182-f9b7-4895-872a-592fa6972eaa" path="/var/lib/kubelet/pods/7c4ea182-f9b7-4895-872a-592fa6972eaa/volumes" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.724048 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-config-data\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.724093 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-config-data-custom\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.724152 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-scripts\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.724172 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8hlv\" (UniqueName: \"kubernetes.io/projected/174a4c39-5867-48de-a5a7-972ba37ead2e-kube-api-access-j8hlv\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.724236 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/174a4c39-5867-48de-a5a7-972ba37ead2e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.724265 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.724317 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/174a4c39-5867-48de-a5a7-972ba37ead2e-logs\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " 
pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.724827 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/174a4c39-5867-48de-a5a7-972ba37ead2e-logs\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.727832 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/174a4c39-5867-48de-a5a7-972ba37ead2e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.733894 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-config-data-custom\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.735098 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-config-data\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.735430 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-scripts\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.738920 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.754211 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8hlv\" (UniqueName: \"kubernetes.io/projected/174a4c39-5867-48de-a5a7-972ba37ead2e-kube-api-access-j8hlv\") pod \"cinder-api-0\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " pod="openstack/cinder-api-0" Jan 22 12:16:48 crc kubenswrapper[4773]: I0122 12:16:48.927882 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 22 12:16:49 crc kubenswrapper[4773]: I0122 12:16:49.043037 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 12:16:49 crc kubenswrapper[4773]: I0122 12:16:49.181924 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75dbb546bf-xw2mz"] Jan 22 12:16:49 crc kubenswrapper[4773]: I0122 12:16:49.284685 4773 generic.go:334] "Generic (PLEG): container finished" podID="7ce9300d-fcb4-43e9-810f-5000109e6799" containerID="a40f43b556ac9db86063b83ae6174312496308bcc17641ac80e35dc0afa3a783" exitCode=0 Jan 22 12:16:49 crc kubenswrapper[4773]: I0122 12:16:49.284748 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7ce9300d-fcb4-43e9-810f-5000109e6799","Type":"ContainerDied","Data":"a40f43b556ac9db86063b83ae6174312496308bcc17641ac80e35dc0afa3a783"} Jan 22 12:16:49 crc kubenswrapper[4773]: I0122 12:16:49.286532 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" event={"ID":"68b61a4c-4e01-4588-877f-91017ce2df24","Type":"ContainerStarted","Data":"39aa190fa5efab3796562777b79a946e8515dbe0fc6fe8c0d92af46e9e73b659"} Jan 22 12:16:49 crc kubenswrapper[4773]: I0122 12:16:49.288939 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4","Type":"ContainerStarted","Data":"38c6d09e71eedcd5e8eb0353197ae6895cfbc3c658a8da46c011f2b59f6629fd"} Jan 22 12:16:49 crc kubenswrapper[4773]: I0122 12:16:49.307077 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 22 12:16:49 crc kubenswrapper[4773]: I0122 12:16:49.894006 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 12:16:49 crc kubenswrapper[4773]: I0122 12:16:49.974088 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-combined-ca-bundle\") pod \"7ce9300d-fcb4-43e9-810f-5000109e6799\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " Jan 22 12:16:49 crc kubenswrapper[4773]: I0122 12:16:49.974166 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-scripts\") pod \"7ce9300d-fcb4-43e9-810f-5000109e6799\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " Jan 22 12:16:49 crc kubenswrapper[4773]: I0122 12:16:49.974198 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ce9300d-fcb4-43e9-810f-5000109e6799-log-httpd\") pod \"7ce9300d-fcb4-43e9-810f-5000109e6799\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " Jan 22 12:16:49 crc kubenswrapper[4773]: I0122 12:16:49.974275 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-sg-core-conf-yaml\") pod \"7ce9300d-fcb4-43e9-810f-5000109e6799\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " Jan 22 12:16:49 crc kubenswrapper[4773]: I0122 12:16:49.974326 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnpdg\" (UniqueName: \"kubernetes.io/projected/7ce9300d-fcb4-43e9-810f-5000109e6799-kube-api-access-rnpdg\") pod \"7ce9300d-fcb4-43e9-810f-5000109e6799\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " Jan 22 12:16:49 crc kubenswrapper[4773]: I0122 12:16:49.974362 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-config-data\") pod \"7ce9300d-fcb4-43e9-810f-5000109e6799\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " Jan 22 12:16:49 crc kubenswrapper[4773]: I0122 12:16:49.974385 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ce9300d-fcb4-43e9-810f-5000109e6799-run-httpd\") pod \"7ce9300d-fcb4-43e9-810f-5000109e6799\" (UID: \"7ce9300d-fcb4-43e9-810f-5000109e6799\") " Jan 22 12:16:49 crc kubenswrapper[4773]: I0122 12:16:49.975650 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ce9300d-fcb4-43e9-810f-5000109e6799-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7ce9300d-fcb4-43e9-810f-5000109e6799" (UID: "7ce9300d-fcb4-43e9-810f-5000109e6799"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:16:49 crc kubenswrapper[4773]: I0122 12:16:49.977755 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ce9300d-fcb4-43e9-810f-5000109e6799-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7ce9300d-fcb4-43e9-810f-5000109e6799" (UID: "7ce9300d-fcb4-43e9-810f-5000109e6799"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:16:49 crc kubenswrapper[4773]: I0122 12:16:49.986389 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-scripts" (OuterVolumeSpecName: "scripts") pod "7ce9300d-fcb4-43e9-810f-5000109e6799" (UID: "7ce9300d-fcb4-43e9-810f-5000109e6799"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:49.999209 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ce9300d-fcb4-43e9-810f-5000109e6799-kube-api-access-rnpdg" (OuterVolumeSpecName: "kube-api-access-rnpdg") pod "7ce9300d-fcb4-43e9-810f-5000109e6799" (UID: "7ce9300d-fcb4-43e9-810f-5000109e6799"). InnerVolumeSpecName "kube-api-access-rnpdg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.026892 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7ce9300d-fcb4-43e9-810f-5000109e6799" (UID: "7ce9300d-fcb4-43e9-810f-5000109e6799"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.076479 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.076876 4773 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ce9300d-fcb4-43e9-810f-5000109e6799-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.076889 4773 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.076901 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnpdg\" (UniqueName: \"kubernetes.io/projected/7ce9300d-fcb4-43e9-810f-5000109e6799-kube-api-access-rnpdg\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.076911 4773 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ce9300d-fcb4-43e9-810f-5000109e6799-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.082026 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.121446 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ce9300d-fcb4-43e9-810f-5000109e6799" (UID: "7ce9300d-fcb4-43e9-810f-5000109e6799"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.146377 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-config-data" (OuterVolumeSpecName: "config-data") pod "7ce9300d-fcb4-43e9-810f-5000109e6799" (UID: "7ce9300d-fcb4-43e9-810f-5000109e6799"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.179574 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.179631 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ce9300d-fcb4-43e9-810f-5000109e6799-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.319911 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"174a4c39-5867-48de-a5a7-972ba37ead2e","Type":"ContainerStarted","Data":"8f43b85a2231ba15d7d528cb26e40b25543868cb822794ca56da363c864fca66"} Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.323654 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7ce9300d-fcb4-43e9-810f-5000109e6799","Type":"ContainerDied","Data":"20ed79f8166a789fb245b971a14affde6c137759c88d76160fef3496ce5905b3"} Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.323747 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.323752 4773 scope.go:117] "RemoveContainer" containerID="2c79230e5a448011a105ad38c073954408f90104f9b558f93e9e046d37bbfd1a" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.330126 4773 generic.go:334] "Generic (PLEG): container finished" podID="68b61a4c-4e01-4588-877f-91017ce2df24" containerID="55059cd4b5ef17b22e2ad90464ec26946314dff7aa14d8ab5c318202f7f753b0" exitCode=0 Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.330181 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" event={"ID":"68b61a4c-4e01-4588-877f-91017ce2df24","Type":"ContainerDied","Data":"55059cd4b5ef17b22e2ad90464ec26946314dff7aa14d8ab5c318202f7f753b0"} Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.382858 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.403780 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.410767 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.504653 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:16:50 crc kubenswrapper[4773]: E0122 12:16:50.505360 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ce9300d-fcb4-43e9-810f-5000109e6799" containerName="ceilometer-central-agent" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.505389 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ce9300d-fcb4-43e9-810f-5000109e6799" 
containerName="ceilometer-central-agent" Jan 22 12:16:50 crc kubenswrapper[4773]: E0122 12:16:50.505400 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ce9300d-fcb4-43e9-810f-5000109e6799" containerName="ceilometer-notification-agent" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.505410 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ce9300d-fcb4-43e9-810f-5000109e6799" containerName="ceilometer-notification-agent" Jan 22 12:16:50 crc kubenswrapper[4773]: E0122 12:16:50.505439 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ce9300d-fcb4-43e9-810f-5000109e6799" containerName="proxy-httpd" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.505449 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ce9300d-fcb4-43e9-810f-5000109e6799" containerName="proxy-httpd" Jan 22 12:16:50 crc kubenswrapper[4773]: E0122 12:16:50.505460 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ce9300d-fcb4-43e9-810f-5000109e6799" containerName="sg-core" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.505467 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ce9300d-fcb4-43e9-810f-5000109e6799" containerName="sg-core" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.505680 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ce9300d-fcb4-43e9-810f-5000109e6799" containerName="sg-core" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.505698 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ce9300d-fcb4-43e9-810f-5000109e6799" containerName="ceilometer-central-agent" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.505712 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ce9300d-fcb4-43e9-810f-5000109e6799" containerName="ceilometer-notification-agent" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.505722 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ce9300d-fcb4-43e9-810f-5000109e6799" containerName="proxy-httpd" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.507772 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.513873 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.513882 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.554479 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.568500 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-76746cdbc6-gbqml"] Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.568821 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-76746cdbc6-gbqml" podUID="5dd744cf-1206-46a3-8af5-c18012bc1461" containerName="barbican-api-log" containerID="cri-o://937aa35331c96c0ea6173b73a30dbf047b384131919760e1cf16dd1298060f4a" gracePeriod=30 Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.568909 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-76746cdbc6-gbqml" podUID="5dd744cf-1206-46a3-8af5-c18012bc1461" containerName="barbican-api" containerID="cri-o://8b5b57dda84416c13cf815f4dbd5e41110823d9055dd647ae57e2bcc91185d7d" gracePeriod=30 Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.587825 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-76746cdbc6-gbqml" podUID="5dd744cf-1206-46a3-8af5-c18012bc1461" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.162:9311/healthcheck\": EOF" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.588760 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.588927 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-config-data\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.588959 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-scripts\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.589013 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.589075 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f568dc9b-1245-4e94-9b82-45b619c401ad-run-httpd\") pod \"ceilometer-0\" (UID: 
\"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.589206 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc7mv\" (UniqueName: \"kubernetes.io/projected/f568dc9b-1245-4e94-9b82-45b619c401ad-kube-api-access-jc7mv\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.589234 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f568dc9b-1245-4e94-9b82-45b619c401ad-log-httpd\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.665359 4773 scope.go:117] "RemoveContainer" containerID="d50beff7b691717d8c6f0ffe613cc839800206419ad9ec33400f194d69d7f41e" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.690306 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-config-data\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.690348 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-scripts\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.690374 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.690420 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f568dc9b-1245-4e94-9b82-45b619c401ad-run-httpd\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.690520 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f568dc9b-1245-4e94-9b82-45b619c401ad-log-httpd\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.690544 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc7mv\" (UniqueName: \"kubernetes.io/projected/f568dc9b-1245-4e94-9b82-45b619c401ad-kube-api-access-jc7mv\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.690622 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.698384 4773 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f568dc9b-1245-4e94-9b82-45b619c401ad-log-httpd\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.698647 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f568dc9b-1245-4e94-9b82-45b619c401ad-run-httpd\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.702670 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ce9300d-fcb4-43e9-810f-5000109e6799" path="/var/lib/kubelet/pods/7ce9300d-fcb4-43e9-810f-5000109e6799/volumes" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.707490 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.708705 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-config-data\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.710701 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.716272 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jc7mv\" (UniqueName: \"kubernetes.io/projected/f568dc9b-1245-4e94-9b82-45b619c401ad-kube-api-access-jc7mv\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.719547 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-scripts\") pod \"ceilometer-0\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") " pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.872759 4773 scope.go:117] "RemoveContainer" containerID="a40f43b556ac9db86063b83ae6174312496308bcc17641ac80e35dc0afa3a783" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.941883 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 12:16:50 crc kubenswrapper[4773]: I0122 12:16:50.962157 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 22 12:16:51 crc kubenswrapper[4773]: I0122 12:16:51.080273 4773 scope.go:117] "RemoveContainer" containerID="44593c6222305a57348670a7516853ed2d97360e051ca06265b02058e455a08f" Jan 22 12:16:51 crc kubenswrapper[4773]: I0122 12:16:51.355986 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"174a4c39-5867-48de-a5a7-972ba37ead2e","Type":"ContainerStarted","Data":"c6a0b354c83de3253d70d803a3be4aec0e44061ec8dea64a45ae2ba9231aab68"} Jan 22 12:16:51 crc kubenswrapper[4773]: I0122 12:16:51.365464 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" event={"ID":"68b61a4c-4e01-4588-877f-91017ce2df24","Type":"ContainerStarted","Data":"7efeecf21222a49c18fffb381c026b88c867704337a91777a215f1b5469f21b0"} Jan 22 12:16:51 crc kubenswrapper[4773]: I0122 12:16:51.366937 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:51 crc kubenswrapper[4773]: I0122 12:16:51.391530 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" podStartSLOduration=4.3915101100000005 podStartE2EDuration="4.39151011s" podCreationTimestamp="2026-01-22 12:16:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:16:51.388705261 +0000 UTC m=+1318.966821086" watchObservedRunningTime="2026-01-22 12:16:51.39151011 +0000 UTC m=+1318.969625935" Jan 22 12:16:51 crc kubenswrapper[4773]: I0122 12:16:51.410216 4773 generic.go:334] "Generic (PLEG): container finished" podID="5dd744cf-1206-46a3-8af5-c18012bc1461" containerID="937aa35331c96c0ea6173b73a30dbf047b384131919760e1cf16dd1298060f4a" exitCode=143 Jan 22 12:16:51 crc kubenswrapper[4773]: I0122 12:16:51.410274 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-76746cdbc6-gbqml" event={"ID":"5dd744cf-1206-46a3-8af5-c18012bc1461","Type":"ContainerDied","Data":"937aa35331c96c0ea6173b73a30dbf047b384131919760e1cf16dd1298060f4a"} Jan 22 12:16:51 crc kubenswrapper[4773]: I0122 12:16:51.678303 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:16:51 crc kubenswrapper[4773]: W0122 12:16:51.686329 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf568dc9b_1245_4e94_9b82_45b619c401ad.slice/crio-efb23cb1355639f2cccf8f5a54bd8e31b1a9f8d9503eb10035f051f2f1eb80c7 WatchSource:0}: Error finding container efb23cb1355639f2cccf8f5a54bd8e31b1a9f8d9503eb10035f051f2f1eb80c7: Status 404 returned error can't find the container with id efb23cb1355639f2cccf8f5a54bd8e31b1a9f8d9503eb10035f051f2f1eb80c7 Jan 22 12:16:52 crc kubenswrapper[4773]: I0122 12:16:52.458359 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:16:52 crc kubenswrapper[4773]: I0122 12:16:52.496716 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4","Type":"ContainerStarted","Data":"ec433ceddf2873cbfb2c2e6d2f400271bfb699b468ca4b58d7f51d90bada96a6"} Jan 22 12:16:52 crc kubenswrapper[4773]: I0122 12:16:52.547157 
4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"174a4c39-5867-48de-a5a7-972ba37ead2e","Type":"ContainerStarted","Data":"b349e19a650803f025bf61bc335eb254eaa4391aec111a8b5bd4121b5a507dd5"} Jan 22 12:16:52 crc kubenswrapper[4773]: I0122 12:16:52.554430 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="174a4c39-5867-48de-a5a7-972ba37ead2e" containerName="cinder-api-log" containerID="cri-o://c6a0b354c83de3253d70d803a3be4aec0e44061ec8dea64a45ae2ba9231aab68" gracePeriod=30 Jan 22 12:16:52 crc kubenswrapper[4773]: I0122 12:16:52.554505 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="174a4c39-5867-48de-a5a7-972ba37ead2e" containerName="cinder-api" containerID="cri-o://b349e19a650803f025bf61bc335eb254eaa4391aec111a8b5bd4121b5a507dd5" gracePeriod=30 Jan 22 12:16:52 crc kubenswrapper[4773]: I0122 12:16:52.575531 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f568dc9b-1245-4e94-9b82-45b619c401ad","Type":"ContainerStarted","Data":"efb23cb1355639f2cccf8f5a54bd8e31b1a9f8d9503eb10035f051f2f1eb80c7"} Jan 22 12:16:52 crc kubenswrapper[4773]: I0122 12:16:52.777119 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.777094948 podStartE2EDuration="4.777094948s" podCreationTimestamp="2026-01-22 12:16:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:16:52.601339208 +0000 UTC m=+1320.179455053" watchObservedRunningTime="2026-01-22 12:16:52.777094948 +0000 UTC m=+1320.355210773" Jan 22 12:16:52 crc kubenswrapper[4773]: I0122 12:16:52.959080 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6f7559d9d9-kljj4"] Jan 22 12:16:52 crc kubenswrapper[4773]: I0122 12:16:52.963996 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6f7559d9d9-kljj4" podUID="dfbe6420-884f-4bc0-acf3-a518df4580d9" containerName="neutron-httpd" containerID="cri-o://041e1a8a7930cad7131da3d75b95ab2b629ba8232a60301999e4dad3d131e952" gracePeriod=30 Jan 22 12:16:52 crc kubenswrapper[4773]: I0122 12:16:52.960558 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6f7559d9d9-kljj4" podUID="dfbe6420-884f-4bc0-acf3-a518df4580d9" containerName="neutron-api" containerID="cri-o://25d89a3b3bf70ad872eb445f5cf323a49760461d01cfed204a7ee6e53d5bb535" gracePeriod=30 Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.007760 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5c54fcb95c-24djg"] Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.009642 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.033345 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5c54fcb95c-24djg"] Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.041768 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-6f7559d9d9-kljj4" podUID="dfbe6420-884f-4bc0-acf3-a518df4580d9" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.153:9696/\": read tcp 10.217.0.2:60206->10.217.0.153:9696: read: connection reset by peer" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.057924 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-combined-ca-bundle\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.058406 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk6d8\" (UniqueName: \"kubernetes.io/projected/a393de80-9ad0-413e-a2a1-6ee14de22049-kube-api-access-zk6d8\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.058762 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-public-tls-certs\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.060223 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-ovndb-tls-certs\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.060539 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-internal-tls-certs\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.060672 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-httpd-config\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.060816 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-config\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: E0122 12:16:53.158695 4773 cadvisor_stats_provider.go:516] "Partial failure issuing 
cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod174a4c39_5867_48de_a5a7_972ba37ead2e.slice/crio-b349e19a650803f025bf61bc335eb254eaa4391aec111a8b5bd4121b5a507dd5.scope\": RecentStats: unable to find data in memory cache]" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.165534 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-public-tls-certs\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.165642 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-ovndb-tls-certs\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.165703 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-internal-tls-certs\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.165724 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-httpd-config\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.165749 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-config\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.165833 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-combined-ca-bundle\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.165865 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk6d8\" (UniqueName: \"kubernetes.io/projected/a393de80-9ad0-413e-a2a1-6ee14de22049-kube-api-access-zk6d8\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.177769 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-config\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.181394 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-combined-ca-bundle\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.184838 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-httpd-config\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.207572 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-internal-tls-certs\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.207936 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-public-tls-certs\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.208243 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-ovndb-tls-certs\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.208466 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk6d8\" (UniqueName: \"kubernetes.io/projected/a393de80-9ad0-413e-a2a1-6ee14de22049-kube-api-access-zk6d8\") pod \"neutron-5c54fcb95c-24djg\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.477113 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.569603 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.600115 4773 generic.go:334] "Generic (PLEG): container finished" podID="dfbe6420-884f-4bc0-acf3-a518df4580d9" containerID="041e1a8a7930cad7131da3d75b95ab2b629ba8232a60301999e4dad3d131e952" exitCode=0 Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.600213 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f7559d9d9-kljj4" event={"ID":"dfbe6420-884f-4bc0-acf3-a518df4580d9","Type":"ContainerDied","Data":"041e1a8a7930cad7131da3d75b95ab2b629ba8232a60301999e4dad3d131e952"} Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.636035 4773 generic.go:334] "Generic (PLEG): container finished" podID="174a4c39-5867-48de-a5a7-972ba37ead2e" containerID="b349e19a650803f025bf61bc335eb254eaa4391aec111a8b5bd4121b5a507dd5" exitCode=0 Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.636067 4773 generic.go:334] "Generic (PLEG): container finished" podID="174a4c39-5867-48de-a5a7-972ba37ead2e" containerID="c6a0b354c83de3253d70d803a3be4aec0e44061ec8dea64a45ae2ba9231aab68" exitCode=143 Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.636111 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"174a4c39-5867-48de-a5a7-972ba37ead2e","Type":"ContainerDied","Data":"b349e19a650803f025bf61bc335eb254eaa4391aec111a8b5bd4121b5a507dd5"} Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.636143 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"174a4c39-5867-48de-a5a7-972ba37ead2e","Type":"ContainerDied","Data":"c6a0b354c83de3253d70d803a3be4aec0e44061ec8dea64a45ae2ba9231aab68"} Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.636154 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"174a4c39-5867-48de-a5a7-972ba37ead2e","Type":"ContainerDied","Data":"8f43b85a2231ba15d7d528cb26e40b25543868cb822794ca56da363c864fca66"} Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.636170 4773 scope.go:117] "RemoveContainer" containerID="b349e19a650803f025bf61bc335eb254eaa4391aec111a8b5bd4121b5a507dd5" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.636971 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.655583 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f568dc9b-1245-4e94-9b82-45b619c401ad","Type":"ContainerStarted","Data":"869e673da9e09061daf56b6a4605ccdcc3169f4e4c75425ecafc6f9b435e6c16"} Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.667544 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4","Type":"ContainerStarted","Data":"acd895f367c0e75633b6998dc82005ce7442af55265a9c02df624f5e1c913fff"} Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.709303 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-config-data-custom\") pod \"174a4c39-5867-48de-a5a7-972ba37ead2e\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.709586 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-scripts\") pod \"174a4c39-5867-48de-a5a7-972ba37ead2e\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.709720 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/174a4c39-5867-48de-a5a7-972ba37ead2e-logs\") pod \"174a4c39-5867-48de-a5a7-972ba37ead2e\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.709823 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8hlv\" (UniqueName: \"kubernetes.io/projected/174a4c39-5867-48de-a5a7-972ba37ead2e-kube-api-access-j8hlv\") pod \"174a4c39-5867-48de-a5a7-972ba37ead2e\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.709889 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/174a4c39-5867-48de-a5a7-972ba37ead2e-etc-machine-id\") pod \"174a4c39-5867-48de-a5a7-972ba37ead2e\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.709976 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-combined-ca-bundle\") pod \"174a4c39-5867-48de-a5a7-972ba37ead2e\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.710058 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-config-data\") pod \"174a4c39-5867-48de-a5a7-972ba37ead2e\" (UID: \"174a4c39-5867-48de-a5a7-972ba37ead2e\") " Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.710642 4773 scope.go:117] "RemoveContainer" containerID="c6a0b354c83de3253d70d803a3be4aec0e44061ec8dea64a45ae2ba9231aab68" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.710977 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/174a4c39-5867-48de-a5a7-972ba37ead2e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod 
"174a4c39-5867-48de-a5a7-972ba37ead2e" (UID: "174a4c39-5867-48de-a5a7-972ba37ead2e"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.715589 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.943071384 podStartE2EDuration="6.71556296s" podCreationTimestamp="2026-01-22 12:16:47 +0000 UTC" firstStartedPulling="2026-01-22 12:16:49.097842353 +0000 UTC m=+1316.675958178" lastFinishedPulling="2026-01-22 12:16:50.870333929 +0000 UTC m=+1318.448449754" observedRunningTime="2026-01-22 12:16:53.695573308 +0000 UTC m=+1321.273689133" watchObservedRunningTime="2026-01-22 12:16:53.71556296 +0000 UTC m=+1321.293678785" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.718871 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/174a4c39-5867-48de-a5a7-972ba37ead2e-logs" (OuterVolumeSpecName: "logs") pod "174a4c39-5867-48de-a5a7-972ba37ead2e" (UID: "174a4c39-5867-48de-a5a7-972ba37ead2e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.813729 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/174a4c39-5867-48de-a5a7-972ba37ead2e-logs\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.813771 4773 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/174a4c39-5867-48de-a5a7-972ba37ead2e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.816476 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/174a4c39-5867-48de-a5a7-972ba37ead2e-kube-api-access-j8hlv" (OuterVolumeSpecName: "kube-api-access-j8hlv") pod "174a4c39-5867-48de-a5a7-972ba37ead2e" (UID: "174a4c39-5867-48de-a5a7-972ba37ead2e"). InnerVolumeSpecName "kube-api-access-j8hlv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.826739 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-scripts" (OuterVolumeSpecName: "scripts") pod "174a4c39-5867-48de-a5a7-972ba37ead2e" (UID: "174a4c39-5867-48de-a5a7-972ba37ead2e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.856878 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "174a4c39-5867-48de-a5a7-972ba37ead2e" (UID: "174a4c39-5867-48de-a5a7-972ba37ead2e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.861757 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "174a4c39-5867-48de-a5a7-972ba37ead2e" (UID: "174a4c39-5867-48de-a5a7-972ba37ead2e"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.880311 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-76746cdbc6-gbqml" podUID="5dd744cf-1206-46a3-8af5-c18012bc1461" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.162:9311/healthcheck\": read tcp 10.217.0.2:40212->10.217.0.162:9311: read: connection reset by peer" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.880428 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-76746cdbc6-gbqml" podUID="5dd744cf-1206-46a3-8af5-c18012bc1461" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.162:9311/healthcheck\": read tcp 10.217.0.2:40222->10.217.0.162:9311: read: connection reset by peer" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.924073 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8hlv\" (UniqueName: \"kubernetes.io/projected/174a4c39-5867-48de-a5a7-972ba37ead2e-kube-api-access-j8hlv\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.924118 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.924130 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.924140 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:53 crc kubenswrapper[4773]: I0122 12:16:53.929399 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-config-data" (OuterVolumeSpecName: "config-data") pod "174a4c39-5867-48de-a5a7-972ba37ead2e" (UID: "174a4c39-5867-48de-a5a7-972ba37ead2e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.025684 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/174a4c39-5867-48de-a5a7-972ba37ead2e-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.150401 4773 scope.go:117] "RemoveContainer" containerID="b349e19a650803f025bf61bc335eb254eaa4391aec111a8b5bd4121b5a507dd5" Jan 22 12:16:54 crc kubenswrapper[4773]: E0122 12:16:54.157025 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b349e19a650803f025bf61bc335eb254eaa4391aec111a8b5bd4121b5a507dd5\": container with ID starting with b349e19a650803f025bf61bc335eb254eaa4391aec111a8b5bd4121b5a507dd5 not found: ID does not exist" containerID="b349e19a650803f025bf61bc335eb254eaa4391aec111a8b5bd4121b5a507dd5" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.157082 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b349e19a650803f025bf61bc335eb254eaa4391aec111a8b5bd4121b5a507dd5"} err="failed to get container status \"b349e19a650803f025bf61bc335eb254eaa4391aec111a8b5bd4121b5a507dd5\": rpc error: code = NotFound desc = could not find container \"b349e19a650803f025bf61bc335eb254eaa4391aec111a8b5bd4121b5a507dd5\": container with ID starting with b349e19a650803f025bf61bc335eb254eaa4391aec111a8b5bd4121b5a507dd5 not found: ID does not exist" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.157118 4773 scope.go:117] "RemoveContainer" containerID="c6a0b354c83de3253d70d803a3be4aec0e44061ec8dea64a45ae2ba9231aab68" Jan 22 12:16:54 crc kubenswrapper[4773]: E0122 12:16:54.161525 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6a0b354c83de3253d70d803a3be4aec0e44061ec8dea64a45ae2ba9231aab68\": container with ID starting with c6a0b354c83de3253d70d803a3be4aec0e44061ec8dea64a45ae2ba9231aab68 not found: ID does not exist" containerID="c6a0b354c83de3253d70d803a3be4aec0e44061ec8dea64a45ae2ba9231aab68" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.161574 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6a0b354c83de3253d70d803a3be4aec0e44061ec8dea64a45ae2ba9231aab68"} err="failed to get container status \"c6a0b354c83de3253d70d803a3be4aec0e44061ec8dea64a45ae2ba9231aab68\": rpc error: code = NotFound desc = could not find container \"c6a0b354c83de3253d70d803a3be4aec0e44061ec8dea64a45ae2ba9231aab68\": container with ID starting with c6a0b354c83de3253d70d803a3be4aec0e44061ec8dea64a45ae2ba9231aab68 not found: ID does not exist" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.161602 4773 scope.go:117] "RemoveContainer" containerID="b349e19a650803f025bf61bc335eb254eaa4391aec111a8b5bd4121b5a507dd5" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.172865 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b349e19a650803f025bf61bc335eb254eaa4391aec111a8b5bd4121b5a507dd5"} err="failed to get container status \"b349e19a650803f025bf61bc335eb254eaa4391aec111a8b5bd4121b5a507dd5\": rpc error: code = NotFound desc = could not find container \"b349e19a650803f025bf61bc335eb254eaa4391aec111a8b5bd4121b5a507dd5\": container with ID starting with b349e19a650803f025bf61bc335eb254eaa4391aec111a8b5bd4121b5a507dd5 not found: ID does not 
exist" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.172915 4773 scope.go:117] "RemoveContainer" containerID="c6a0b354c83de3253d70d803a3be4aec0e44061ec8dea64a45ae2ba9231aab68" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.179807 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.182818 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6a0b354c83de3253d70d803a3be4aec0e44061ec8dea64a45ae2ba9231aab68"} err="failed to get container status \"c6a0b354c83de3253d70d803a3be4aec0e44061ec8dea64a45ae2ba9231aab68\": rpc error: code = NotFound desc = could not find container \"c6a0b354c83de3253d70d803a3be4aec0e44061ec8dea64a45ae2ba9231aab68\": container with ID starting with c6a0b354c83de3253d70d803a3be4aec0e44061ec8dea64a45ae2ba9231aab68 not found: ID does not exist" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.205394 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.212017 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 22 12:16:54 crc kubenswrapper[4773]: E0122 12:16:54.212644 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="174a4c39-5867-48de-a5a7-972ba37ead2e" containerName="cinder-api-log" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.212669 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="174a4c39-5867-48de-a5a7-972ba37ead2e" containerName="cinder-api-log" Jan 22 12:16:54 crc kubenswrapper[4773]: E0122 12:16:54.212715 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="174a4c39-5867-48de-a5a7-972ba37ead2e" containerName="cinder-api" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.212724 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="174a4c39-5867-48de-a5a7-972ba37ead2e" containerName="cinder-api" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.212923 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="174a4c39-5867-48de-a5a7-972ba37ead2e" containerName="cinder-api" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.212940 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="174a4c39-5867-48de-a5a7-972ba37ead2e" containerName="cinder-api-log" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.214150 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.218867 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.219073 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.219201 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.219485 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.234846 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-config-data\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.234937 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tczc4\" (UniqueName: \"kubernetes.io/projected/04725b89-f1ec-45f8-a69a-5427230da499-kube-api-access-tczc4\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.235000 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-scripts\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.235020 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.235041 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-config-data-custom\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.235064 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.235090 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/04725b89-f1ec-45f8-a69a-5427230da499-etc-machine-id\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.235111 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/04725b89-f1ec-45f8-a69a-5427230da499-logs\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.235157 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-public-tls-certs\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.356634 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-public-tls-certs\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.356717 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-config-data\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.356798 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tczc4\" (UniqueName: \"kubernetes.io/projected/04725b89-f1ec-45f8-a69a-5427230da499-kube-api-access-tczc4\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.356873 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.356893 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-scripts\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.356917 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-config-data-custom\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.356946 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.356973 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/04725b89-f1ec-45f8-a69a-5427230da499-etc-machine-id\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.356997 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/04725b89-f1ec-45f8-a69a-5427230da499-logs\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.357483 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04725b89-f1ec-45f8-a69a-5427230da499-logs\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.363593 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-config-data\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.365338 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/04725b89-f1ec-45f8-a69a-5427230da499-etc-machine-id\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.374591 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-scripts\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.376317 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.377545 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.380333 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-public-tls-certs\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.384735 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tczc4\" (UniqueName: \"kubernetes.io/projected/04725b89-f1ec-45f8-a69a-5427230da499-kube-api-access-tczc4\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.386396 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-config-data-custom\") pod \"cinder-api-0\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") " pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.448726 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5c54fcb95c-24djg"] Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.552900 4773 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.553165 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.664599 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5jzz\" (UniqueName: \"kubernetes.io/projected/5dd744cf-1206-46a3-8af5-c18012bc1461-kube-api-access-h5jzz\") pod \"5dd744cf-1206-46a3-8af5-c18012bc1461\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.665168 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5dd744cf-1206-46a3-8af5-c18012bc1461-config-data\") pod \"5dd744cf-1206-46a3-8af5-c18012bc1461\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.665221 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5dd744cf-1206-46a3-8af5-c18012bc1461-config-data-custom\") pod \"5dd744cf-1206-46a3-8af5-c18012bc1461\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.665418 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5dd744cf-1206-46a3-8af5-c18012bc1461-logs\") pod \"5dd744cf-1206-46a3-8af5-c18012bc1461\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.665515 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dd744cf-1206-46a3-8af5-c18012bc1461-combined-ca-bundle\") pod \"5dd744cf-1206-46a3-8af5-c18012bc1461\" (UID: \"5dd744cf-1206-46a3-8af5-c18012bc1461\") " Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.666330 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5dd744cf-1206-46a3-8af5-c18012bc1461-logs" (OuterVolumeSpecName: "logs") pod "5dd744cf-1206-46a3-8af5-c18012bc1461" (UID: "5dd744cf-1206-46a3-8af5-c18012bc1461"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.673434 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5dd744cf-1206-46a3-8af5-c18012bc1461-kube-api-access-h5jzz" (OuterVolumeSpecName: "kube-api-access-h5jzz") pod "5dd744cf-1206-46a3-8af5-c18012bc1461" (UID: "5dd744cf-1206-46a3-8af5-c18012bc1461"). InnerVolumeSpecName "kube-api-access-h5jzz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.687485 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5dd744cf-1206-46a3-8af5-c18012bc1461-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "5dd744cf-1206-46a3-8af5-c18012bc1461" (UID: "5dd744cf-1206-46a3-8af5-c18012bc1461"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.690669 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="174a4c39-5867-48de-a5a7-972ba37ead2e" path="/var/lib/kubelet/pods/174a4c39-5867-48de-a5a7-972ba37ead2e/volumes" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.701967 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c54fcb95c-24djg" event={"ID":"a393de80-9ad0-413e-a2a1-6ee14de22049","Type":"ContainerStarted","Data":"c8ec69fd30ff3f86ecbbe357cf9c7288931466d7ad02d6e9d6eb30e9b599a11b"} Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.710228 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-6f7559d9d9-kljj4" podUID="dfbe6420-884f-4bc0-acf3-a518df4580d9" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.153:9696/\": dial tcp 10.217.0.153:9696: connect: connection refused" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.721079 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f568dc9b-1245-4e94-9b82-45b619c401ad","Type":"ContainerStarted","Data":"ce7d9f697c0ff0778f23562ac717ab821b52d38aa5fe4123bd9253ba750babf6"} Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.740051 4773 generic.go:334] "Generic (PLEG): container finished" podID="5dd744cf-1206-46a3-8af5-c18012bc1461" containerID="8b5b57dda84416c13cf815f4dbd5e41110823d9055dd647ae57e2bcc91185d7d" exitCode=0 Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.740103 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-76746cdbc6-gbqml" event={"ID":"5dd744cf-1206-46a3-8af5-c18012bc1461","Type":"ContainerDied","Data":"8b5b57dda84416c13cf815f4dbd5e41110823d9055dd647ae57e2bcc91185d7d"} Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.740500 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-76746cdbc6-gbqml" event={"ID":"5dd744cf-1206-46a3-8af5-c18012bc1461","Type":"ContainerDied","Data":"43af7f4401465846d94a89c525bc8a9584deabc96a10d3ab372cd4fc220cc7e4"} Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.740535 4773 scope.go:117] "RemoveContainer" containerID="8b5b57dda84416c13cf815f4dbd5e41110823d9055dd647ae57e2bcc91185d7d" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.740206 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-76746cdbc6-gbqml" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.775909 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5jzz\" (UniqueName: \"kubernetes.io/projected/5dd744cf-1206-46a3-8af5-c18012bc1461-kube-api-access-h5jzz\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.775944 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5dd744cf-1206-46a3-8af5-c18012bc1461-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.775956 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5dd744cf-1206-46a3-8af5-c18012bc1461-logs\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.788045 4773 scope.go:117] "RemoveContainer" containerID="937aa35331c96c0ea6173b73a30dbf047b384131919760e1cf16dd1298060f4a" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.795646 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5dd744cf-1206-46a3-8af5-c18012bc1461-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5dd744cf-1206-46a3-8af5-c18012bc1461" (UID: "5dd744cf-1206-46a3-8af5-c18012bc1461"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.802275 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5dd744cf-1206-46a3-8af5-c18012bc1461-config-data" (OuterVolumeSpecName: "config-data") pod "5dd744cf-1206-46a3-8af5-c18012bc1461" (UID: "5dd744cf-1206-46a3-8af5-c18012bc1461"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.849811 4773 scope.go:117] "RemoveContainer" containerID="8b5b57dda84416c13cf815f4dbd5e41110823d9055dd647ae57e2bcc91185d7d" Jan 22 12:16:54 crc kubenswrapper[4773]: E0122 12:16:54.863802 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b5b57dda84416c13cf815f4dbd5e41110823d9055dd647ae57e2bcc91185d7d\": container with ID starting with 8b5b57dda84416c13cf815f4dbd5e41110823d9055dd647ae57e2bcc91185d7d not found: ID does not exist" containerID="8b5b57dda84416c13cf815f4dbd5e41110823d9055dd647ae57e2bcc91185d7d" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.863884 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b5b57dda84416c13cf815f4dbd5e41110823d9055dd647ae57e2bcc91185d7d"} err="failed to get container status \"8b5b57dda84416c13cf815f4dbd5e41110823d9055dd647ae57e2bcc91185d7d\": rpc error: code = NotFound desc = could not find container \"8b5b57dda84416c13cf815f4dbd5e41110823d9055dd647ae57e2bcc91185d7d\": container with ID starting with 8b5b57dda84416c13cf815f4dbd5e41110823d9055dd647ae57e2bcc91185d7d not found: ID does not exist" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.863934 4773 scope.go:117] "RemoveContainer" containerID="937aa35331c96c0ea6173b73a30dbf047b384131919760e1cf16dd1298060f4a" Jan 22 12:16:54 crc kubenswrapper[4773]: E0122 12:16:54.873648 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"937aa35331c96c0ea6173b73a30dbf047b384131919760e1cf16dd1298060f4a\": container with ID starting with 937aa35331c96c0ea6173b73a30dbf047b384131919760e1cf16dd1298060f4a not found: ID does not exist" containerID="937aa35331c96c0ea6173b73a30dbf047b384131919760e1cf16dd1298060f4a" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.873745 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"937aa35331c96c0ea6173b73a30dbf047b384131919760e1cf16dd1298060f4a"} err="failed to get container status \"937aa35331c96c0ea6173b73a30dbf047b384131919760e1cf16dd1298060f4a\": rpc error: code = NotFound desc = could not find container \"937aa35331c96c0ea6173b73a30dbf047b384131919760e1cf16dd1298060f4a\": container with ID starting with 937aa35331c96c0ea6173b73a30dbf047b384131919760e1cf16dd1298060f4a not found: ID does not exist" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.878780 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5dd744cf-1206-46a3-8af5-c18012bc1461-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:54 crc kubenswrapper[4773]: I0122 12:16:54.878808 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dd744cf-1206-46a3-8af5-c18012bc1461-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:55 crc kubenswrapper[4773]: I0122 12:16:55.142599 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 22 12:16:55 crc kubenswrapper[4773]: I0122 12:16:55.213281 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-76746cdbc6-gbqml"] Jan 22 12:16:55 crc kubenswrapper[4773]: I0122 12:16:55.221205 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-76746cdbc6-gbqml"] Jan 22 12:16:55 
crc kubenswrapper[4773]: I0122 12:16:55.757584 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f568dc9b-1245-4e94-9b82-45b619c401ad","Type":"ContainerStarted","Data":"4889d44fc54ea24d648360889e04777bfff2db34d09ee9a8c03d9f9d7da42ddc"} Jan 22 12:16:55 crc kubenswrapper[4773]: I0122 12:16:55.761964 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"04725b89-f1ec-45f8-a69a-5427230da499","Type":"ContainerStarted","Data":"b1397fab45be8a037931e17b75ab4a6ea7251bb45086508b972673aa2a07ba2b"} Jan 22 12:16:55 crc kubenswrapper[4773]: I0122 12:16:55.778179 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c54fcb95c-24djg" event={"ID":"a393de80-9ad0-413e-a2a1-6ee14de22049","Type":"ContainerStarted","Data":"b02f864bc31ff6b515389a47f6d522f1429809938c2225e47c25f5a54c54ef37"} Jan 22 12:16:55 crc kubenswrapper[4773]: I0122 12:16:55.778226 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c54fcb95c-24djg" event={"ID":"a393de80-9ad0-413e-a2a1-6ee14de22049","Type":"ContainerStarted","Data":"e4758759a8e6e7a9d38053a3b2d48dd8294e18fc3ff82b5caecad2b27736586b"} Jan 22 12:16:55 crc kubenswrapper[4773]: I0122 12:16:55.778487 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:16:55 crc kubenswrapper[4773]: I0122 12:16:55.800706 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5c54fcb95c-24djg" podStartSLOduration=3.800675853 podStartE2EDuration="3.800675853s" podCreationTimestamp="2026-01-22 12:16:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:16:55.797101523 +0000 UTC m=+1323.375217358" watchObservedRunningTime="2026-01-22 12:16:55.800675853 +0000 UTC m=+1323.378791668" Jan 22 12:16:56 crc kubenswrapper[4773]: I0122 12:16:56.703899 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5dd744cf-1206-46a3-8af5-c18012bc1461" path="/var/lib/kubelet/pods/5dd744cf-1206-46a3-8af5-c18012bc1461/volumes" Jan 22 12:16:56 crc kubenswrapper[4773]: I0122 12:16:56.792032 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"04725b89-f1ec-45f8-a69a-5427230da499","Type":"ContainerStarted","Data":"a2c743ccc8058456082f15d2a6c949e38df6e8c5dbd52d76888c3f1f8d965d57"} Jan 22 12:16:56 crc kubenswrapper[4773]: I0122 12:16:56.838037 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-77688c4666-rzbr7" Jan 22 12:16:56 crc kubenswrapper[4773]: I0122 12:16:56.876393 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-77688c4666-rzbr7" Jan 22 12:16:57 crc kubenswrapper[4773]: I0122 12:16:57.804785 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"04725b89-f1ec-45f8-a69a-5427230da499","Type":"ContainerStarted","Data":"f0274aaa2adaf6a4656046810ff93c7b2ab941e940754cbad018d84fab5c25eb"} Jan 22 12:16:57 crc kubenswrapper[4773]: I0122 12:16:57.834749 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.834727921 podStartE2EDuration="3.834727921s" podCreationTimestamp="2026-01-22 12:16:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-01-22 12:16:57.8229528 +0000 UTC m=+1325.401068625" watchObservedRunningTime="2026-01-22 12:16:57.834727921 +0000 UTC m=+1325.412843746" Jan 22 12:16:57 crc kubenswrapper[4773]: I0122 12:16:57.909160 4773 scope.go:117] "RemoveContainer" containerID="55c160d9b90e4d4decd5b742bf1efbaabb75061b99095d3ab0ca3e69eee17f94" Jan 22 12:16:58 crc kubenswrapper[4773]: I0122 12:16:58.200672 4773 scope.go:117] "RemoveContainer" containerID="48f00511157f390ce52313094494466bbf163c64892b374e004457aa36fa8c1b" Jan 22 12:16:58 crc kubenswrapper[4773]: I0122 12:16:58.234208 4773 scope.go:117] "RemoveContainer" containerID="cea4bf3693e4287495233905bef2b1202e373eb9ce390c2a41f1b8fc9f70dcca" Jan 22 12:16:58 crc kubenswrapper[4773]: I0122 12:16:58.236685 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 22 12:16:58 crc kubenswrapper[4773]: I0122 12:16:58.333461 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:16:58 crc kubenswrapper[4773]: I0122 12:16:58.408158 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66cdd4b5b5-89x6f"] Jan 22 12:16:58 crc kubenswrapper[4773]: I0122 12:16:58.408681 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" podUID="3af85fdc-c159-4eb5-92dd-bf1ef790a5fa" containerName="dnsmasq-dns" containerID="cri-o://380677fd1c336ba88e8eeed7c645b7a6151fb3e230f08ceea3c7a50b00a2f40f" gracePeriod=10 Jan 22 12:16:58 crc kubenswrapper[4773]: I0122 12:16:58.513445 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 22 12:16:58 crc kubenswrapper[4773]: I0122 12:16:58.823741 4773 generic.go:334] "Generic (PLEG): container finished" podID="dfbe6420-884f-4bc0-acf3-a518df4580d9" containerID="25d89a3b3bf70ad872eb445f5cf323a49760461d01cfed204a7ee6e53d5bb535" exitCode=0 Jan 22 12:16:58 crc kubenswrapper[4773]: I0122 12:16:58.823827 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f7559d9d9-kljj4" event={"ID":"dfbe6420-884f-4bc0-acf3-a518df4580d9","Type":"ContainerDied","Data":"25d89a3b3bf70ad872eb445f5cf323a49760461d01cfed204a7ee6e53d5bb535"} Jan 22 12:16:58 crc kubenswrapper[4773]: I0122 12:16:58.825134 4773 generic.go:334] "Generic (PLEG): container finished" podID="3af85fdc-c159-4eb5-92dd-bf1ef790a5fa" containerID="380677fd1c336ba88e8eeed7c645b7a6151fb3e230f08ceea3c7a50b00a2f40f" exitCode=0 Jan 22 12:16:58 crc kubenswrapper[4773]: I0122 12:16:58.825192 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" event={"ID":"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa","Type":"ContainerDied","Data":"380677fd1c336ba88e8eeed7c645b7a6151fb3e230f08ceea3c7a50b00a2f40f"} Jan 22 12:16:58 crc kubenswrapper[4773]: I0122 12:16:58.826904 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f568dc9b-1245-4e94-9b82-45b619c401ad","Type":"ContainerStarted","Data":"71d82d0b0f6b51c9792097ab84e90d31299944e3e6dc27933891d4c8d7fdeb9c"} Jan 22 12:16:58 crc kubenswrapper[4773]: I0122 12:16:58.827222 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jan 22 12:16:58 crc kubenswrapper[4773]: I0122 12:16:58.853721 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.5949816610000003 
podStartE2EDuration="8.853700305s" podCreationTimestamp="2026-01-22 12:16:50 +0000 UTC" firstStartedPulling="2026-01-22 12:16:51.689705132 +0000 UTC m=+1319.267820957" lastFinishedPulling="2026-01-22 12:16:56.948423776 +0000 UTC m=+1324.526539601" observedRunningTime="2026-01-22 12:16:58.848327014 +0000 UTC m=+1326.426442849" watchObservedRunningTime="2026-01-22 12:16:58.853700305 +0000 UTC m=+1326.431816130" Jan 22 12:16:58 crc kubenswrapper[4773]: I0122 12:16:58.894857 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.293409 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.305152 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.393960 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chh2t\" (UniqueName: \"kubernetes.io/projected/dfbe6420-884f-4bc0-acf3-a518df4580d9-kube-api-access-chh2t\") pod \"dfbe6420-884f-4bc0-acf3-a518df4580d9\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.394031 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7z88\" (UniqueName: \"kubernetes.io/projected/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-kube-api-access-g7z88\") pod \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.394088 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-public-tls-certs\") pod \"dfbe6420-884f-4bc0-acf3-a518df4580d9\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.394159 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-ovndb-tls-certs\") pod \"dfbe6420-884f-4bc0-acf3-a518df4580d9\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.394207 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-config\") pod \"dfbe6420-884f-4bc0-acf3-a518df4580d9\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.394277 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-internal-tls-certs\") pod \"dfbe6420-884f-4bc0-acf3-a518df4580d9\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.394334 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-ovsdbserver-nb\") pod \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.394419 4773 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-httpd-config\") pod \"dfbe6420-884f-4bc0-acf3-a518df4580d9\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.394446 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-dns-svc\") pod \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.394506 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-combined-ca-bundle\") pod \"dfbe6420-884f-4bc0-acf3-a518df4580d9\" (UID: \"dfbe6420-884f-4bc0-acf3-a518df4580d9\") " Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.394561 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-config\") pod \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.394606 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-ovsdbserver-sb\") pod \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.394663 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-dns-swift-storage-0\") pod \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\" (UID: \"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa\") " Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.402542 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-kube-api-access-g7z88" (OuterVolumeSpecName: "kube-api-access-g7z88") pod "3af85fdc-c159-4eb5-92dd-bf1ef790a5fa" (UID: "3af85fdc-c159-4eb5-92dd-bf1ef790a5fa"). InnerVolumeSpecName "kube-api-access-g7z88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.402573 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "dfbe6420-884f-4bc0-acf3-a518df4580d9" (UID: "dfbe6420-884f-4bc0-acf3-a518df4580d9"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.408780 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfbe6420-884f-4bc0-acf3-a518df4580d9-kube-api-access-chh2t" (OuterVolumeSpecName: "kube-api-access-chh2t") pod "dfbe6420-884f-4bc0-acf3-a518df4580d9" (UID: "dfbe6420-884f-4bc0-acf3-a518df4580d9"). InnerVolumeSpecName "kube-api-access-chh2t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.496757 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7z88\" (UniqueName: \"kubernetes.io/projected/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-kube-api-access-g7z88\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.498263 4773 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.498393 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chh2t\" (UniqueName: \"kubernetes.io/projected/dfbe6420-884f-4bc0-acf3-a518df4580d9-kube-api-access-chh2t\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.497999 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dfbe6420-884f-4bc0-acf3-a518df4580d9" (UID: "dfbe6420-884f-4bc0-acf3-a518df4580d9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.501677 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "dfbe6420-884f-4bc0-acf3-a518df4580d9" (UID: "dfbe6420-884f-4bc0-acf3-a518df4580d9"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.503087 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3af85fdc-c159-4eb5-92dd-bf1ef790a5fa" (UID: "3af85fdc-c159-4eb5-92dd-bf1ef790a5fa"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.506185 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3af85fdc-c159-4eb5-92dd-bf1ef790a5fa" (UID: "3af85fdc-c159-4eb5-92dd-bf1ef790a5fa"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.507928 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3af85fdc-c159-4eb5-92dd-bf1ef790a5fa" (UID: "3af85fdc-c159-4eb5-92dd-bf1ef790a5fa"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.513600 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "dfbe6420-884f-4bc0-acf3-a518df4580d9" (UID: "dfbe6420-884f-4bc0-acf3-a518df4580d9"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.514115 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-config" (OuterVolumeSpecName: "config") pod "dfbe6420-884f-4bc0-acf3-a518df4580d9" (UID: "dfbe6420-884f-4bc0-acf3-a518df4580d9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.514827 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3af85fdc-c159-4eb5-92dd-bf1ef790a5fa" (UID: "3af85fdc-c159-4eb5-92dd-bf1ef790a5fa"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.515142 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "dfbe6420-884f-4bc0-acf3-a518df4580d9" (UID: "dfbe6420-884f-4bc0-acf3-a518df4580d9"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.527179 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-config" (OuterVolumeSpecName: "config") pod "3af85fdc-c159-4eb5-92dd-bf1ef790a5fa" (UID: "3af85fdc-c159-4eb5-92dd-bf1ef790a5fa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.600742 4773 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.600893 4773 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.600911 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.600923 4773 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.600935 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.600947 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.600957 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/dfbe6420-884f-4bc0-acf3-a518df4580d9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.600967 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.600976 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.600986 4773 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.840081 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f7559d9d9-kljj4" event={"ID":"dfbe6420-884f-4bc0-acf3-a518df4580d9","Type":"ContainerDied","Data":"5c00b20e125090526df9ad81f3ee67ff8245895e620956ac6e276d63146bdabf"} Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.840154 4773 scope.go:117] "RemoveContainer" containerID="041e1a8a7930cad7131da3d75b95ab2b629ba8232a60301999e4dad3d131e952" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.840157 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6f7559d9d9-kljj4" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.850251 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="f5258fc2-7a55-4af3-bc0a-30b7e5c253c4" containerName="cinder-scheduler" containerID="cri-o://ec433ceddf2873cbfb2c2e6d2f400271bfb699b468ca4b58d7f51d90bada96a6" gracePeriod=30 Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.850438 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.851353 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66cdd4b5b5-89x6f" event={"ID":"3af85fdc-c159-4eb5-92dd-bf1ef790a5fa","Type":"ContainerDied","Data":"798d4fe494f5dce1062f4f89710bf956259a4604f18be3d8b3e0b209c557d755"} Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.852748 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.852840 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="f5258fc2-7a55-4af3-bc0a-30b7e5c253c4" containerName="probe" containerID="cri-o://acd895f367c0e75633b6998dc82005ce7442af55265a9c02df624f5e1c913fff" gracePeriod=30 Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.881887 4773 scope.go:117] "RemoveContainer" containerID="25d89a3b3bf70ad872eb445f5cf323a49760461d01cfed204a7ee6e53d5bb535" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.891704 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6f7559d9d9-kljj4"] Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.911013 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-6f7559d9d9-kljj4"] Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.914208 4773 scope.go:117] "RemoveContainer" containerID="380677fd1c336ba88e8eeed7c645b7a6151fb3e230f08ceea3c7a50b00a2f40f" Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.922368 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66cdd4b5b5-89x6f"] Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.928962 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-66cdd4b5b5-89x6f"] Jan 22 12:16:59 crc kubenswrapper[4773]: I0122 12:16:59.933402 4773 scope.go:117] "RemoveContainer" containerID="3413e819aefbdd88c96605e9c48eef0f8d140caea90e150ffb78ac99c8775407" Jan 22 12:17:00 crc kubenswrapper[4773]: I0122 12:17:00.671088 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3af85fdc-c159-4eb5-92dd-bf1ef790a5fa" path="/var/lib/kubelet/pods/3af85fdc-c159-4eb5-92dd-bf1ef790a5fa/volumes" Jan 22 12:17:00 crc kubenswrapper[4773]: I0122 12:17:00.672689 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfbe6420-884f-4bc0-acf3-a518df4580d9" path="/var/lib/kubelet/pods/dfbe6420-884f-4bc0-acf3-a518df4580d9/volumes" Jan 22 12:17:00 crc kubenswrapper[4773]: I0122 12:17:00.862713 4773 generic.go:334] "Generic (PLEG): container finished" podID="f5258fc2-7a55-4af3-bc0a-30b7e5c253c4" containerID="acd895f367c0e75633b6998dc82005ce7442af55265a9c02df624f5e1c913fff" exitCode=0 Jan 22 12:17:00 crc kubenswrapper[4773]: I0122 12:17:00.862775 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4","Type":"ContainerDied","Data":"acd895f367c0e75633b6998dc82005ce7442af55265a9c02df624f5e1c913fff"} Jan 22 12:17:03 crc kubenswrapper[4773]: I0122 12:17:03.151875 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-585c9c99b7-xwz6v" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.075070 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.075139 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.387734 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jan 22 12:17:04 crc kubenswrapper[4773]: E0122 12:17:04.388269 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3af85fdc-c159-4eb5-92dd-bf1ef790a5fa" containerName="dnsmasq-dns" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.388308 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3af85fdc-c159-4eb5-92dd-bf1ef790a5fa" containerName="dnsmasq-dns" Jan 22 12:17:04 crc kubenswrapper[4773]: E0122 12:17:04.388332 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfbe6420-884f-4bc0-acf3-a518df4580d9" containerName="neutron-httpd" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.388341 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfbe6420-884f-4bc0-acf3-a518df4580d9" containerName="neutron-httpd" Jan 22 12:17:04 crc kubenswrapper[4773]: E0122 12:17:04.388372 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dd744cf-1206-46a3-8af5-c18012bc1461" containerName="barbican-api" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.388381 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dd744cf-1206-46a3-8af5-c18012bc1461" containerName="barbican-api" Jan 22 12:17:04 crc kubenswrapper[4773]: E0122 12:17:04.388395 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3af85fdc-c159-4eb5-92dd-bf1ef790a5fa" containerName="init" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.388403 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3af85fdc-c159-4eb5-92dd-bf1ef790a5fa" containerName="init" Jan 22 12:17:04 crc kubenswrapper[4773]: E0122 12:17:04.388416 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfbe6420-884f-4bc0-acf3-a518df4580d9" containerName="neutron-api" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.388423 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfbe6420-884f-4bc0-acf3-a518df4580d9" containerName="neutron-api" Jan 22 12:17:04 crc kubenswrapper[4773]: E0122 12:17:04.388438 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5dd744cf-1206-46a3-8af5-c18012bc1461" containerName="barbican-api-log" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.388446 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5dd744cf-1206-46a3-8af5-c18012bc1461" containerName="barbican-api-log" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.388682 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="3af85fdc-c159-4eb5-92dd-bf1ef790a5fa" containerName="dnsmasq-dns" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.388699 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfbe6420-884f-4bc0-acf3-a518df4580d9" containerName="neutron-httpd" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.388710 4773 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="dfbe6420-884f-4bc0-acf3-a518df4580d9" containerName="neutron-api" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.388723 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5dd744cf-1206-46a3-8af5-c18012bc1461" containerName="barbican-api" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.388740 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5dd744cf-1206-46a3-8af5-c18012bc1461" containerName="barbican-api-log" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.389524 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.393441 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.393594 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-zbqhb" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.398567 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.398659 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.495353 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c38a538-da2d-4097-9851-de6f8f2106c1-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6c38a538-da2d-4097-9851-de6f8f2106c1\") " pod="openstack/openstackclient" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.495516 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2z2p\" (UniqueName: \"kubernetes.io/projected/6c38a538-da2d-4097-9851-de6f8f2106c1-kube-api-access-r2z2p\") pod \"openstackclient\" (UID: \"6c38a538-da2d-4097-9851-de6f8f2106c1\") " pod="openstack/openstackclient" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.495554 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6c38a538-da2d-4097-9851-de6f8f2106c1-openstack-config\") pod \"openstackclient\" (UID: \"6c38a538-da2d-4097-9851-de6f8f2106c1\") " pod="openstack/openstackclient" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.495632 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6c38a538-da2d-4097-9851-de6f8f2106c1-openstack-config-secret\") pod \"openstackclient\" (UID: \"6c38a538-da2d-4097-9851-de6f8f2106c1\") " pod="openstack/openstackclient" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.597463 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2z2p\" (UniqueName: \"kubernetes.io/projected/6c38a538-da2d-4097-9851-de6f8f2106c1-kube-api-access-r2z2p\") pod \"openstackclient\" (UID: \"6c38a538-da2d-4097-9851-de6f8f2106c1\") " pod="openstack/openstackclient" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.597497 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6c38a538-da2d-4097-9851-de6f8f2106c1-openstack-config\") pod \"openstackclient\" 
(UID: \"6c38a538-da2d-4097-9851-de6f8f2106c1\") " pod="openstack/openstackclient" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.597533 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6c38a538-da2d-4097-9851-de6f8f2106c1-openstack-config-secret\") pod \"openstackclient\" (UID: \"6c38a538-da2d-4097-9851-de6f8f2106c1\") " pod="openstack/openstackclient" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.597569 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c38a538-da2d-4097-9851-de6f8f2106c1-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6c38a538-da2d-4097-9851-de6f8f2106c1\") " pod="openstack/openstackclient" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.599243 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6c38a538-da2d-4097-9851-de6f8f2106c1-openstack-config\") pod \"openstackclient\" (UID: \"6c38a538-da2d-4097-9851-de6f8f2106c1\") " pod="openstack/openstackclient" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.604008 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6c38a538-da2d-4097-9851-de6f8f2106c1-openstack-config-secret\") pod \"openstackclient\" (UID: \"6c38a538-da2d-4097-9851-de6f8f2106c1\") " pod="openstack/openstackclient" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.604497 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c38a538-da2d-4097-9851-de6f8f2106c1-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6c38a538-da2d-4097-9851-de6f8f2106c1\") " pod="openstack/openstackclient" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.617762 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2z2p\" (UniqueName: \"kubernetes.io/projected/6c38a538-da2d-4097-9851-de6f8f2106c1-kube-api-access-r2z2p\") pod \"openstackclient\" (UID: \"6c38a538-da2d-4097-9851-de6f8f2106c1\") " pod="openstack/openstackclient" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.717576 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.872768 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.903077 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvbvl\" (UniqueName: \"kubernetes.io/projected/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-kube-api-access-bvbvl\") pod \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.903816 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-config-data\") pod \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.904034 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-combined-ca-bundle\") pod \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.904486 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-config-data-custom\") pod \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.904666 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-scripts\") pod \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.904717 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-etc-machine-id\") pod \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\" (UID: \"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4\") " Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.910871 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f5258fc2-7a55-4af3-bc0a-30b7e5c253c4" (UID: "f5258fc2-7a55-4af3-bc0a-30b7e5c253c4"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.911827 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-kube-api-access-bvbvl" (OuterVolumeSpecName: "kube-api-access-bvbvl") pod "f5258fc2-7a55-4af3-bc0a-30b7e5c253c4" (UID: "f5258fc2-7a55-4af3-bc0a-30b7e5c253c4"). InnerVolumeSpecName "kube-api-access-bvbvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.912224 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f5258fc2-7a55-4af3-bc0a-30b7e5c253c4" (UID: "f5258fc2-7a55-4af3-bc0a-30b7e5c253c4"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.932110 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-scripts" (OuterVolumeSpecName: "scripts") pod "f5258fc2-7a55-4af3-bc0a-30b7e5c253c4" (UID: "f5258fc2-7a55-4af3-bc0a-30b7e5c253c4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.942051 4773 generic.go:334] "Generic (PLEG): container finished" podID="f5258fc2-7a55-4af3-bc0a-30b7e5c253c4" containerID="ec433ceddf2873cbfb2c2e6d2f400271bfb699b468ca4b58d7f51d90bada96a6" exitCode=0 Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.942122 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4","Type":"ContainerDied","Data":"ec433ceddf2873cbfb2c2e6d2f400271bfb699b468ca4b58d7f51d90bada96a6"} Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.942156 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"f5258fc2-7a55-4af3-bc0a-30b7e5c253c4","Type":"ContainerDied","Data":"38c6d09e71eedcd5e8eb0353197ae6895cfbc3c658a8da46c011f2b59f6629fd"} Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.942175 4773 scope.go:117] "RemoveContainer" containerID="acd895f367c0e75633b6998dc82005ce7442af55265a9c02df624f5e1c913fff" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.942236 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 12:17:04 crc kubenswrapper[4773]: I0122 12:17:04.989923 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5258fc2-7a55-4af3-bc0a-30b7e5c253c4" (UID: "f5258fc2-7a55-4af3-bc0a-30b7e5c253c4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.002171 4773 scope.go:117] "RemoveContainer" containerID="ec433ceddf2873cbfb2c2e6d2f400271bfb699b468ca4b58d7f51d90bada96a6" Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.012712 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.012746 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.012756 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.012766 4773 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.012775 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvbvl\" (UniqueName: \"kubernetes.io/projected/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-kube-api-access-bvbvl\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.034926 4773 scope.go:117] "RemoveContainer" containerID="acd895f367c0e75633b6998dc82005ce7442af55265a9c02df624f5e1c913fff" Jan 22 12:17:05 crc kubenswrapper[4773]: E0122 12:17:05.038394 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acd895f367c0e75633b6998dc82005ce7442af55265a9c02df624f5e1c913fff\": container with ID starting with acd895f367c0e75633b6998dc82005ce7442af55265a9c02df624f5e1c913fff not found: ID does not exist" containerID="acd895f367c0e75633b6998dc82005ce7442af55265a9c02df624f5e1c913fff" Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.038425 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acd895f367c0e75633b6998dc82005ce7442af55265a9c02df624f5e1c913fff"} err="failed to get container status \"acd895f367c0e75633b6998dc82005ce7442af55265a9c02df624f5e1c913fff\": rpc error: code = NotFound desc = could not find container \"acd895f367c0e75633b6998dc82005ce7442af55265a9c02df624f5e1c913fff\": container with ID starting with acd895f367c0e75633b6998dc82005ce7442af55265a9c02df624f5e1c913fff not found: ID does not exist" Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.038448 4773 scope.go:117] "RemoveContainer" containerID="ec433ceddf2873cbfb2c2e6d2f400271bfb699b468ca4b58d7f51d90bada96a6" Jan 22 12:17:05 crc kubenswrapper[4773]: E0122 12:17:05.045126 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec433ceddf2873cbfb2c2e6d2f400271bfb699b468ca4b58d7f51d90bada96a6\": container with ID starting with ec433ceddf2873cbfb2c2e6d2f400271bfb699b468ca4b58d7f51d90bada96a6 not found: ID does not exist" containerID="ec433ceddf2873cbfb2c2e6d2f400271bfb699b468ca4b58d7f51d90bada96a6" Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.045161 4773 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec433ceddf2873cbfb2c2e6d2f400271bfb699b468ca4b58d7f51d90bada96a6"} err="failed to get container status \"ec433ceddf2873cbfb2c2e6d2f400271bfb699b468ca4b58d7f51d90bada96a6\": rpc error: code = NotFound desc = could not find container \"ec433ceddf2873cbfb2c2e6d2f400271bfb699b468ca4b58d7f51d90bada96a6\": container with ID starting with ec433ceddf2873cbfb2c2e6d2f400271bfb699b468ca4b58d7f51d90bada96a6 not found: ID does not exist" Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.090765 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-config-data" (OuterVolumeSpecName: "config-data") pod "f5258fc2-7a55-4af3-bc0a-30b7e5c253c4" (UID: "f5258fc2-7a55-4af3-bc0a-30b7e5c253c4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.114532 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.284171 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.293236 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.324847 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 12:17:05 crc kubenswrapper[4773]: E0122 12:17:05.325385 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5258fc2-7a55-4af3-bc0a-30b7e5c253c4" containerName="probe" Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.325405 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5258fc2-7a55-4af3-bc0a-30b7e5c253c4" containerName="probe" Jan 22 12:17:05 crc kubenswrapper[4773]: E0122 12:17:05.325431 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5258fc2-7a55-4af3-bc0a-30b7e5c253c4" containerName="cinder-scheduler" Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.325438 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5258fc2-7a55-4af3-bc0a-30b7e5c253c4" containerName="cinder-scheduler" Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.325614 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5258fc2-7a55-4af3-bc0a-30b7e5c253c4" containerName="probe" Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.325627 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5258fc2-7a55-4af3-bc0a-30b7e5c253c4" containerName="cinder-scheduler" Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.326557 4773 util.go:30] "No sandbox for pod can be found. 
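[Editor's note] The paired E/I records above show the kubelet's container cleanup tolerating an already-removed container: ContainerStatus comes back from CRI-O as gRPC NotFound, the deletor logs the error, and cleanup proceeds anyway. A minimal Go sketch of that idempotent-deletion pattern, assuming a gRPC-style error; this illustrates the pattern, not kubelet's actual code:

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer treats NotFound from the runtime as "already gone" rather
// than as a failure, mirroring the DeleteContainer records in the log.
func removeContainer(id string, getStatus func(string) error) error {
	if err := getStatus(id); err != nil {
		if status.Code(err) == codes.NotFound {
			// Same outcome the log shows: note the error, then move on.
			fmt.Printf("container %s already removed: %v\n", id, err)
			return nil
		}
		return fmt.Errorf("failed to get container status %q: %w", id, err)
	}
	// ... actually stop and remove the container here ...
	return nil
}

func main() {
	// Simulate the runtime answering NotFound, as CRI-O does above.
	notFound := func(id string) error {
		return status.Errorf(codes.NotFound, "could not find container %q", id)
	}
	_ = removeContainer("acd895f3...", notFound) // hypothetical truncated ID
}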
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.329721 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Jan 22 12:17:05 crc kubenswrapper[4773]: W0122 12:17:05.334487 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c38a538_da2d_4097_9851_de6f8f2106c1.slice/crio-7e98a7f4f46ed2717360d13cf3d2f4cec10ad5e791a0e422be5c2455f62ac63e WatchSource:0}: Error finding container 7e98a7f4f46ed2717360d13cf3d2f4cec10ad5e791a0e422be5c2455f62ac63e: Status 404 returned error can't find the container with id 7e98a7f4f46ed2717360d13cf3d2f4cec10ad5e791a0e422be5c2455f62ac63e
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.335400 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.375349 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.420001 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " pod="openstack/cinder-scheduler-0"
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.420267 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-config-data\") pod \"cinder-scheduler-0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " pod="openstack/cinder-scheduler-0"
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.420500 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-scripts\") pod \"cinder-scheduler-0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " pod="openstack/cinder-scheduler-0"
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.420693 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwbjb\" (UniqueName: \"kubernetes.io/projected/aed7f398-6b73-4830-aa88-db87be2b99a0-kube-api-access-hwbjb\") pod \"cinder-scheduler-0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " pod="openstack/cinder-scheduler-0"
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.421078 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aed7f398-6b73-4830-aa88-db87be2b99a0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " pod="openstack/cinder-scheduler-0"
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.421277 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " pod="openstack/cinder-scheduler-0"
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.525181 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " pod="openstack/cinder-scheduler-0"
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.525992 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-config-data\") pod \"cinder-scheduler-0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " pod="openstack/cinder-scheduler-0"
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.526035 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-scripts\") pod \"cinder-scheduler-0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " pod="openstack/cinder-scheduler-0"
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.526087 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwbjb\" (UniqueName: \"kubernetes.io/projected/aed7f398-6b73-4830-aa88-db87be2b99a0-kube-api-access-hwbjb\") pod \"cinder-scheduler-0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " pod="openstack/cinder-scheduler-0"
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.526223 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aed7f398-6b73-4830-aa88-db87be2b99a0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " pod="openstack/cinder-scheduler-0"
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.526311 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " pod="openstack/cinder-scheduler-0"
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.526498 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aed7f398-6b73-4830-aa88-db87be2b99a0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " pod="openstack/cinder-scheduler-0"
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.531189 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " pod="openstack/cinder-scheduler-0"
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.531227 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-config-data\") pod \"cinder-scheduler-0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " pod="openstack/cinder-scheduler-0"
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.531757 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " pod="openstack/cinder-scheduler-0"
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.533987 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-scripts\") pod \"cinder-scheduler-0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " pod="openstack/cinder-scheduler-0"
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.551948 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwbjb\" (UniqueName: \"kubernetes.io/projected/aed7f398-6b73-4830-aa88-db87be2b99a0-kube-api-access-hwbjb\") pod \"cinder-scheduler-0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " pod="openstack/cinder-scheduler-0"
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.647610 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Jan 22 12:17:05 crc kubenswrapper[4773]: I0122 12:17:05.959493 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"6c38a538-da2d-4097-9851-de6f8f2106c1","Type":"ContainerStarted","Data":"7e98a7f4f46ed2717360d13cf3d2f4cec10ad5e791a0e422be5c2455f62ac63e"}
Jan 22 12:17:06 crc kubenswrapper[4773]: I0122 12:17:06.159487 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 22 12:17:06 crc kubenswrapper[4773]: I0122 12:17:06.669224 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5258fc2-7a55-4af3-bc0a-30b7e5c253c4" path="/var/lib/kubelet/pods/f5258fc2-7a55-4af3-bc0a-30b7e5c253c4/volumes"
Jan 22 12:17:06 crc kubenswrapper[4773]: I0122 12:17:06.975437 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"aed7f398-6b73-4830-aa88-db87be2b99a0","Type":"ContainerStarted","Data":"e9caab10710c70bddc60aeb018b91f13429ff97d8ea1f39287b8d4e3ca62e848"}
Jan 22 12:17:07 crc kubenswrapper[4773]: I0122 12:17:07.379697 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0"
Jan 22 12:17:07 crc kubenswrapper[4773]: I0122 12:17:07.990106 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"aed7f398-6b73-4830-aa88-db87be2b99a0","Type":"ContainerStarted","Data":"231d892f6fe508e5896980a2b5938e377e1f57b840196329664f9ef1b5b26d99"}
Jan 22 12:17:07 crc kubenswrapper[4773]: I0122 12:17:07.990458 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"aed7f398-6b73-4830-aa88-db87be2b99a0","Type":"ContainerStarted","Data":"1479304cf57df7fc13f187bc0a84e2fe6c7845f988d1d96f82b74355bc8045dd"}
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.024857 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-6f844cff7c-gmg8h"]
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.028552 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6f844cff7c-gmg8h"
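[Editor's note] Each volume of the replacement cinder-scheduler-0 pod passes through the same three reconciler stages above: VerifyControllerAttachedVolume started, then MountVolume started, then MountVolume.SetUp succeeded. A toy Go model of that desired-state/actual-state loop (names and the one-stage-per-pass scheme are illustrative, not kubelet's types):

package main

import "fmt"

type stage int

const (
	attached stage = iota // VerifyControllerAttachedVolume passed
	mounting              // MountVolume started
	mounted               // MountVolume.SetUp succeeded
)

func main() {
	// Desired state: the volumes the new cinder-scheduler-0 pod declares.
	desired := []string{"config-data-custom", "config-data", "scripts",
		"kube-api-access-hwbjb", "etc-machine-id", "combined-ca-bundle"}

	actual := map[string]stage{}
	// Walk desired state repeatedly, advancing each volume one stage per
	// pass; this is why the log shows the "started" records grouped before
	// the corresponding "succeeded" records.
	for pass := 0; pass < 3; pass++ {
		for _, v := range desired {
			s, seen := actual[v]
			switch {
			case !seen:
				actual[v] = attached
				fmt.Println("VerifyControllerAttachedVolume started:", v)
			case s == attached:
				actual[v] = mounting
				fmt.Println("MountVolume started:", v)
			case s == mounting:
				actual[v] = mounted
				fmt.Println("MountVolume.SetUp succeeded:", v)
			}
		}
	}
}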
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.032112 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.032352 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.032475 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.055371 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6f844cff7c-gmg8h"]
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.058009 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.057983403 podStartE2EDuration="4.057983403s" podCreationTimestamp="2026-01-22 12:17:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:17:09.046349646 +0000 UTC m=+1336.624465471" watchObservedRunningTime="2026-01-22 12:17:09.057983403 +0000 UTC m=+1336.636099228"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.112594 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/525ebe62-dc27-40fa-97a9-31346c6145a1-run-httpd\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.112659 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-internal-tls-certs\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.112698 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/525ebe62-dc27-40fa-97a9-31346c6145a1-log-httpd\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.112721 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-public-tls-certs\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.112820 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-config-data\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.112880 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/525ebe62-dc27-40fa-97a9-31346c6145a1-etc-swift\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.112912 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-combined-ca-bundle\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.112946 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6w67f\" (UniqueName: \"kubernetes.io/projected/525ebe62-dc27-40fa-97a9-31346c6145a1-kube-api-access-6w67f\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.214608 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/525ebe62-dc27-40fa-97a9-31346c6145a1-etc-swift\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.214679 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-combined-ca-bundle\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.214732 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6w67f\" (UniqueName: \"kubernetes.io/projected/525ebe62-dc27-40fa-97a9-31346c6145a1-kube-api-access-6w67f\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.214822 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/525ebe62-dc27-40fa-97a9-31346c6145a1-run-httpd\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.214857 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-internal-tls-certs\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.214891 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/525ebe62-dc27-40fa-97a9-31346c6145a1-log-httpd\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.214918 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-public-tls-certs\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.215020 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-config-data\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.216702 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/525ebe62-dc27-40fa-97a9-31346c6145a1-run-httpd\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.216736 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/525ebe62-dc27-40fa-97a9-31346c6145a1-log-httpd\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.222418 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-internal-tls-certs\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.224108 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-config-data\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.226154 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-combined-ca-bundle\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.226319 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-public-tls-certs\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.227455 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/525ebe62-dc27-40fa-97a9-31346c6145a1-etc-swift\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.236344 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6w67f\" (UniqueName: \"kubernetes.io/projected/525ebe62-dc27-40fa-97a9-31346c6145a1-kube-api-access-6w67f\") pod \"swift-proxy-6f844cff7c-gmg8h\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.351159 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.634679 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.634967 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerName="ceilometer-central-agent" containerID="cri-o://869e673da9e09061daf56b6a4605ccdcc3169f4e4c75425ecafc6f9b435e6c16" gracePeriod=30
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.635114 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerName="proxy-httpd" containerID="cri-o://71d82d0b0f6b51c9792097ab84e90d31299944e3e6dc27933891d4c8d7fdeb9c" gracePeriod=30
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.635157 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerName="sg-core" containerID="cri-o://4889d44fc54ea24d648360889e04777bfff2db34d09ee9a8c03d9f9d7da42ddc" gracePeriod=30
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.635189 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerName="ceilometer-notification-agent" containerID="cri-o://ce7d9f697c0ff0778f23562ac717ab821b52d38aa5fe4123bd9253ba750babf6" gracePeriod=30
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.745267 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.167:3000/\": read tcp 10.217.0.2:46500->10.217.0.167:3000: read: connection reset by peer"
Jan 22 12:17:09 crc kubenswrapper[4773]: I0122 12:17:09.953161 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6f844cff7c-gmg8h"]
Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.030586 4773 generic.go:334] "Generic (PLEG): container finished" podID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerID="71d82d0b0f6b51c9792097ab84e90d31299944e3e6dc27933891d4c8d7fdeb9c" exitCode=0
Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.030629 4773 generic.go:334] "Generic (PLEG): container finished" podID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerID="4889d44fc54ea24d648360889e04777bfff2db34d09ee9a8c03d9f9d7da42ddc" exitCode=2
Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.031859 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f568dc9b-1245-4e94-9b82-45b619c401ad","Type":"ContainerDied","Data":"71d82d0b0f6b51c9792097ab84e90d31299944e3e6dc27933891d4c8d7fdeb9c"}
Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.031900 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f568dc9b-1245-4e94-9b82-45b619c401ad","Type":"ContainerDied","Data":"4889d44fc54ea24d648360889e04777bfff2db34d09ee9a8c03d9f9d7da42ddc"}
Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.352664 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-pdmmj"]
source="api" pods=["openstack/nova-api-db-create-pdmmj"] Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.359041 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-pdmmj" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.373981 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-pdmmj"] Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.448360 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ba7d0b8-1d6d-4630-b574-7d0e55409e25-operator-scripts\") pod \"nova-api-db-create-pdmmj\" (UID: \"5ba7d0b8-1d6d-4630-b574-7d0e55409e25\") " pod="openstack/nova-api-db-create-pdmmj" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.451714 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-sqv54"] Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.451723 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stbwg\" (UniqueName: \"kubernetes.io/projected/5ba7d0b8-1d6d-4630-b574-7d0e55409e25-kube-api-access-stbwg\") pod \"nova-api-db-create-pdmmj\" (UID: \"5ba7d0b8-1d6d-4630-b574-7d0e55409e25\") " pod="openstack/nova-api-db-create-pdmmj" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.452903 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-sqv54" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.471720 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-8cf3-account-create-update-qqn6s"] Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.473203 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-8cf3-account-create-update-qqn6s" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.478621 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.481171 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-sqv54"] Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.498703 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-8cf3-account-create-update-qqn6s"] Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.553554 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stbwg\" (UniqueName: \"kubernetes.io/projected/5ba7d0b8-1d6d-4630-b574-7d0e55409e25-kube-api-access-stbwg\") pod \"nova-api-db-create-pdmmj\" (UID: \"5ba7d0b8-1d6d-4630-b574-7d0e55409e25\") " pod="openstack/nova-api-db-create-pdmmj" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.553615 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8845496-a1b9-4d8e-b60c-b657ceb619f5-operator-scripts\") pod \"nova-api-8cf3-account-create-update-qqn6s\" (UID: \"c8845496-a1b9-4d8e-b60c-b657ceb619f5\") " pod="openstack/nova-api-8cf3-account-create-update-qqn6s" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.553694 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ba7d0b8-1d6d-4630-b574-7d0e55409e25-operator-scripts\") pod \"nova-api-db-create-pdmmj\" (UID: \"5ba7d0b8-1d6d-4630-b574-7d0e55409e25\") " pod="openstack/nova-api-db-create-pdmmj" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.553729 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56ptz\" (UniqueName: \"kubernetes.io/projected/c8845496-a1b9-4d8e-b60c-b657ceb619f5-kube-api-access-56ptz\") pod \"nova-api-8cf3-account-create-update-qqn6s\" (UID: \"c8845496-a1b9-4d8e-b60c-b657ceb619f5\") " pod="openstack/nova-api-8cf3-account-create-update-qqn6s" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.553971 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8bbf2c9-35f6-4c9e-a959-6977302e59aa-operator-scripts\") pod \"nova-cell0-db-create-sqv54\" (UID: \"f8bbf2c9-35f6-4c9e-a959-6977302e59aa\") " pod="openstack/nova-cell0-db-create-sqv54" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.554156 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjxcz\" (UniqueName: \"kubernetes.io/projected/f8bbf2c9-35f6-4c9e-a959-6977302e59aa-kube-api-access-jjxcz\") pod \"nova-cell0-db-create-sqv54\" (UID: \"f8bbf2c9-35f6-4c9e-a959-6977302e59aa\") " pod="openstack/nova-cell0-db-create-sqv54" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.554602 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ba7d0b8-1d6d-4630-b574-7d0e55409e25-operator-scripts\") pod \"nova-api-db-create-pdmmj\" (UID: \"5ba7d0b8-1d6d-4630-b574-7d0e55409e25\") " pod="openstack/nova-api-db-create-pdmmj" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.576301 4773 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-stbwg\" (UniqueName: \"kubernetes.io/projected/5ba7d0b8-1d6d-4630-b574-7d0e55409e25-kube-api-access-stbwg\") pod \"nova-api-db-create-pdmmj\" (UID: \"5ba7d0b8-1d6d-4630-b574-7d0e55409e25\") " pod="openstack/nova-api-db-create-pdmmj" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.648412 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.650832 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-5xcwn"] Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.652267 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-5xcwn" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.656199 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjxcz\" (UniqueName: \"kubernetes.io/projected/f8bbf2c9-35f6-4c9e-a959-6977302e59aa-kube-api-access-jjxcz\") pod \"nova-cell0-db-create-sqv54\" (UID: \"f8bbf2c9-35f6-4c9e-a959-6977302e59aa\") " pod="openstack/nova-cell0-db-create-sqv54" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.656333 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8845496-a1b9-4d8e-b60c-b657ceb619f5-operator-scripts\") pod \"nova-api-8cf3-account-create-update-qqn6s\" (UID: \"c8845496-a1b9-4d8e-b60c-b657ceb619f5\") " pod="openstack/nova-api-8cf3-account-create-update-qqn6s" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.656406 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56ptz\" (UniqueName: \"kubernetes.io/projected/c8845496-a1b9-4d8e-b60c-b657ceb619f5-kube-api-access-56ptz\") pod \"nova-api-8cf3-account-create-update-qqn6s\" (UID: \"c8845496-a1b9-4d8e-b60c-b657ceb619f5\") " pod="openstack/nova-api-8cf3-account-create-update-qqn6s" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.656450 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8bbf2c9-35f6-4c9e-a959-6977302e59aa-operator-scripts\") pod \"nova-cell0-db-create-sqv54\" (UID: \"f8bbf2c9-35f6-4c9e-a959-6977302e59aa\") " pod="openstack/nova-cell0-db-create-sqv54" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.657488 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8bbf2c9-35f6-4c9e-a959-6977302e59aa-operator-scripts\") pod \"nova-cell0-db-create-sqv54\" (UID: \"f8bbf2c9-35f6-4c9e-a959-6977302e59aa\") " pod="openstack/nova-cell0-db-create-sqv54" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.658554 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8845496-a1b9-4d8e-b60c-b657ceb619f5-operator-scripts\") pod \"nova-api-8cf3-account-create-update-qqn6s\" (UID: \"c8845496-a1b9-4d8e-b60c-b657ceb619f5\") " pod="openstack/nova-api-8cf3-account-create-update-qqn6s" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.675128 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-eb71-account-create-update-ddfzr"] Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.676200 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/nova-cell1-db-create-5xcwn"] Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.676274 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-eb71-account-create-update-ddfzr" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.682975 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.684783 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-eb71-account-create-update-ddfzr"] Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.685757 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-pdmmj" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.687149 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjxcz\" (UniqueName: \"kubernetes.io/projected/f8bbf2c9-35f6-4c9e-a959-6977302e59aa-kube-api-access-jjxcz\") pod \"nova-cell0-db-create-sqv54\" (UID: \"f8bbf2c9-35f6-4c9e-a959-6977302e59aa\") " pod="openstack/nova-cell0-db-create-sqv54" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.698067 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56ptz\" (UniqueName: \"kubernetes.io/projected/c8845496-a1b9-4d8e-b60c-b657ceb619f5-kube-api-access-56ptz\") pod \"nova-api-8cf3-account-create-update-qqn6s\" (UID: \"c8845496-a1b9-4d8e-b60c-b657ceb619f5\") " pod="openstack/nova-api-8cf3-account-create-update-qqn6s" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.758804 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/62ef8b2b-1672-4050-9d1a-0dbdef69fcae-operator-scripts\") pod \"nova-cell0-eb71-account-create-update-ddfzr\" (UID: \"62ef8b2b-1672-4050-9d1a-0dbdef69fcae\") " pod="openstack/nova-cell0-eb71-account-create-update-ddfzr" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.758879 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdjhq\" (UniqueName: \"kubernetes.io/projected/5e274f9a-f487-4a76-864b-38f0b4e80ed9-kube-api-access-wdjhq\") pod \"nova-cell1-db-create-5xcwn\" (UID: \"5e274f9a-f487-4a76-864b-38f0b4e80ed9\") " pod="openstack/nova-cell1-db-create-5xcwn" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.758901 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jpgn\" (UniqueName: \"kubernetes.io/projected/62ef8b2b-1672-4050-9d1a-0dbdef69fcae-kube-api-access-9jpgn\") pod \"nova-cell0-eb71-account-create-update-ddfzr\" (UID: \"62ef8b2b-1672-4050-9d1a-0dbdef69fcae\") " pod="openstack/nova-cell0-eb71-account-create-update-ddfzr" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.758975 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e274f9a-f487-4a76-864b-38f0b4e80ed9-operator-scripts\") pod \"nova-cell1-db-create-5xcwn\" (UID: \"5e274f9a-f487-4a76-864b-38f0b4e80ed9\") " pod="openstack/nova-cell1-db-create-5xcwn" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.780084 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-sqv54" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.795035 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-8cf3-account-create-update-qqn6s" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.862139 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/62ef8b2b-1672-4050-9d1a-0dbdef69fcae-operator-scripts\") pod \"nova-cell0-eb71-account-create-update-ddfzr\" (UID: \"62ef8b2b-1672-4050-9d1a-0dbdef69fcae\") " pod="openstack/nova-cell0-eb71-account-create-update-ddfzr" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.862251 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdjhq\" (UniqueName: \"kubernetes.io/projected/5e274f9a-f487-4a76-864b-38f0b4e80ed9-kube-api-access-wdjhq\") pod \"nova-cell1-db-create-5xcwn\" (UID: \"5e274f9a-f487-4a76-864b-38f0b4e80ed9\") " pod="openstack/nova-cell1-db-create-5xcwn" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.862297 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jpgn\" (UniqueName: \"kubernetes.io/projected/62ef8b2b-1672-4050-9d1a-0dbdef69fcae-kube-api-access-9jpgn\") pod \"nova-cell0-eb71-account-create-update-ddfzr\" (UID: \"62ef8b2b-1672-4050-9d1a-0dbdef69fcae\") " pod="openstack/nova-cell0-eb71-account-create-update-ddfzr" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.862382 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e274f9a-f487-4a76-864b-38f0b4e80ed9-operator-scripts\") pod \"nova-cell1-db-create-5xcwn\" (UID: \"5e274f9a-f487-4a76-864b-38f0b4e80ed9\") " pod="openstack/nova-cell1-db-create-5xcwn" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.863515 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e274f9a-f487-4a76-864b-38f0b4e80ed9-operator-scripts\") pod \"nova-cell1-db-create-5xcwn\" (UID: \"5e274f9a-f487-4a76-864b-38f0b4e80ed9\") " pod="openstack/nova-cell1-db-create-5xcwn" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.864195 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/62ef8b2b-1672-4050-9d1a-0dbdef69fcae-operator-scripts\") pod \"nova-cell0-eb71-account-create-update-ddfzr\" (UID: \"62ef8b2b-1672-4050-9d1a-0dbdef69fcae\") " pod="openstack/nova-cell0-eb71-account-create-update-ddfzr" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.872817 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-8744-account-create-update-m2dw9"] Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.874946 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-8744-account-create-update-m2dw9" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.879450 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.881181 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-8744-account-create-update-m2dw9"] Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.881340 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdjhq\" (UniqueName: \"kubernetes.io/projected/5e274f9a-f487-4a76-864b-38f0b4e80ed9-kube-api-access-wdjhq\") pod \"nova-cell1-db-create-5xcwn\" (UID: \"5e274f9a-f487-4a76-864b-38f0b4e80ed9\") " pod="openstack/nova-cell1-db-create-5xcwn" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.889208 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jpgn\" (UniqueName: \"kubernetes.io/projected/62ef8b2b-1672-4050-9d1a-0dbdef69fcae-kube-api-access-9jpgn\") pod \"nova-cell0-eb71-account-create-update-ddfzr\" (UID: \"62ef8b2b-1672-4050-9d1a-0dbdef69fcae\") " pod="openstack/nova-cell0-eb71-account-create-update-ddfzr" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.964739 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05299183-b5cc-4655-b5d2-6a89d1f5c138-operator-scripts\") pod \"nova-cell1-8744-account-create-update-m2dw9\" (UID: \"05299183-b5cc-4655-b5d2-6a89d1f5c138\") " pod="openstack/nova-cell1-8744-account-create-update-m2dw9" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.964830 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rs6v\" (UniqueName: \"kubernetes.io/projected/05299183-b5cc-4655-b5d2-6a89d1f5c138-kube-api-access-7rs6v\") pod \"nova-cell1-8744-account-create-update-m2dw9\" (UID: \"05299183-b5cc-4655-b5d2-6a89d1f5c138\") " pod="openstack/nova-cell1-8744-account-create-update-m2dw9" Jan 22 12:17:10 crc kubenswrapper[4773]: I0122 12:17:10.976149 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-5xcwn" Jan 22 12:17:11 crc kubenswrapper[4773]: I0122 12:17:11.044765 4773 generic.go:334] "Generic (PLEG): container finished" podID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerID="869e673da9e09061daf56b6a4605ccdcc3169f4e4c75425ecafc6f9b435e6c16" exitCode=0 Jan 22 12:17:11 crc kubenswrapper[4773]: I0122 12:17:11.044943 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f568dc9b-1245-4e94-9b82-45b619c401ad","Type":"ContainerDied","Data":"869e673da9e09061daf56b6a4605ccdcc3169f4e4c75425ecafc6f9b435e6c16"} Jan 22 12:17:11 crc kubenswrapper[4773]: I0122 12:17:11.067618 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05299183-b5cc-4655-b5d2-6a89d1f5c138-operator-scripts\") pod \"nova-cell1-8744-account-create-update-m2dw9\" (UID: \"05299183-b5cc-4655-b5d2-6a89d1f5c138\") " pod="openstack/nova-cell1-8744-account-create-update-m2dw9" Jan 22 12:17:11 crc kubenswrapper[4773]: I0122 12:17:11.067715 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rs6v\" (UniqueName: \"kubernetes.io/projected/05299183-b5cc-4655-b5d2-6a89d1f5c138-kube-api-access-7rs6v\") pod \"nova-cell1-8744-account-create-update-m2dw9\" (UID: \"05299183-b5cc-4655-b5d2-6a89d1f5c138\") " pod="openstack/nova-cell1-8744-account-create-update-m2dw9" Jan 22 12:17:11 crc kubenswrapper[4773]: I0122 12:17:11.068412 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05299183-b5cc-4655-b5d2-6a89d1f5c138-operator-scripts\") pod \"nova-cell1-8744-account-create-update-m2dw9\" (UID: \"05299183-b5cc-4655-b5d2-6a89d1f5c138\") " pod="openstack/nova-cell1-8744-account-create-update-m2dw9" Jan 22 12:17:11 crc kubenswrapper[4773]: I0122 12:17:11.085680 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rs6v\" (UniqueName: \"kubernetes.io/projected/05299183-b5cc-4655-b5d2-6a89d1f5c138-kube-api-access-7rs6v\") pod \"nova-cell1-8744-account-create-update-m2dw9\" (UID: \"05299183-b5cc-4655-b5d2-6a89d1f5c138\") " pod="openstack/nova-cell1-8744-account-create-update-m2dw9" Jan 22 12:17:11 crc kubenswrapper[4773]: I0122 12:17:11.085707 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-eb71-account-create-update-ddfzr" Jan 22 12:17:11 crc kubenswrapper[4773]: I0122 12:17:11.304108 4773 util.go:30] "No sandbox for pod can be found. 
Jan 22 12:17:12 crc kubenswrapper[4773]: I0122 12:17:12.061405 4773 generic.go:334] "Generic (PLEG): container finished" podID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerID="ce7d9f697c0ff0778f23562ac717ab821b52d38aa5fe4123bd9253ba750babf6" exitCode=0
Jan 22 12:17:12 crc kubenswrapper[4773]: I0122 12:17:12.061496 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f568dc9b-1245-4e94-9b82-45b619c401ad","Type":"ContainerDied","Data":"ce7d9f697c0ff0778f23562ac717ab821b52d38aa5fe4123bd9253ba750babf6"}
Jan 22 12:17:15 crc kubenswrapper[4773]: I0122 12:17:15.928391 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.112698 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6f844cff7c-gmg8h" event={"ID":"525ebe62-dc27-40fa-97a9-31346c6145a1","Type":"ContainerStarted","Data":"88b310776544f69fc2ad3ad17a6ca12d59f34c72942fa6908b7641b40484281d"}
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.297907 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.399895 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-sg-core-conf-yaml\") pod \"f568dc9b-1245-4e94-9b82-45b619c401ad\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") "
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.399927 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-combined-ca-bundle\") pod \"f568dc9b-1245-4e94-9b82-45b619c401ad\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") "
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.400018 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jc7mv\" (UniqueName: \"kubernetes.io/projected/f568dc9b-1245-4e94-9b82-45b619c401ad-kube-api-access-jc7mv\") pod \"f568dc9b-1245-4e94-9b82-45b619c401ad\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") "
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.400067 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f568dc9b-1245-4e94-9b82-45b619c401ad-run-httpd\") pod \"f568dc9b-1245-4e94-9b82-45b619c401ad\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") "
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.400903 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-config-data\") pod \"f568dc9b-1245-4e94-9b82-45b619c401ad\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") "
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.400655 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f568dc9b-1245-4e94-9b82-45b619c401ad-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f568dc9b-1245-4e94-9b82-45b619c401ad" (UID: "f568dc9b-1245-4e94-9b82-45b619c401ad"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.400948 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-scripts\") pod \"f568dc9b-1245-4e94-9b82-45b619c401ad\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") "
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.401018 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f568dc9b-1245-4e94-9b82-45b619c401ad-log-httpd\") pod \"f568dc9b-1245-4e94-9b82-45b619c401ad\" (UID: \"f568dc9b-1245-4e94-9b82-45b619c401ad\") "
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.401690 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f568dc9b-1245-4e94-9b82-45b619c401ad-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f568dc9b-1245-4e94-9b82-45b619c401ad" (UID: "f568dc9b-1245-4e94-9b82-45b619c401ad"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.402299 4773 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f568dc9b-1245-4e94-9b82-45b619c401ad-run-httpd\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.402318 4773 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f568dc9b-1245-4e94-9b82-45b619c401ad-log-httpd\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.406619 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-scripts" (OuterVolumeSpecName: "scripts") pod "f568dc9b-1245-4e94-9b82-45b619c401ad" (UID: "f568dc9b-1245-4e94-9b82-45b619c401ad"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.413061 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f568dc9b-1245-4e94-9b82-45b619c401ad-kube-api-access-jc7mv" (OuterVolumeSpecName: "kube-api-access-jc7mv") pod "f568dc9b-1245-4e94-9b82-45b619c401ad" (UID: "f568dc9b-1245-4e94-9b82-45b619c401ad"). InnerVolumeSpecName "kube-api-access-jc7mv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.452334 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f568dc9b-1245-4e94-9b82-45b619c401ad" (UID: "f568dc9b-1245-4e94-9b82-45b619c401ad"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.503797 4773 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.504111 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jc7mv\" (UniqueName: \"kubernetes.io/projected/f568dc9b-1245-4e94-9b82-45b619c401ad-kube-api-access-jc7mv\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.504215 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.589515 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-config-data" (OuterVolumeSpecName: "config-data") pod "f568dc9b-1245-4e94-9b82-45b619c401ad" (UID: "f568dc9b-1245-4e94-9b82-45b619c401ad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.595594 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f568dc9b-1245-4e94-9b82-45b619c401ad" (UID: "f568dc9b-1245-4e94-9b82-45b619c401ad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.606340 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.606375 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f568dc9b-1245-4e94-9b82-45b619c401ad-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.801343 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-sqv54"]
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.823494 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-8744-account-create-update-m2dw9"]
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.831059 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-eb71-account-create-update-ddfzr"]
Jan 22 12:17:16 crc kubenswrapper[4773]: W0122 12:17:16.839895 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62ef8b2b_1672_4050_9d1a_0dbdef69fcae.slice/crio-a7005ab948d9ca12ca91892ead4a2abdfe06ee19efb5be4d2291d49baadb4bc6 WatchSource:0}: Error finding container a7005ab948d9ca12ca91892ead4a2abdfe06ee19efb5be4d2291d49baadb4bc6: Status 404 returned error can't find the container with id a7005ab948d9ca12ca91892ead4a2abdfe06ee19efb5be4d2291d49baadb4bc6
Jan 22 12:17:16 crc kubenswrapper[4773]: I0122 12:17:16.849343 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-5xcwn"]
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.023407 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-pdmmj"]
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.023407 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-pdmmj"]
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.056834 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-8cf3-account-create-update-qqn6s"]
Jan 22 12:17:17 crc kubenswrapper[4773]: W0122 12:17:17.075137 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc8845496_a1b9_4d8e_b60c_b657ceb619f5.slice/crio-a54d928f1f19923f0dce277d4c4cc83b12d17b022ce630d2c7b302611e1e9880 WatchSource:0}: Error finding container a54d928f1f19923f0dce277d4c4cc83b12d17b022ce630d2c7b302611e1e9880: Status 404 returned error can't find the container with id a54d928f1f19923f0dce277d4c4cc83b12d17b022ce630d2c7b302611e1e9880
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.125395 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-5xcwn" event={"ID":"5e274f9a-f487-4a76-864b-38f0b4e80ed9","Type":"ContainerStarted","Data":"507929d18f89c0b164ab0b2d2aa59779d7fed3911df37db3256ec8e2c3403724"}
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.130376 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6f844cff7c-gmg8h" event={"ID":"525ebe62-dc27-40fa-97a9-31346c6145a1","Type":"ContainerStarted","Data":"aee88f91e8816d6bd6a9431e8b236642a9b4210f82d3835d5b43ad50cab3ce23"}
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.130442 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6f844cff7c-gmg8h" event={"ID":"525ebe62-dc27-40fa-97a9-31346c6145a1","Type":"ContainerStarted","Data":"521549d44714482372b02d7ce753faaca6ca987699d2815bb433305658a9c8f6"}
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.130583 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.143109 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f568dc9b-1245-4e94-9b82-45b619c401ad","Type":"ContainerDied","Data":"efb23cb1355639f2cccf8f5a54bd8e31b1a9f8d9503eb10035f051f2f1eb80c7"}
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.143171 4773 scope.go:117] "RemoveContainer" containerID="71d82d0b0f6b51c9792097ab84e90d31299944e3e6dc27933891d4c8d7fdeb9c"
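
The event={...} payload in each "SyncLoop (PLEG): event for pod" entry is plain JSON: an ID (the pod UID), a Type such as ContainerStarted or ContainerDied, and a Data field carrying the container or sandbox ID. A short Go sketch that decodes one of the payloads above (the plegEvent struct is an assumption made for parsing here, not the kubelet's own type):

package main

import (
	"encoding/json"
	"fmt"
)

// plegEvent mirrors the JSON shape printed after "event=" in these entries.
type plegEvent struct {
	ID   string // pod UID
	Type string // "ContainerStarted", "ContainerDied", ...
	Data string // container or sandbox ID
}

func main() {
	raw := `{"ID":"5e274f9a-f487-4a76-864b-38f0b4e80ed9","Type":"ContainerStarted","Data":"507929d18f89c0b164ab0b2d2aa59779d7fed3911df37db3256ec8e2c3403724"}`
	var ev plegEvent
	if err := json.Unmarshal([]byte(raw), &ev); err != nil {
		panic(err)
	}
	fmt.Printf("pod %s: %s %s\n", ev.ID, ev.Type, ev.Data)
}
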
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.143208 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.144960 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-eb71-account-create-update-ddfzr" event={"ID":"62ef8b2b-1672-4050-9d1a-0dbdef69fcae","Type":"ContainerStarted","Data":"a7005ab948d9ca12ca91892ead4a2abdfe06ee19efb5be4d2291d49baadb4bc6"}
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.146794 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8744-account-create-update-m2dw9" event={"ID":"05299183-b5cc-4655-b5d2-6a89d1f5c138","Type":"ContainerStarted","Data":"2ea11d7fb758f114fe1ff5a749557c1adeb4fa3438c369900e2db72488636232"}
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.148777 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"6c38a538-da2d-4097-9851-de6f8f2106c1","Type":"ContainerStarted","Data":"c508023fb186e6df6d8bd07919aae57bb3fd29d1d4aa3d274d27c206409ca260"}
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.149856 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-sqv54" event={"ID":"f8bbf2c9-35f6-4c9e-a959-6977302e59aa","Type":"ContainerStarted","Data":"d3a6972ff767caca928f6b53681b5f51528af04c6b1357da122cd501e03de520"}
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.162188 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-pdmmj" event={"ID":"5ba7d0b8-1d6d-4630-b574-7d0e55409e25","Type":"ContainerStarted","Data":"6a1eb4b0858a82267852de4dc5a67e381f9f095841cda418ccb53a7f51309733"}
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.164300 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8cf3-account-create-update-qqn6s" event={"ID":"c8845496-a1b9-4d8e-b60c-b657ceb619f5","Type":"ContainerStarted","Data":"a54d928f1f19923f0dce277d4c4cc83b12d17b022ce630d2c7b302611e1e9880"}
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.188155 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-6f844cff7c-gmg8h" podStartSLOduration=9.188137367 podStartE2EDuration="9.188137367s" podCreationTimestamp="2026-01-22 12:17:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:17:17.160820459 +0000 UTC m=+1344.738936284" watchObservedRunningTime="2026-01-22 12:17:17.188137367 +0000 UTC m=+1344.766253192"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.189511 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.388852263 podStartE2EDuration="13.189502585s" podCreationTimestamp="2026-01-22 12:17:04 +0000 UTC" firstStartedPulling="2026-01-22 12:17:05.339267368 +0000 UTC m=+1332.917383193" lastFinishedPulling="2026-01-22 12:17:16.13991769 +0000 UTC m=+1343.718033515" observedRunningTime="2026-01-22 12:17:17.174343329 +0000 UTC m=+1344.752459154" watchObservedRunningTime="2026-01-22 12:17:17.189502585 +0000 UTC m=+1344.767618400"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.201472 4773 scope.go:117] "RemoveContainer" containerID="4889d44fc54ea24d648360889e04777bfff2db34d09ee9a8c03d9f9d7da42ddc"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.251351 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
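
The two "Observed pod startup duration" entries above make the field arithmetic visible: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration further subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling; zero for swift-proxy, whose pull timestamps are the zero time). For openstackclient: 13.189502585s - (12:17:16.13991769 - 12:17:05.339267368) = 2.388852263s, matching the logged values. A quick check in Go (the layout string is an assumption matching the printed format):

package main

import (
	"fmt"
	"time"
)

const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func mustParse(s string) time.Time {
	t, err := time.Parse(layout, s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2026-01-22 12:17:04 +0000 UTC")
	firstPull := mustParse("2026-01-22 12:17:05.339267368 +0000 UTC")
	lastPull := mustParse("2026-01-22 12:17:16.13991769 +0000 UTC")
	observed := mustParse("2026-01-22 12:17:17.189502585 +0000 UTC")

	e2e := observed.Sub(created)         // 13.189502585s
	slo := e2e - lastPull.Sub(firstPull) // 2.388852263s
	fmt.Println(e2e, slo)
}
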
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.262904 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.264080 4773 scope.go:117] "RemoveContainer" containerID="ce7d9f697c0ff0778f23562ac717ab821b52d38aa5fe4123bd9253ba750babf6"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.308802 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:17:17 crc kubenswrapper[4773]: E0122 12:17:17.316764 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerName="sg-core"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.316782 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerName="sg-core"
Jan 22 12:17:17 crc kubenswrapper[4773]: E0122 12:17:17.316800 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerName="ceilometer-central-agent"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.316808 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerName="ceilometer-central-agent"
Jan 22 12:17:17 crc kubenswrapper[4773]: E0122 12:17:17.316826 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerName="ceilometer-notification-agent"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.316835 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerName="ceilometer-notification-agent"
Jan 22 12:17:17 crc kubenswrapper[4773]: E0122 12:17:17.316862 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerName="proxy-httpd"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.316868 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerName="proxy-httpd"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.317209 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerName="ceilometer-notification-agent"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.317241 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerName="sg-core"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.317257 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerName="ceilometer-central-agent"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.317274 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f568dc9b-1245-4e94-9b82-45b619c401ad" containerName="proxy-httpd"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.327590 4773 scope.go:117] "RemoveContainer" containerID="869e673da9e09061daf56b6a4605ccdcc3169f4e4c75425ecafc6f9b435e6c16"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.330229 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
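
The cpu_manager, state_mem, and memory_manager burst above is the resource managers dropping every per-container assignment recorded under the deleted pod's UID before the replacement ceilometer-0 is admitted. Schematically this is a keyed-map cleanup; a toy Go sketch of the pattern (illustrative only, the kubelet's actual state types differ):

package main

import "fmt"

// key identifies an assignment the way these entries do.
type key struct{ podUID, containerName string }

func main() {
	assignments := map[key]string{
		{"f568dc9b-1245-4e94-9b82-45b619c401ad", "sg-core"}:                  "cpus 0-3",
		{"f568dc9b-1245-4e94-9b82-45b619c401ad", "ceilometer-central-agent"}: "cpus 0-3",
	}
	// RemoveStaleState: drop assignments for a pod that no longer exists.
	stale := "f568dc9b-1245-4e94-9b82-45b619c401ad"
	for k := range assignments { // deleting during range is safe in Go
		if k.podUID == stale {
			fmt.Printf("Deleted assignment podUID=%q containerName=%q\n", k.podUID, k.containerName)
			delete(assignments, k)
		}
	}
}
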
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.330377 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.332137 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.332430 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.555576 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/241ba95d-13d5-49b7-8616-8f227297154d-run-httpd\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.555642 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-config-data\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.555674 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.555704 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.555775 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp5g6\" (UniqueName: \"kubernetes.io/projected/241ba95d-13d5-49b7-8616-8f227297154d-kube-api-access-tp5g6\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.555804 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/241ba95d-13d5-49b7-8616-8f227297154d-log-httpd\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.555825 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-scripts\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0"
Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.656990 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0"
\"kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0" Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.657133 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp5g6\" (UniqueName: \"kubernetes.io/projected/241ba95d-13d5-49b7-8616-8f227297154d-kube-api-access-tp5g6\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0" Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.657171 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/241ba95d-13d5-49b7-8616-8f227297154d-log-httpd\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0" Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.657201 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-scripts\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0" Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.657293 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/241ba95d-13d5-49b7-8616-8f227297154d-run-httpd\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0" Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.657341 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-config-data\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0" Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.659395 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/241ba95d-13d5-49b7-8616-8f227297154d-run-httpd\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0" Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.670587 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/241ba95d-13d5-49b7-8616-8f227297154d-log-httpd\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0" Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.671332 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0" Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.674721 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-scripts\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0" Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.674924 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-config-data\") pod 
\"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0" Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.720371 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0" Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.723580 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp5g6\" (UniqueName: \"kubernetes.io/projected/241ba95d-13d5-49b7-8616-8f227297154d-kube-api-access-tp5g6\") pod \"ceilometer-0\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " pod="openstack/ceilometer-0" Jan 22 12:17:17 crc kubenswrapper[4773]: I0122 12:17:17.963126 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 12:17:18 crc kubenswrapper[4773]: I0122 12:17:18.179868 4773 generic.go:334] "Generic (PLEG): container finished" podID="5ba7d0b8-1d6d-4630-b574-7d0e55409e25" containerID="d4ccce0b1dc1cb12e6fbde6555905a3054bf04d220364144813fb3c66dbd8f9a" exitCode=0 Jan 22 12:17:18 crc kubenswrapper[4773]: I0122 12:17:18.180027 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-pdmmj" event={"ID":"5ba7d0b8-1d6d-4630-b574-7d0e55409e25","Type":"ContainerDied","Data":"d4ccce0b1dc1cb12e6fbde6555905a3054bf04d220364144813fb3c66dbd8f9a"} Jan 22 12:17:18 crc kubenswrapper[4773]: I0122 12:17:18.182263 4773 generic.go:334] "Generic (PLEG): container finished" podID="c8845496-a1b9-4d8e-b60c-b657ceb619f5" containerID="b7af3d314ac833c980770e1aea6c18199d9bb9e943d9d465a4255d04163f0312" exitCode=0 Jan 22 12:17:18 crc kubenswrapper[4773]: I0122 12:17:18.182314 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8cf3-account-create-update-qqn6s" event={"ID":"c8845496-a1b9-4d8e-b60c-b657ceb619f5","Type":"ContainerDied","Data":"b7af3d314ac833c980770e1aea6c18199d9bb9e943d9d465a4255d04163f0312"} Jan 22 12:17:18 crc kubenswrapper[4773]: I0122 12:17:18.185904 4773 generic.go:334] "Generic (PLEG): container finished" podID="62ef8b2b-1672-4050-9d1a-0dbdef69fcae" containerID="74b83a04fdad0dadc32143fbca6d09d286f812011493ed35ba77825a03b9f5c1" exitCode=0 Jan 22 12:17:18 crc kubenswrapper[4773]: I0122 12:17:18.186029 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-eb71-account-create-update-ddfzr" event={"ID":"62ef8b2b-1672-4050-9d1a-0dbdef69fcae","Type":"ContainerDied","Data":"74b83a04fdad0dadc32143fbca6d09d286f812011493ed35ba77825a03b9f5c1"} Jan 22 12:17:18 crc kubenswrapper[4773]: I0122 12:17:18.187747 4773 generic.go:334] "Generic (PLEG): container finished" podID="05299183-b5cc-4655-b5d2-6a89d1f5c138" containerID="14024674f87e3ede95f2eb32c3831caf7330b555c63eb56d5c802f11161870a3" exitCode=0 Jan 22 12:17:18 crc kubenswrapper[4773]: I0122 12:17:18.187863 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8744-account-create-update-m2dw9" event={"ID":"05299183-b5cc-4655-b5d2-6a89d1f5c138","Type":"ContainerDied","Data":"14024674f87e3ede95f2eb32c3831caf7330b555c63eb56d5c802f11161870a3"} Jan 22 12:17:18 crc kubenswrapper[4773]: I0122 12:17:18.208964 4773 generic.go:334] "Generic (PLEG): container finished" podID="f8bbf2c9-35f6-4c9e-a959-6977302e59aa" containerID="029d6b50f01ce8fa59024b0d1eaad9c1f17eb7a636625bf9bcd2dacaa13828f7" exitCode=0 Jan 22 
Jan 22 12:17:18 crc kubenswrapper[4773]: I0122 12:17:18.210408 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-sqv54" event={"ID":"f8bbf2c9-35f6-4c9e-a959-6977302e59aa","Type":"ContainerDied","Data":"029d6b50f01ce8fa59024b0d1eaad9c1f17eb7a636625bf9bcd2dacaa13828f7"}
Jan 22 12:17:18 crc kubenswrapper[4773]: I0122 12:17:18.219892 4773 generic.go:334] "Generic (PLEG): container finished" podID="5e274f9a-f487-4a76-864b-38f0b4e80ed9" containerID="aab3f6dfc1780bf96a3c69e8b0c5eb559ab8d37f13dfa4bbf2394a578877f335" exitCode=0
Jan 22 12:17:18 crc kubenswrapper[4773]: I0122 12:17:18.221180 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-5xcwn" event={"ID":"5e274f9a-f487-4a76-864b-38f0b4e80ed9","Type":"ContainerDied","Data":"aab3f6dfc1780bf96a3c69e8b0c5eb559ab8d37f13dfa4bbf2394a578877f335"}
Jan 22 12:17:18 crc kubenswrapper[4773]: I0122 12:17:18.221777 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:18 crc kubenswrapper[4773]: I0122 12:17:18.486349 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:17:18 crc kubenswrapper[4773]: I0122 12:17:18.669258 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f568dc9b-1245-4e94-9b82-45b619c401ad" path="/var/lib/kubelet/pods/f568dc9b-1245-4e94-9b82-45b619c401ad/volumes"
Jan 22 12:17:19 crc kubenswrapper[4773]: I0122 12:17:19.230169 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"241ba95d-13d5-49b7-8616-8f227297154d","Type":"ContainerStarted","Data":"15d1f96339c3c2f6278fbfda10282b7a88813d3211b8e26b86b86801e9e1ece8"}
Jan 22 12:17:19 crc kubenswrapper[4773]: I0122 12:17:19.690142 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-pdmmj"
Jan 22 12:17:19 crc kubenswrapper[4773]: I0122 12:17:19.707611 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stbwg\" (UniqueName: \"kubernetes.io/projected/5ba7d0b8-1d6d-4630-b574-7d0e55409e25-kube-api-access-stbwg\") pod \"5ba7d0b8-1d6d-4630-b574-7d0e55409e25\" (UID: \"5ba7d0b8-1d6d-4630-b574-7d0e55409e25\") "
Jan 22 12:17:19 crc kubenswrapper[4773]: I0122 12:17:19.707837 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ba7d0b8-1d6d-4630-b574-7d0e55409e25-operator-scripts\") pod \"5ba7d0b8-1d6d-4630-b574-7d0e55409e25\" (UID: \"5ba7d0b8-1d6d-4630-b574-7d0e55409e25\") "
Jan 22 12:17:19 crc kubenswrapper[4773]: I0122 12:17:19.711463 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ba7d0b8-1d6d-4630-b574-7d0e55409e25-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5ba7d0b8-1d6d-4630-b574-7d0e55409e25" (UID: "5ba7d0b8-1d6d-4630-b574-7d0e55409e25"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:17:19 crc kubenswrapper[4773]: I0122 12:17:19.813471 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stbwg\" (UniqueName: \"kubernetes.io/projected/5ba7d0b8-1d6d-4630-b574-7d0e55409e25-kube-api-access-stbwg\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:19 crc kubenswrapper[4773]: I0122 12:17:19.813513 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ba7d0b8-1d6d-4630-b574-7d0e55409e25-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:19 crc kubenswrapper[4773]: I0122 12:17:19.907775 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-8cf3-account-create-update-qqn6s" Jan 22 12:17:19 crc kubenswrapper[4773]: I0122 12:17:19.917927 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-8744-account-create-update-m2dw9" Jan 22 12:17:19 crc kubenswrapper[4773]: I0122 12:17:19.927591 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-eb71-account-create-update-ddfzr" Jan 22 12:17:19 crc kubenswrapper[4773]: I0122 12:17:19.952892 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-sqv54" Jan 22 12:17:19 crc kubenswrapper[4773]: I0122 12:17:19.967549 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-5xcwn" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.016549 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56ptz\" (UniqueName: \"kubernetes.io/projected/c8845496-a1b9-4d8e-b60c-b657ceb619f5-kube-api-access-56ptz\") pod \"c8845496-a1b9-4d8e-b60c-b657ceb619f5\" (UID: \"c8845496-a1b9-4d8e-b60c-b657ceb619f5\") " Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.016603 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8845496-a1b9-4d8e-b60c-b657ceb619f5-operator-scripts\") pod \"c8845496-a1b9-4d8e-b60c-b657ceb619f5\" (UID: \"c8845496-a1b9-4d8e-b60c-b657ceb619f5\") " Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.017737 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8845496-a1b9-4d8e-b60c-b657ceb619f5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c8845496-a1b9-4d8e-b60c-b657ceb619f5" (UID: "c8845496-a1b9-4d8e-b60c-b657ceb619f5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.022100 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8845496-a1b9-4d8e-b60c-b657ceb619f5-kube-api-access-56ptz" (OuterVolumeSpecName: "kube-api-access-56ptz") pod "c8845496-a1b9-4d8e-b60c-b657ceb619f5" (UID: "c8845496-a1b9-4d8e-b60c-b657ceb619f5"). InnerVolumeSpecName "kube-api-access-56ptz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.117882 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jjxcz\" (UniqueName: \"kubernetes.io/projected/f8bbf2c9-35f6-4c9e-a959-6977302e59aa-kube-api-access-jjxcz\") pod \"f8bbf2c9-35f6-4c9e-a959-6977302e59aa\" (UID: \"f8bbf2c9-35f6-4c9e-a959-6977302e59aa\") " Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.118210 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05299183-b5cc-4655-b5d2-6a89d1f5c138-operator-scripts\") pod \"05299183-b5cc-4655-b5d2-6a89d1f5c138\" (UID: \"05299183-b5cc-4655-b5d2-6a89d1f5c138\") " Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.118395 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rs6v\" (UniqueName: \"kubernetes.io/projected/05299183-b5cc-4655-b5d2-6a89d1f5c138-kube-api-access-7rs6v\") pod \"05299183-b5cc-4655-b5d2-6a89d1f5c138\" (UID: \"05299183-b5cc-4655-b5d2-6a89d1f5c138\") " Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.118482 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jpgn\" (UniqueName: \"kubernetes.io/projected/62ef8b2b-1672-4050-9d1a-0dbdef69fcae-kube-api-access-9jpgn\") pod \"62ef8b2b-1672-4050-9d1a-0dbdef69fcae\" (UID: \"62ef8b2b-1672-4050-9d1a-0dbdef69fcae\") " Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.118529 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/62ef8b2b-1672-4050-9d1a-0dbdef69fcae-operator-scripts\") pod \"62ef8b2b-1672-4050-9d1a-0dbdef69fcae\" (UID: \"62ef8b2b-1672-4050-9d1a-0dbdef69fcae\") " Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.118599 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8bbf2c9-35f6-4c9e-a959-6977302e59aa-operator-scripts\") pod \"f8bbf2c9-35f6-4c9e-a959-6977302e59aa\" (UID: \"f8bbf2c9-35f6-4c9e-a959-6977302e59aa\") " Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.118622 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e274f9a-f487-4a76-864b-38f0b4e80ed9-operator-scripts\") pod \"5e274f9a-f487-4a76-864b-38f0b4e80ed9\" (UID: \"5e274f9a-f487-4a76-864b-38f0b4e80ed9\") " Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.118681 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wdjhq\" (UniqueName: \"kubernetes.io/projected/5e274f9a-f487-4a76-864b-38f0b4e80ed9-kube-api-access-wdjhq\") pod \"5e274f9a-f487-4a76-864b-38f0b4e80ed9\" (UID: \"5e274f9a-f487-4a76-864b-38f0b4e80ed9\") " Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.118887 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05299183-b5cc-4655-b5d2-6a89d1f5c138-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "05299183-b5cc-4655-b5d2-6a89d1f5c138" (UID: "05299183-b5cc-4655-b5d2-6a89d1f5c138"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.119124 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62ef8b2b-1672-4050-9d1a-0dbdef69fcae-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "62ef8b2b-1672-4050-9d1a-0dbdef69fcae" (UID: "62ef8b2b-1672-4050-9d1a-0dbdef69fcae"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.119224 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8bbf2c9-35f6-4c9e-a959-6977302e59aa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f8bbf2c9-35f6-4c9e-a959-6977302e59aa" (UID: "f8bbf2c9-35f6-4c9e-a959-6977302e59aa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.119463 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e274f9a-f487-4a76-864b-38f0b4e80ed9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5e274f9a-f487-4a76-864b-38f0b4e80ed9" (UID: "5e274f9a-f487-4a76-864b-38f0b4e80ed9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.119905 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/62ef8b2b-1672-4050-9d1a-0dbdef69fcae-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.119930 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56ptz\" (UniqueName: \"kubernetes.io/projected/c8845496-a1b9-4d8e-b60c-b657ceb619f5-kube-api-access-56ptz\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.119944 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f8bbf2c9-35f6-4c9e-a959-6977302e59aa-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.119959 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e274f9a-f487-4a76-864b-38f0b4e80ed9-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.119970 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8845496-a1b9-4d8e-b60c-b657ceb619f5-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.119981 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05299183-b5cc-4655-b5d2-6a89d1f5c138-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.123153 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8bbf2c9-35f6-4c9e-a959-6977302e59aa-kube-api-access-jjxcz" (OuterVolumeSpecName: "kube-api-access-jjxcz") pod "f8bbf2c9-35f6-4c9e-a959-6977302e59aa" (UID: "f8bbf2c9-35f6-4c9e-a959-6977302e59aa"). InnerVolumeSpecName "kube-api-access-jjxcz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.123851 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e274f9a-f487-4a76-864b-38f0b4e80ed9-kube-api-access-wdjhq" (OuterVolumeSpecName: "kube-api-access-wdjhq") pod "5e274f9a-f487-4a76-864b-38f0b4e80ed9" (UID: "5e274f9a-f487-4a76-864b-38f0b4e80ed9"). InnerVolumeSpecName "kube-api-access-wdjhq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.123895 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62ef8b2b-1672-4050-9d1a-0dbdef69fcae-kube-api-access-9jpgn" (OuterVolumeSpecName: "kube-api-access-9jpgn") pod "62ef8b2b-1672-4050-9d1a-0dbdef69fcae" (UID: "62ef8b2b-1672-4050-9d1a-0dbdef69fcae"). InnerVolumeSpecName "kube-api-access-9jpgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.124362 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05299183-b5cc-4655-b5d2-6a89d1f5c138-kube-api-access-7rs6v" (OuterVolumeSpecName: "kube-api-access-7rs6v") pod "05299183-b5cc-4655-b5d2-6a89d1f5c138" (UID: "05299183-b5cc-4655-b5d2-6a89d1f5c138"). InnerVolumeSpecName "kube-api-access-7rs6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.220700 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wdjhq\" (UniqueName: \"kubernetes.io/projected/5e274f9a-f487-4a76-864b-38f0b4e80ed9-kube-api-access-wdjhq\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.220731 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jjxcz\" (UniqueName: \"kubernetes.io/projected/f8bbf2c9-35f6-4c9e-a959-6977302e59aa-kube-api-access-jjxcz\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.220741 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rs6v\" (UniqueName: \"kubernetes.io/projected/05299183-b5cc-4655-b5d2-6a89d1f5c138-kube-api-access-7rs6v\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.220750 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jpgn\" (UniqueName: \"kubernetes.io/projected/62ef8b2b-1672-4050-9d1a-0dbdef69fcae-kube-api-access-9jpgn\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.245416 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-pdmmj" event={"ID":"5ba7d0b8-1d6d-4630-b574-7d0e55409e25","Type":"ContainerDied","Data":"6a1eb4b0858a82267852de4dc5a67e381f9f095841cda418ccb53a7f51309733"} Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.245472 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a1eb4b0858a82267852de4dc5a67e381f9f095841cda418ccb53a7f51309733" Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.245549 4773 util.go:48] "No ready sandbox for pod can be found. 
Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.245549 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-pdmmj"
Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.247741 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8cf3-account-create-update-qqn6s" event={"ID":"c8845496-a1b9-4d8e-b60c-b657ceb619f5","Type":"ContainerDied","Data":"a54d928f1f19923f0dce277d4c4cc83b12d17b022ce630d2c7b302611e1e9880"}
Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.247796 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a54d928f1f19923f0dce277d4c4cc83b12d17b022ce630d2c7b302611e1e9880"
Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.247756 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-8cf3-account-create-update-qqn6s"
Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.250147 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-eb71-account-create-update-ddfzr" event={"ID":"62ef8b2b-1672-4050-9d1a-0dbdef69fcae","Type":"ContainerDied","Data":"a7005ab948d9ca12ca91892ead4a2abdfe06ee19efb5be4d2291d49baadb4bc6"}
Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.250211 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a7005ab948d9ca12ca91892ead4a2abdfe06ee19efb5be4d2291d49baadb4bc6"
Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.250168 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-eb71-account-create-update-ddfzr"
Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.252017 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8744-account-create-update-m2dw9" event={"ID":"05299183-b5cc-4655-b5d2-6a89d1f5c138","Type":"ContainerDied","Data":"2ea11d7fb758f114fe1ff5a749557c1adeb4fa3438c369900e2db72488636232"}
Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.252055 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-8744-account-create-update-m2dw9"
Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.252060 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ea11d7fb758f114fe1ff5a749557c1adeb4fa3438c369900e2db72488636232"
Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.253828 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-sqv54" event={"ID":"f8bbf2c9-35f6-4c9e-a959-6977302e59aa","Type":"ContainerDied","Data":"d3a6972ff767caca928f6b53681b5f51528af04c6b1357da122cd501e03de520"}
Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.253866 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d3a6972ff767caca928f6b53681b5f51528af04c6b1357da122cd501e03de520"
Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.253925 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-sqv54"
Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.256461 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-5xcwn" event={"ID":"5e274f9a-f487-4a76-864b-38f0b4e80ed9","Type":"ContainerDied","Data":"507929d18f89c0b164ab0b2d2aa59779d7fed3911df37db3256ec8e2c3403724"}
Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.256496 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="507929d18f89c0b164ab0b2d2aa59779d7fed3911df37db3256ec8e2c3403724"
Jan 22 12:17:20 crc kubenswrapper[4773]: I0122 12:17:20.256511 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-5xcwn"
Jan 22 12:17:21 crc kubenswrapper[4773]: I0122 12:17:21.268991 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"241ba95d-13d5-49b7-8616-8f227297154d","Type":"ContainerStarted","Data":"600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc"}
Jan 22 12:17:22 crc kubenswrapper[4773]: I0122 12:17:22.280388 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"241ba95d-13d5-49b7-8616-8f227297154d","Type":"ContainerStarted","Data":"b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8"}
Jan 22 12:17:22 crc kubenswrapper[4773]: I0122 12:17:22.280792 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"241ba95d-13d5-49b7-8616-8f227297154d","Type":"ContainerStarted","Data":"e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8"}
Jan 22 12:17:22 crc kubenswrapper[4773]: I0122 12:17:22.855972 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:17:23 crc kubenswrapper[4773]: I0122 12:17:23.531942 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5c54fcb95c-24djg"
Jan 22 12:17:23 crc kubenswrapper[4773]: I0122 12:17:23.668129 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-55bdb45cbd-kgx4q"]
Jan 22 12:17:23 crc kubenswrapper[4773]: I0122 12:17:23.678405 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-55bdb45cbd-kgx4q" podUID="a3132a41-c2bd-4cbb-b319-25982363decb" containerName="neutron-api" containerID="cri-o://f07f56c5958231bdfb779ff9ba539a12ed4d74e6933ec48919e9aa5eef979d6c" gracePeriod=30
Jan 22 12:17:23 crc kubenswrapper[4773]: I0122 12:17:23.679229 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-55bdb45cbd-kgx4q" podUID="a3132a41-c2bd-4cbb-b319-25982363decb" containerName="neutron-httpd" containerID="cri-o://c053b77bbc8f7fc214ae24aaf290772192e2756cd5ebe3bc8c9599a136cd5b5a" gracePeriod=30
Jan 22 12:17:24 crc kubenswrapper[4773]: E0122 12:17:24.066448 4773 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3132a41_c2bd_4cbb_b319_25982363decb.slice/crio-conmon-c053b77bbc8f7fc214ae24aaf290772192e2756cd5ebe3bc8c9599a136cd5b5a.scope\": RecentStats: unable to find data in memory cache]"
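
"Killing container with a grace period ... gracePeriod=30" above is the standard termination handshake: the runtime delivers SIGTERM first and escalates to SIGKILL only if the process is still running when the grace period lapses (neutron-httpd exits 0 within it, below). A self-contained Go sketch of that protocol (a toy supervisor, not the kubelet/CRI code path):

package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// killWithGrace asks the process to exit, then forces it after the deadline.
func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	cmd.Process.Signal(syscall.SIGTERM) // polite request to exit
	select {
	case <-done:
		fmt.Println("exited within grace period")
	case <-time.After(grace):
		cmd.Process.Kill() // grace period elapsed: SIGKILL
		<-done
		fmt.Println("killed after grace period")
	}
}

func main() {
	cmd := exec.Command("sleep", "300")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	killWithGrace(cmd, 2*time.Second) // the entries above use gracePeriod=30
}
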
Jan 22 12:17:24 crc kubenswrapper[4773]: I0122 12:17:24.308101 4773 generic.go:334] "Generic (PLEG): container finished" podID="a3132a41-c2bd-4cbb-b319-25982363decb" containerID="c053b77bbc8f7fc214ae24aaf290772192e2756cd5ebe3bc8c9599a136cd5b5a" exitCode=0
Jan 22 12:17:24 crc kubenswrapper[4773]: I0122 12:17:24.308794 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55bdb45cbd-kgx4q" event={"ID":"a3132a41-c2bd-4cbb-b319-25982363decb","Type":"ContainerDied","Data":"c053b77bbc8f7fc214ae24aaf290772192e2756cd5ebe3bc8c9599a136cd5b5a"}
Jan 22 12:17:24 crc kubenswrapper[4773]: I0122 12:17:24.315703 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"241ba95d-13d5-49b7-8616-8f227297154d","Type":"ContainerStarted","Data":"8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df"}
Jan 22 12:17:24 crc kubenswrapper[4773]: I0122 12:17:24.315915 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="241ba95d-13d5-49b7-8616-8f227297154d" containerName="ceilometer-central-agent" containerID="cri-o://600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc" gracePeriod=30
Jan 22 12:17:24 crc kubenswrapper[4773]: I0122 12:17:24.316225 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Jan 22 12:17:24 crc kubenswrapper[4773]: I0122 12:17:24.316547 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="241ba95d-13d5-49b7-8616-8f227297154d" containerName="proxy-httpd" containerID="cri-o://8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df" gracePeriod=30
Jan 22 12:17:24 crc kubenswrapper[4773]: I0122 12:17:24.316598 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="241ba95d-13d5-49b7-8616-8f227297154d" containerName="sg-core" containerID="cri-o://e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8" gracePeriod=30
Jan 22 12:17:24 crc kubenswrapper[4773]: I0122 12:17:24.316633 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="241ba95d-13d5-49b7-8616-8f227297154d" containerName="ceilometer-notification-agent" containerID="cri-o://b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8" gracePeriod=30
Jan 22 12:17:24 crc kubenswrapper[4773]: I0122 12:17:24.352495 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.773226744 podStartE2EDuration="7.35247474s" podCreationTimestamp="2026-01-22 12:17:17 +0000 UTC" firstStartedPulling="2026-01-22 12:17:18.512202906 +0000 UTC m=+1346.090318731" lastFinishedPulling="2026-01-22 12:17:23.091450902 +0000 UTC m=+1350.669566727" observedRunningTime="2026-01-22 12:17:24.344555117 +0000 UTC m=+1351.922670952" watchObservedRunningTime="2026-01-22 12:17:24.35247474 +0000 UTC m=+1351.930590565"
Jan 22 12:17:24 crc kubenswrapper[4773]: I0122 12:17:24.359718 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:24 crc kubenswrapper[4773]: I0122 12:17:24.360454 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6f844cff7c-gmg8h"
Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.243795 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.328888 4773 generic.go:334] "Generic (PLEG): container finished" podID="241ba95d-13d5-49b7-8616-8f227297154d" containerID="8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df" exitCode=0
Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.328919 4773 generic.go:334] "Generic (PLEG): container finished" podID="241ba95d-13d5-49b7-8616-8f227297154d" containerID="e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8" exitCode=2
Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.328929 4773 generic.go:334] "Generic (PLEG): container finished" podID="241ba95d-13d5-49b7-8616-8f227297154d" containerID="b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8" exitCode=0
Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.328937 4773 generic.go:334] "Generic (PLEG): container finished" podID="241ba95d-13d5-49b7-8616-8f227297154d" containerID="600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc" exitCode=0
Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.328968 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.329027 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"241ba95d-13d5-49b7-8616-8f227297154d","Type":"ContainerDied","Data":"8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df"}
Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.329057 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"241ba95d-13d5-49b7-8616-8f227297154d","Type":"ContainerDied","Data":"e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8"}
Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.329068 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"241ba95d-13d5-49b7-8616-8f227297154d","Type":"ContainerDied","Data":"b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8"}
Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.329076 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"241ba95d-13d5-49b7-8616-8f227297154d","Type":"ContainerDied","Data":"600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc"}
Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.329085 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"241ba95d-13d5-49b7-8616-8f227297154d","Type":"ContainerDied","Data":"15d1f96339c3c2f6278fbfda10282b7a88813d3211b8e26b86b86801e9e1ece8"}
Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.329100 4773 scope.go:117] "RemoveContainer" containerID="8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df"
Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.350231 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-config-data\") pod \"241ba95d-13d5-49b7-8616-8f227297154d\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") "
\"241ba95d-13d5-49b7-8616-8f227297154d\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.350339 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-combined-ca-bundle\") pod \"241ba95d-13d5-49b7-8616-8f227297154d\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.350377 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tp5g6\" (UniqueName: \"kubernetes.io/projected/241ba95d-13d5-49b7-8616-8f227297154d-kube-api-access-tp5g6\") pod \"241ba95d-13d5-49b7-8616-8f227297154d\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.350404 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-scripts\") pod \"241ba95d-13d5-49b7-8616-8f227297154d\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.350452 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/241ba95d-13d5-49b7-8616-8f227297154d-log-httpd\") pod \"241ba95d-13d5-49b7-8616-8f227297154d\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.350662 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/241ba95d-13d5-49b7-8616-8f227297154d-run-httpd\") pod \"241ba95d-13d5-49b7-8616-8f227297154d\" (UID: \"241ba95d-13d5-49b7-8616-8f227297154d\") " Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.350989 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/241ba95d-13d5-49b7-8616-8f227297154d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "241ba95d-13d5-49b7-8616-8f227297154d" (UID: "241ba95d-13d5-49b7-8616-8f227297154d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.351065 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/241ba95d-13d5-49b7-8616-8f227297154d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "241ba95d-13d5-49b7-8616-8f227297154d" (UID: "241ba95d-13d5-49b7-8616-8f227297154d"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.351192 4773 scope.go:117] "RemoveContainer" containerID="e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.352836 4773 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/241ba95d-13d5-49b7-8616-8f227297154d-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.352863 4773 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/241ba95d-13d5-49b7-8616-8f227297154d-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.357051 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/241ba95d-13d5-49b7-8616-8f227297154d-kube-api-access-tp5g6" (OuterVolumeSpecName: "kube-api-access-tp5g6") pod "241ba95d-13d5-49b7-8616-8f227297154d" (UID: "241ba95d-13d5-49b7-8616-8f227297154d"). InnerVolumeSpecName "kube-api-access-tp5g6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.358632 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-scripts" (OuterVolumeSpecName: "scripts") pod "241ba95d-13d5-49b7-8616-8f227297154d" (UID: "241ba95d-13d5-49b7-8616-8f227297154d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.384198 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "241ba95d-13d5-49b7-8616-8f227297154d" (UID: "241ba95d-13d5-49b7-8616-8f227297154d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.389049 4773 scope.go:117] "RemoveContainer" containerID="b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.425616 4773 scope.go:117] "RemoveContainer" containerID="600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.452037 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "241ba95d-13d5-49b7-8616-8f227297154d" (UID: "241ba95d-13d5-49b7-8616-8f227297154d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.454835 4773 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.454859 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.454870 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tp5g6\" (UniqueName: \"kubernetes.io/projected/241ba95d-13d5-49b7-8616-8f227297154d-kube-api-access-tp5g6\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.454881 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.487412 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-config-data" (OuterVolumeSpecName: "config-data") pod "241ba95d-13d5-49b7-8616-8f227297154d" (UID: "241ba95d-13d5-49b7-8616-8f227297154d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.549452 4773 scope.go:117] "RemoveContainer" containerID="8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df" Jan 22 12:17:25 crc kubenswrapper[4773]: E0122 12:17:25.550595 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df\": container with ID starting with 8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df not found: ID does not exist" containerID="8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.550647 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df"} err="failed to get container status \"8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df\": rpc error: code = NotFound desc = could not find container \"8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df\": container with ID starting with 8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df not found: ID does not exist" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.550685 4773 scope.go:117] "RemoveContainer" containerID="e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8" Jan 22 12:17:25 crc kubenswrapper[4773]: E0122 12:17:25.551004 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8\": container with ID starting with e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8 not found: ID does not exist" containerID="e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.551037 4773 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8"} err="failed to get container status \"e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8\": rpc error: code = NotFound desc = could not find container \"e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8\": container with ID starting with e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8 not found: ID does not exist" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.551061 4773 scope.go:117] "RemoveContainer" containerID="b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8" Jan 22 12:17:25 crc kubenswrapper[4773]: E0122 12:17:25.552417 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8\": container with ID starting with b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8 not found: ID does not exist" containerID="b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.552475 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8"} err="failed to get container status \"b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8\": rpc error: code = NotFound desc = could not find container \"b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8\": container with ID starting with b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8 not found: ID does not exist" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.552509 4773 scope.go:117] "RemoveContainer" containerID="600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc" Jan 22 12:17:25 crc kubenswrapper[4773]: E0122 12:17:25.553175 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc\": container with ID starting with 600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc not found: ID does not exist" containerID="600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.553228 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc"} err="failed to get container status \"600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc\": rpc error: code = NotFound desc = could not find container \"600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc\": container with ID starting with 600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc not found: ID does not exist" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.553258 4773 scope.go:117] "RemoveContainer" containerID="8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.553718 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df"} err="failed to get container status \"8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df\": rpc error: code = NotFound desc = could not find container 
\"8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df\": container with ID starting with 8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df not found: ID does not exist" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.553754 4773 scope.go:117] "RemoveContainer" containerID="e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.554147 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8"} err="failed to get container status \"e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8\": rpc error: code = NotFound desc = could not find container \"e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8\": container with ID starting with e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8 not found: ID does not exist" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.554172 4773 scope.go:117] "RemoveContainer" containerID="b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.554418 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8"} err="failed to get container status \"b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8\": rpc error: code = NotFound desc = could not find container \"b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8\": container with ID starting with b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8 not found: ID does not exist" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.554435 4773 scope.go:117] "RemoveContainer" containerID="600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.554743 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc"} err="failed to get container status \"600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc\": rpc error: code = NotFound desc = could not find container \"600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc\": container with ID starting with 600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc not found: ID does not exist" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.554782 4773 scope.go:117] "RemoveContainer" containerID="8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.555112 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df"} err="failed to get container status \"8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df\": rpc error: code = NotFound desc = could not find container \"8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df\": container with ID starting with 8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df not found: ID does not exist" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.555130 4773 scope.go:117] "RemoveContainer" containerID="e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.555422 4773 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8"} err="failed to get container status \"e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8\": rpc error: code = NotFound desc = could not find container \"e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8\": container with ID starting with e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8 not found: ID does not exist" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.555462 4773 scope.go:117] "RemoveContainer" containerID="b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.556347 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8"} err="failed to get container status \"b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8\": rpc error: code = NotFound desc = could not find container \"b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8\": container with ID starting with b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8 not found: ID does not exist" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.556375 4773 scope.go:117] "RemoveContainer" containerID="600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.556635 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc"} err="failed to get container status \"600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc\": rpc error: code = NotFound desc = could not find container \"600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc\": container with ID starting with 600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc not found: ID does not exist" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.556681 4773 scope.go:117] "RemoveContainer" containerID="8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.556992 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df"} err="failed to get container status \"8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df\": rpc error: code = NotFound desc = could not find container \"8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df\": container with ID starting with 8e750e57addd9e042f498ac5c935a75a173571cd5bae5651bb9e592233c6b3df not found: ID does not exist" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.557020 4773 scope.go:117] "RemoveContainer" containerID="e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.557213 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/241ba95d-13d5-49b7-8616-8f227297154d-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.557336 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8"} err="failed to get container status 
\"e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8\": rpc error: code = NotFound desc = could not find container \"e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8\": container with ID starting with e2b6cf43cd317af8f3553330c4ba21368239ebc217ec87c5a1b6e1e6f43cf5d8 not found: ID does not exist" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.557369 4773 scope.go:117] "RemoveContainer" containerID="b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.557719 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8"} err="failed to get container status \"b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8\": rpc error: code = NotFound desc = could not find container \"b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8\": container with ID starting with b3ebddb309e596bf8afd651742f2c947d1eddda1b817297b33624a6958045dc8 not found: ID does not exist" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.557763 4773 scope.go:117] "RemoveContainer" containerID="600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.558081 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc"} err="failed to get container status \"600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc\": rpc error: code = NotFound desc = could not find container \"600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc\": container with ID starting with 600e3bbec8337fb68b482fca72911bd2a33019ef3ed338ba8abba76221e753cc not found: ID does not exist" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.666099 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.675887 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.688198 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:17:25 crc kubenswrapper[4773]: E0122 12:17:25.689103 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="241ba95d-13d5-49b7-8616-8f227297154d" containerName="ceilometer-notification-agent" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.689129 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="241ba95d-13d5-49b7-8616-8f227297154d" containerName="ceilometer-notification-agent" Jan 22 12:17:25 crc kubenswrapper[4773]: E0122 12:17:25.689147 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8845496-a1b9-4d8e-b60c-b657ceb619f5" containerName="mariadb-account-create-update" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.689156 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8845496-a1b9-4d8e-b60c-b657ceb619f5" containerName="mariadb-account-create-update" Jan 22 12:17:25 crc kubenswrapper[4773]: E0122 12:17:25.689177 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e274f9a-f487-4a76-864b-38f0b4e80ed9" containerName="mariadb-database-create" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.689184 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e274f9a-f487-4a76-864b-38f0b4e80ed9" 
containerName="mariadb-database-create" Jan 22 12:17:25 crc kubenswrapper[4773]: E0122 12:17:25.689198 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="241ba95d-13d5-49b7-8616-8f227297154d" containerName="ceilometer-central-agent" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.689210 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="241ba95d-13d5-49b7-8616-8f227297154d" containerName="ceilometer-central-agent" Jan 22 12:17:25 crc kubenswrapper[4773]: E0122 12:17:25.689224 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05299183-b5cc-4655-b5d2-6a89d1f5c138" containerName="mariadb-account-create-update" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.689232 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="05299183-b5cc-4655-b5d2-6a89d1f5c138" containerName="mariadb-account-create-update" Jan 22 12:17:25 crc kubenswrapper[4773]: E0122 12:17:25.689245 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8bbf2c9-35f6-4c9e-a959-6977302e59aa" containerName="mariadb-database-create" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.689253 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8bbf2c9-35f6-4c9e-a959-6977302e59aa" containerName="mariadb-database-create" Jan 22 12:17:25 crc kubenswrapper[4773]: E0122 12:17:25.689271 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="241ba95d-13d5-49b7-8616-8f227297154d" containerName="sg-core" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.691073 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="241ba95d-13d5-49b7-8616-8f227297154d" containerName="sg-core" Jan 22 12:17:25 crc kubenswrapper[4773]: E0122 12:17:25.695590 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62ef8b2b-1672-4050-9d1a-0dbdef69fcae" containerName="mariadb-account-create-update" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.695615 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="62ef8b2b-1672-4050-9d1a-0dbdef69fcae" containerName="mariadb-account-create-update" Jan 22 12:17:25 crc kubenswrapper[4773]: E0122 12:17:25.695627 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="241ba95d-13d5-49b7-8616-8f227297154d" containerName="proxy-httpd" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.695633 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="241ba95d-13d5-49b7-8616-8f227297154d" containerName="proxy-httpd" Jan 22 12:17:25 crc kubenswrapper[4773]: E0122 12:17:25.695651 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ba7d0b8-1d6d-4630-b574-7d0e55409e25" containerName="mariadb-database-create" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.695657 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ba7d0b8-1d6d-4630-b574-7d0e55409e25" containerName="mariadb-database-create" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.696086 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="62ef8b2b-1672-4050-9d1a-0dbdef69fcae" containerName="mariadb-account-create-update" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.696110 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8845496-a1b9-4d8e-b60c-b657ceb619f5" containerName="mariadb-account-create-update" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.696132 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="241ba95d-13d5-49b7-8616-8f227297154d" containerName="ceilometer-notification-agent" 
Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.696146 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e274f9a-f487-4a76-864b-38f0b4e80ed9" containerName="mariadb-database-create" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.696155 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8bbf2c9-35f6-4c9e-a959-6977302e59aa" containerName="mariadb-database-create" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.696169 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ba7d0b8-1d6d-4630-b574-7d0e55409e25" containerName="mariadb-database-create" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.696184 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="05299183-b5cc-4655-b5d2-6a89d1f5c138" containerName="mariadb-account-create-update" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.696195 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="241ba95d-13d5-49b7-8616-8f227297154d" containerName="sg-core" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.696204 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="241ba95d-13d5-49b7-8616-8f227297154d" containerName="proxy-httpd" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.696211 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="241ba95d-13d5-49b7-8616-8f227297154d" containerName="ceilometer-central-agent" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.698221 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.701648 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.702463 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.703832 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.760171 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-scripts\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.760265 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-config-data\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.760407 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zc5d\" (UniqueName: \"kubernetes.io/projected/7e4f010e-3095-412b-a4bb-d34821aadc38-kube-api-access-4zc5d\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.760434 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.760495 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e4f010e-3095-412b-a4bb-d34821aadc38-log-httpd\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.760514 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.760597 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e4f010e-3095-412b-a4bb-d34821aadc38-run-httpd\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.862209 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-scripts\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.862948 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-config-data\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.863079 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zc5d\" (UniqueName: \"kubernetes.io/projected/7e4f010e-3095-412b-a4bb-d34821aadc38-kube-api-access-4zc5d\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.863107 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.863142 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e4f010e-3095-412b-a4bb-d34821aadc38-log-httpd\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.863158 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.863190 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/7e4f010e-3095-412b-a4bb-d34821aadc38-run-httpd\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.863916 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e4f010e-3095-412b-a4bb-d34821aadc38-log-httpd\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.863963 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e4f010e-3095-412b-a4bb-d34821aadc38-run-httpd\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.866405 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-scripts\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.868332 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.871392 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-config-data\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.872263 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.900386 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zc5d\" (UniqueName: \"kubernetes.io/projected/7e4f010e-3095-412b-a4bb-d34821aadc38-kube-api-access-4zc5d\") pod \"ceilometer-0\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " pod="openstack/ceilometer-0" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.955618 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-n2bkh"] Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.956757 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-n2bkh" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.960361 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-xjgc7" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.960404 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.960712 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 22 12:17:25 crc kubenswrapper[4773]: I0122 12:17:25.978011 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-n2bkh"] Jan 22 12:17:26 crc kubenswrapper[4773]: I0122 12:17:26.068662 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e18e0db7-6f23-4114-82cd-3d1d393db415-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-n2bkh\" (UID: \"e18e0db7-6f23-4114-82cd-3d1d393db415\") " pod="openstack/nova-cell0-conductor-db-sync-n2bkh" Jan 22 12:17:26 crc kubenswrapper[4773]: I0122 12:17:26.068798 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e18e0db7-6f23-4114-82cd-3d1d393db415-config-data\") pod \"nova-cell0-conductor-db-sync-n2bkh\" (UID: \"e18e0db7-6f23-4114-82cd-3d1d393db415\") " pod="openstack/nova-cell0-conductor-db-sync-n2bkh" Jan 22 12:17:26 crc kubenswrapper[4773]: I0122 12:17:26.068895 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wwnr\" (UniqueName: \"kubernetes.io/projected/e18e0db7-6f23-4114-82cd-3d1d393db415-kube-api-access-9wwnr\") pod \"nova-cell0-conductor-db-sync-n2bkh\" (UID: \"e18e0db7-6f23-4114-82cd-3d1d393db415\") " pod="openstack/nova-cell0-conductor-db-sync-n2bkh" Jan 22 12:17:26 crc kubenswrapper[4773]: I0122 12:17:26.068926 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e18e0db7-6f23-4114-82cd-3d1d393db415-scripts\") pod \"nova-cell0-conductor-db-sync-n2bkh\" (UID: \"e18e0db7-6f23-4114-82cd-3d1d393db415\") " pod="openstack/nova-cell0-conductor-db-sync-n2bkh" Jan 22 12:17:26 crc kubenswrapper[4773]: I0122 12:17:26.077496 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 12:17:26 crc kubenswrapper[4773]: I0122 12:17:26.171086 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wwnr\" (UniqueName: \"kubernetes.io/projected/e18e0db7-6f23-4114-82cd-3d1d393db415-kube-api-access-9wwnr\") pod \"nova-cell0-conductor-db-sync-n2bkh\" (UID: \"e18e0db7-6f23-4114-82cd-3d1d393db415\") " pod="openstack/nova-cell0-conductor-db-sync-n2bkh" Jan 22 12:17:26 crc kubenswrapper[4773]: I0122 12:17:26.171257 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e18e0db7-6f23-4114-82cd-3d1d393db415-scripts\") pod \"nova-cell0-conductor-db-sync-n2bkh\" (UID: \"e18e0db7-6f23-4114-82cd-3d1d393db415\") " pod="openstack/nova-cell0-conductor-db-sync-n2bkh" Jan 22 12:17:26 crc kubenswrapper[4773]: I0122 12:17:26.172132 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e18e0db7-6f23-4114-82cd-3d1d393db415-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-n2bkh\" (UID: \"e18e0db7-6f23-4114-82cd-3d1d393db415\") " pod="openstack/nova-cell0-conductor-db-sync-n2bkh" Jan 22 12:17:26 crc kubenswrapper[4773]: I0122 12:17:26.172584 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e18e0db7-6f23-4114-82cd-3d1d393db415-config-data\") pod \"nova-cell0-conductor-db-sync-n2bkh\" (UID: \"e18e0db7-6f23-4114-82cd-3d1d393db415\") " pod="openstack/nova-cell0-conductor-db-sync-n2bkh" Jan 22 12:17:26 crc kubenswrapper[4773]: I0122 12:17:26.176914 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e18e0db7-6f23-4114-82cd-3d1d393db415-scripts\") pod \"nova-cell0-conductor-db-sync-n2bkh\" (UID: \"e18e0db7-6f23-4114-82cd-3d1d393db415\") " pod="openstack/nova-cell0-conductor-db-sync-n2bkh" Jan 22 12:17:26 crc kubenswrapper[4773]: I0122 12:17:26.177489 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e18e0db7-6f23-4114-82cd-3d1d393db415-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-n2bkh\" (UID: \"e18e0db7-6f23-4114-82cd-3d1d393db415\") " pod="openstack/nova-cell0-conductor-db-sync-n2bkh" Jan 22 12:17:26 crc kubenswrapper[4773]: I0122 12:17:26.179366 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e18e0db7-6f23-4114-82cd-3d1d393db415-config-data\") pod \"nova-cell0-conductor-db-sync-n2bkh\" (UID: \"e18e0db7-6f23-4114-82cd-3d1d393db415\") " pod="openstack/nova-cell0-conductor-db-sync-n2bkh" Jan 22 12:17:26 crc kubenswrapper[4773]: I0122 12:17:26.198521 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wwnr\" (UniqueName: \"kubernetes.io/projected/e18e0db7-6f23-4114-82cd-3d1d393db415-kube-api-access-9wwnr\") pod \"nova-cell0-conductor-db-sync-n2bkh\" (UID: \"e18e0db7-6f23-4114-82cd-3d1d393db415\") " pod="openstack/nova-cell0-conductor-db-sync-n2bkh" Jan 22 12:17:26 crc kubenswrapper[4773]: I0122 12:17:26.285970 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-n2bkh" Jan 22 12:17:26 crc kubenswrapper[4773]: I0122 12:17:26.565907 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:17:26 crc kubenswrapper[4773]: I0122 12:17:26.603960 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-n2bkh"] Jan 22 12:17:26 crc kubenswrapper[4773]: I0122 12:17:26.669307 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="241ba95d-13d5-49b7-8616-8f227297154d" path="/var/lib/kubelet/pods/241ba95d-13d5-49b7-8616-8f227297154d/volumes" Jan 22 12:17:27 crc kubenswrapper[4773]: I0122 12:17:27.376476 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:17:27 crc kubenswrapper[4773]: I0122 12:17:27.394814 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e4f010e-3095-412b-a4bb-d34821aadc38","Type":"ContainerStarted","Data":"4071c754b4f05d91eb197692e85275da69b27fd2ce2c0c2e7242b59f6c5406b3"} Jan 22 12:17:27 crc kubenswrapper[4773]: I0122 12:17:27.395130 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e4f010e-3095-412b-a4bb-d34821aadc38","Type":"ContainerStarted","Data":"39e8e326e1552c4ece7f6aeffd5f180776ccd6932f80478f220714e736c98688"} Jan 22 12:17:27 crc kubenswrapper[4773]: I0122 12:17:27.399547 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-n2bkh" event={"ID":"e18e0db7-6f23-4114-82cd-3d1d393db415","Type":"ContainerStarted","Data":"0a9c7ab24425325496a715763ac3b01e7149358b0496c174d13c72d28ff15e0e"} Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.281534 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.434459 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e4f010e-3095-412b-a4bb-d34821aadc38","Type":"ContainerStarted","Data":"a676f371eccc38a732da10717a7fd9c02f26f12d9c6697338574eff1bb2723dc"} Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.442478 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbgjk\" (UniqueName: \"kubernetes.io/projected/a3132a41-c2bd-4cbb-b319-25982363decb-kube-api-access-sbgjk\") pod \"a3132a41-c2bd-4cbb-b319-25982363decb\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.442686 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-httpd-config\") pod \"a3132a41-c2bd-4cbb-b319-25982363decb\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.442722 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-ovndb-tls-certs\") pod \"a3132a41-c2bd-4cbb-b319-25982363decb\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.443017 4773 generic.go:334] "Generic (PLEG): container finished" podID="a3132a41-c2bd-4cbb-b319-25982363decb" containerID="f07f56c5958231bdfb779ff9ba539a12ed4d74e6933ec48919e9aa5eef979d6c" exitCode=0 Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.443089 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55bdb45cbd-kgx4q" event={"ID":"a3132a41-c2bd-4cbb-b319-25982363decb","Type":"ContainerDied","Data":"f07f56c5958231bdfb779ff9ba539a12ed4d74e6933ec48919e9aa5eef979d6c"} Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.443133 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-55bdb45cbd-kgx4q" Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.443143 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-55bdb45cbd-kgx4q" event={"ID":"a3132a41-c2bd-4cbb-b319-25982363decb","Type":"ContainerDied","Data":"8fae5ebcb3266c7cdd0041a297774d2cb2a3f340d7159b66ff0e6c4eedc41233"} Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.443174 4773 scope.go:117] "RemoveContainer" containerID="c053b77bbc8f7fc214ae24aaf290772192e2756cd5ebe3bc8c9599a136cd5b5a" Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.445840 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-config\") pod \"a3132a41-c2bd-4cbb-b319-25982363decb\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.445998 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-combined-ca-bundle\") pod \"a3132a41-c2bd-4cbb-b319-25982363decb\" (UID: \"a3132a41-c2bd-4cbb-b319-25982363decb\") " Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.450634 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3132a41-c2bd-4cbb-b319-25982363decb-kube-api-access-sbgjk" (OuterVolumeSpecName: "kube-api-access-sbgjk") pod "a3132a41-c2bd-4cbb-b319-25982363decb" (UID: "a3132a41-c2bd-4cbb-b319-25982363decb"). InnerVolumeSpecName "kube-api-access-sbgjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.467810 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "a3132a41-c2bd-4cbb-b319-25982363decb" (UID: "a3132a41-c2bd-4cbb-b319-25982363decb"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.475900 4773 scope.go:117] "RemoveContainer" containerID="f07f56c5958231bdfb779ff9ba539a12ed4d74e6933ec48919e9aa5eef979d6c" Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.539867 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a3132a41-c2bd-4cbb-b319-25982363decb" (UID: "a3132a41-c2bd-4cbb-b319-25982363decb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.545903 4773 scope.go:117] "RemoveContainer" containerID="c053b77bbc8f7fc214ae24aaf290772192e2756cd5ebe3bc8c9599a136cd5b5a" Jan 22 12:17:28 crc kubenswrapper[4773]: E0122 12:17:28.547046 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c053b77bbc8f7fc214ae24aaf290772192e2756cd5ebe3bc8c9599a136cd5b5a\": container with ID starting with c053b77bbc8f7fc214ae24aaf290772192e2756cd5ebe3bc8c9599a136cd5b5a not found: ID does not exist" containerID="c053b77bbc8f7fc214ae24aaf290772192e2756cd5ebe3bc8c9599a136cd5b5a" Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.547206 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c053b77bbc8f7fc214ae24aaf290772192e2756cd5ebe3bc8c9599a136cd5b5a"} err="failed to get container status \"c053b77bbc8f7fc214ae24aaf290772192e2756cd5ebe3bc8c9599a136cd5b5a\": rpc error: code = NotFound desc = could not find container \"c053b77bbc8f7fc214ae24aaf290772192e2756cd5ebe3bc8c9599a136cd5b5a\": container with ID starting with c053b77bbc8f7fc214ae24aaf290772192e2756cd5ebe3bc8c9599a136cd5b5a not found: ID does not exist" Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.547259 4773 scope.go:117] "RemoveContainer" containerID="f07f56c5958231bdfb779ff9ba539a12ed4d74e6933ec48919e9aa5eef979d6c" Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.549458 4773 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.549482 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.549495 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbgjk\" (UniqueName: \"kubernetes.io/projected/a3132a41-c2bd-4cbb-b319-25982363decb-kube-api-access-sbgjk\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:28 crc kubenswrapper[4773]: E0122 12:17:28.551586 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f07f56c5958231bdfb779ff9ba539a12ed4d74e6933ec48919e9aa5eef979d6c\": container with ID starting with f07f56c5958231bdfb779ff9ba539a12ed4d74e6933ec48919e9aa5eef979d6c not found: ID does not exist" containerID="f07f56c5958231bdfb779ff9ba539a12ed4d74e6933ec48919e9aa5eef979d6c" Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.551632 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f07f56c5958231bdfb779ff9ba539a12ed4d74e6933ec48919e9aa5eef979d6c"} err="failed to get container status \"f07f56c5958231bdfb779ff9ba539a12ed4d74e6933ec48919e9aa5eef979d6c\": rpc error: code = NotFound desc = could not find container \"f07f56c5958231bdfb779ff9ba539a12ed4d74e6933ec48919e9aa5eef979d6c\": container with ID starting with f07f56c5958231bdfb779ff9ba539a12ed4d74e6933ec48919e9aa5eef979d6c not found: ID does not exist" Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.554906 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-config" 
(OuterVolumeSpecName: "config") pod "a3132a41-c2bd-4cbb-b319-25982363decb" (UID: "a3132a41-c2bd-4cbb-b319-25982363decb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.591153 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "a3132a41-c2bd-4cbb-b319-25982363decb" (UID: "a3132a41-c2bd-4cbb-b319-25982363decb"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.659428 4773 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.660087 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/a3132a41-c2bd-4cbb-b319-25982363decb-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.867629 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-55bdb45cbd-kgx4q"] Jan 22 12:17:28 crc kubenswrapper[4773]: I0122 12:17:28.878514 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-55bdb45cbd-kgx4q"] Jan 22 12:17:29 crc kubenswrapper[4773]: I0122 12:17:29.459052 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e4f010e-3095-412b-a4bb-d34821aadc38","Type":"ContainerStarted","Data":"0eb179cdf77fb6c4252b398e3d5ac6541ae37560bcf9d4eb7e3cf41f396eb0c4"} Jan 22 12:17:30 crc kubenswrapper[4773]: I0122 12:17:30.474256 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e4f010e-3095-412b-a4bb-d34821aadc38","Type":"ContainerStarted","Data":"18500ba390867ccefe081a04d88d11577945cda62af10a1336810a72b85b2227"} Jan 22 12:17:30 crc kubenswrapper[4773]: I0122 12:17:30.474701 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 22 12:17:30 crc kubenswrapper[4773]: I0122 12:17:30.474578 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7e4f010e-3095-412b-a4bb-d34821aadc38" containerName="sg-core" containerID="cri-o://0eb179cdf77fb6c4252b398e3d5ac6541ae37560bcf9d4eb7e3cf41f396eb0c4" gracePeriod=30 Jan 22 12:17:30 crc kubenswrapper[4773]: I0122 12:17:30.474482 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7e4f010e-3095-412b-a4bb-d34821aadc38" containerName="ceilometer-central-agent" containerID="cri-o://4071c754b4f05d91eb197692e85275da69b27fd2ce2c0c2e7242b59f6c5406b3" gracePeriod=30 Jan 22 12:17:30 crc kubenswrapper[4773]: I0122 12:17:30.474603 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7e4f010e-3095-412b-a4bb-d34821aadc38" containerName="proxy-httpd" containerID="cri-o://18500ba390867ccefe081a04d88d11577945cda62af10a1336810a72b85b2227" gracePeriod=30 Jan 22 12:17:30 crc kubenswrapper[4773]: I0122 12:17:30.474621 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7e4f010e-3095-412b-a4bb-d34821aadc38" containerName="ceilometer-notification-agent" 
containerID="cri-o://a676f371eccc38a732da10717a7fd9c02f26f12d9c6697338574eff1bb2723dc" gracePeriod=30 Jan 22 12:17:30 crc kubenswrapper[4773]: I0122 12:17:30.503272 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.051799688 podStartE2EDuration="5.503251991s" podCreationTimestamp="2026-01-22 12:17:25 +0000 UTC" firstStartedPulling="2026-01-22 12:17:26.580215492 +0000 UTC m=+1354.158331317" lastFinishedPulling="2026-01-22 12:17:30.031667795 +0000 UTC m=+1357.609783620" observedRunningTime="2026-01-22 12:17:30.499399573 +0000 UTC m=+1358.077515398" watchObservedRunningTime="2026-01-22 12:17:30.503251991 +0000 UTC m=+1358.081367826" Jan 22 12:17:30 crc kubenswrapper[4773]: I0122 12:17:30.697603 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3132a41-c2bd-4cbb-b319-25982363decb" path="/var/lib/kubelet/pods/a3132a41-c2bd-4cbb-b319-25982363decb/volumes" Jan 22 12:17:31 crc kubenswrapper[4773]: I0122 12:17:31.487476 4773 generic.go:334] "Generic (PLEG): container finished" podID="7e4f010e-3095-412b-a4bb-d34821aadc38" containerID="18500ba390867ccefe081a04d88d11577945cda62af10a1336810a72b85b2227" exitCode=0 Jan 22 12:17:31 crc kubenswrapper[4773]: I0122 12:17:31.487913 4773 generic.go:334] "Generic (PLEG): container finished" podID="7e4f010e-3095-412b-a4bb-d34821aadc38" containerID="0eb179cdf77fb6c4252b398e3d5ac6541ae37560bcf9d4eb7e3cf41f396eb0c4" exitCode=2 Jan 22 12:17:31 crc kubenswrapper[4773]: I0122 12:17:31.487927 4773 generic.go:334] "Generic (PLEG): container finished" podID="7e4f010e-3095-412b-a4bb-d34821aadc38" containerID="a676f371eccc38a732da10717a7fd9c02f26f12d9c6697338574eff1bb2723dc" exitCode=0 Jan 22 12:17:31 crc kubenswrapper[4773]: I0122 12:17:31.487655 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e4f010e-3095-412b-a4bb-d34821aadc38","Type":"ContainerDied","Data":"18500ba390867ccefe081a04d88d11577945cda62af10a1336810a72b85b2227"} Jan 22 12:17:31 crc kubenswrapper[4773]: I0122 12:17:31.487966 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e4f010e-3095-412b-a4bb-d34821aadc38","Type":"ContainerDied","Data":"0eb179cdf77fb6c4252b398e3d5ac6541ae37560bcf9d4eb7e3cf41f396eb0c4"} Jan 22 12:17:31 crc kubenswrapper[4773]: I0122 12:17:31.487987 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e4f010e-3095-412b-a4bb-d34821aadc38","Type":"ContainerDied","Data":"a676f371eccc38a732da10717a7fd9c02f26f12d9c6697338574eff1bb2723dc"} Jan 22 12:17:34 crc kubenswrapper[4773]: I0122 12:17:34.073935 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:17:34 crc kubenswrapper[4773]: I0122 12:17:34.074372 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:17:35 crc kubenswrapper[4773]: I0122 12:17:35.536869 4773 generic.go:334] "Generic (PLEG): container finished" podID="7e4f010e-3095-412b-a4bb-d34821aadc38" 
containerID="4071c754b4f05d91eb197692e85275da69b27fd2ce2c0c2e7242b59f6c5406b3" exitCode=0 Jan 22 12:17:35 crc kubenswrapper[4773]: I0122 12:17:35.536937 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e4f010e-3095-412b-a4bb-d34821aadc38","Type":"ContainerDied","Data":"4071c754b4f05d91eb197692e85275da69b27fd2ce2c0c2e7242b59f6c5406b3"} Jan 22 12:17:35 crc kubenswrapper[4773]: I0122 12:17:35.867780 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 12:17:35 crc kubenswrapper[4773]: I0122 12:17:35.981366 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-sg-core-conf-yaml\") pod \"7e4f010e-3095-412b-a4bb-d34821aadc38\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " Jan 22 12:17:35 crc kubenswrapper[4773]: I0122 12:17:35.981422 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-config-data\") pod \"7e4f010e-3095-412b-a4bb-d34821aadc38\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " Jan 22 12:17:35 crc kubenswrapper[4773]: I0122 12:17:35.981457 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-combined-ca-bundle\") pod \"7e4f010e-3095-412b-a4bb-d34821aadc38\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " Jan 22 12:17:35 crc kubenswrapper[4773]: I0122 12:17:35.981509 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e4f010e-3095-412b-a4bb-d34821aadc38-run-httpd\") pod \"7e4f010e-3095-412b-a4bb-d34821aadc38\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " Jan 22 12:17:35 crc kubenswrapper[4773]: I0122 12:17:35.981536 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e4f010e-3095-412b-a4bb-d34821aadc38-log-httpd\") pod \"7e4f010e-3095-412b-a4bb-d34821aadc38\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " Jan 22 12:17:35 crc kubenswrapper[4773]: I0122 12:17:35.981589 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-scripts\") pod \"7e4f010e-3095-412b-a4bb-d34821aadc38\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " Jan 22 12:17:35 crc kubenswrapper[4773]: I0122 12:17:35.981627 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zc5d\" (UniqueName: \"kubernetes.io/projected/7e4f010e-3095-412b-a4bb-d34821aadc38-kube-api-access-4zc5d\") pod \"7e4f010e-3095-412b-a4bb-d34821aadc38\" (UID: \"7e4f010e-3095-412b-a4bb-d34821aadc38\") " Jan 22 12:17:35 crc kubenswrapper[4773]: I0122 12:17:35.982440 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e4f010e-3095-412b-a4bb-d34821aadc38-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7e4f010e-3095-412b-a4bb-d34821aadc38" (UID: "7e4f010e-3095-412b-a4bb-d34821aadc38"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:17:35 crc kubenswrapper[4773]: I0122 12:17:35.982594 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e4f010e-3095-412b-a4bb-d34821aadc38-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7e4f010e-3095-412b-a4bb-d34821aadc38" (UID: "7e4f010e-3095-412b-a4bb-d34821aadc38"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:17:35 crc kubenswrapper[4773]: I0122 12:17:35.985855 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-scripts" (OuterVolumeSpecName: "scripts") pod "7e4f010e-3095-412b-a4bb-d34821aadc38" (UID: "7e4f010e-3095-412b-a4bb-d34821aadc38"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:35 crc kubenswrapper[4773]: I0122 12:17:35.985868 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e4f010e-3095-412b-a4bb-d34821aadc38-kube-api-access-4zc5d" (OuterVolumeSpecName: "kube-api-access-4zc5d") pod "7e4f010e-3095-412b-a4bb-d34821aadc38" (UID: "7e4f010e-3095-412b-a4bb-d34821aadc38"). InnerVolumeSpecName "kube-api-access-4zc5d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.006186 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7e4f010e-3095-412b-a4bb-d34821aadc38" (UID: "7e4f010e-3095-412b-a4bb-d34821aadc38"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.054514 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e4f010e-3095-412b-a4bb-d34821aadc38" (UID: "7e4f010e-3095-412b-a4bb-d34821aadc38"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.084385 4773 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e4f010e-3095-412b-a4bb-d34821aadc38-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.084416 4773 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7e4f010e-3095-412b-a4bb-d34821aadc38-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.084425 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.084433 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zc5d\" (UniqueName: \"kubernetes.io/projected/7e4f010e-3095-412b-a4bb-d34821aadc38-kube-api-access-4zc5d\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.084443 4773 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.084454 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.091758 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-config-data" (OuterVolumeSpecName: "config-data") pod "7e4f010e-3095-412b-a4bb-d34821aadc38" (UID: "7e4f010e-3095-412b-a4bb-d34821aadc38"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.186170 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e4f010e-3095-412b-a4bb-d34821aadc38-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.549122 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7e4f010e-3095-412b-a4bb-d34821aadc38","Type":"ContainerDied","Data":"39e8e326e1552c4ece7f6aeffd5f180776ccd6932f80478f220714e736c98688"} Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.549186 4773 scope.go:117] "RemoveContainer" containerID="18500ba390867ccefe081a04d88d11577945cda62af10a1336810a72b85b2227" Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.549711 4773 util.go:48] "No ready sandbox for pod can be found. 
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.550428 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-n2bkh" event={"ID":"e18e0db7-6f23-4114-82cd-3d1d393db415","Type":"ContainerStarted","Data":"27c3e0c464e09904962671874cb61cd552b7a21244651c606261999643bca822"}
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.575634 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-n2bkh" podStartSLOduration=2.500650261 podStartE2EDuration="11.575613228s" podCreationTimestamp="2026-01-22 12:17:25 +0000 UTC" firstStartedPulling="2026-01-22 12:17:26.620446163 +0000 UTC m=+1354.198561988" lastFinishedPulling="2026-01-22 12:17:35.69540913 +0000 UTC m=+1363.273524955" observedRunningTime="2026-01-22 12:17:36.569108245 +0000 UTC m=+1364.147224090" watchObservedRunningTime="2026-01-22 12:17:36.575613228 +0000 UTC m=+1364.153729053"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.591719 4773 scope.go:117] "RemoveContainer" containerID="0eb179cdf77fb6c4252b398e3d5ac6541ae37560bcf9d4eb7e3cf41f396eb0c4"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.594561 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.614750 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.623522 4773 scope.go:117] "RemoveContainer" containerID="a676f371eccc38a732da10717a7fd9c02f26f12d9c6697338574eff1bb2723dc"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.646949 4773 scope.go:117] "RemoveContainer" containerID="4071c754b4f05d91eb197692e85275da69b27fd2ce2c0c2e7242b59f6c5406b3"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.670471 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e4f010e-3095-412b-a4bb-d34821aadc38" path="/var/lib/kubelet/pods/7e4f010e-3095-412b-a4bb-d34821aadc38/volumes"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.671331 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:17:36 crc kubenswrapper[4773]: E0122 12:17:36.671686 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3132a41-c2bd-4cbb-b319-25982363decb" containerName="neutron-api"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.671710 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3132a41-c2bd-4cbb-b319-25982363decb" containerName="neutron-api"
Jan 22 12:17:36 crc kubenswrapper[4773]: E0122 12:17:36.671725 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e4f010e-3095-412b-a4bb-d34821aadc38" containerName="ceilometer-notification-agent"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.671733 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e4f010e-3095-412b-a4bb-d34821aadc38" containerName="ceilometer-notification-agent"
Jan 22 12:17:36 crc kubenswrapper[4773]: E0122 12:17:36.671759 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e4f010e-3095-412b-a4bb-d34821aadc38" containerName="proxy-httpd"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.671766 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e4f010e-3095-412b-a4bb-d34821aadc38" containerName="proxy-httpd"
Jan 22 12:17:36 crc kubenswrapper[4773]: E0122 12:17:36.671790 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e4f010e-3095-412b-a4bb-d34821aadc38" containerName="ceilometer-central-agent"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.671797 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e4f010e-3095-412b-a4bb-d34821aadc38" containerName="ceilometer-central-agent"
Jan 22 12:17:36 crc kubenswrapper[4773]: E0122 12:17:36.671807 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e4f010e-3095-412b-a4bb-d34821aadc38" containerName="sg-core"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.671814 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e4f010e-3095-412b-a4bb-d34821aadc38" containerName="sg-core"
Jan 22 12:17:36 crc kubenswrapper[4773]: E0122 12:17:36.671837 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3132a41-c2bd-4cbb-b319-25982363decb" containerName="neutron-httpd"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.671846 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3132a41-c2bd-4cbb-b319-25982363decb" containerName="neutron-httpd"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.672057 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e4f010e-3095-412b-a4bb-d34821aadc38" containerName="ceilometer-central-agent"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.672072 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e4f010e-3095-412b-a4bb-d34821aadc38" containerName="sg-core"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.672084 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3132a41-c2bd-4cbb-b319-25982363decb" containerName="neutron-httpd"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.672099 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3132a41-c2bd-4cbb-b319-25982363decb" containerName="neutron-api"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.672113 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e4f010e-3095-412b-a4bb-d34821aadc38" containerName="ceilometer-notification-agent"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.672128 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e4f010e-3095-412b-a4bb-d34821aadc38" containerName="proxy-httpd"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.674043 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.674139 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.679749 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.680346 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.797346 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t75nw\" (UniqueName: \"kubernetes.io/projected/2e6c44d1-387c-490e-99d8-25077af18d64-kube-api-access-t75nw\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.797472 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-config-data\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.797537 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-scripts\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.797610 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e6c44d1-387c-490e-99d8-25077af18d64-log-httpd\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.797760 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.797790 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e6c44d1-387c-490e-99d8-25077af18d64-run-httpd\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.797832 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.899685 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.899746 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e6c44d1-387c-490e-99d8-25077af18d64-run-httpd\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.899798 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.899865 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t75nw\" (UniqueName: \"kubernetes.io/projected/2e6c44d1-387c-490e-99d8-25077af18d64-kube-api-access-t75nw\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.899902 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-config-data\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.899955 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-scripts\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.899991 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e6c44d1-387c-490e-99d8-25077af18d64-log-httpd\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.900745 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e6c44d1-387c-490e-99d8-25077af18d64-run-httpd\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.900946 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e6c44d1-387c-490e-99d8-25077af18d64-log-httpd\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.905473 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-scripts\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.905630 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.920093 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.925028 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t75nw\" (UniqueName: \"kubernetes.io/projected/2e6c44d1-387c-490e-99d8-25077af18d64-kube-api-access-t75nw\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.926497 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-config-data\") pod \"ceilometer-0\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " pod="openstack/ceilometer-0"
Jan 22 12:17:36 crc kubenswrapper[4773]: I0122 12:17:36.993797 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 22 12:17:37 crc kubenswrapper[4773]: I0122 12:17:37.459231 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:17:37 crc kubenswrapper[4773]: W0122 12:17:37.460784 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e6c44d1_387c_490e_99d8_25077af18d64.slice/crio-d40bcc8474dd6268ab77f51efc57af36e9d6ba594985bc3d706c19a1ca51dbf6 WatchSource:0}: Error finding container d40bcc8474dd6268ab77f51efc57af36e9d6ba594985bc3d706c19a1ca51dbf6: Status 404 returned error can't find the container with id d40bcc8474dd6268ab77f51efc57af36e9d6ba594985bc3d706c19a1ca51dbf6
Jan 22 12:17:37 crc kubenswrapper[4773]: I0122 12:17:37.559804 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e6c44d1-387c-490e-99d8-25077af18d64","Type":"ContainerStarted","Data":"d40bcc8474dd6268ab77f51efc57af36e9d6ba594985bc3d706c19a1ca51dbf6"}
Jan 22 12:17:38 crc kubenswrapper[4773]: I0122 12:17:38.571487 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e6c44d1-387c-490e-99d8-25077af18d64","Type":"ContainerStarted","Data":"f88c3e6422458580573cbc58ef0c8d6428aa854a203f09c4b3c9e5b2059f08ef"}
Jan 22 12:17:39 crc kubenswrapper[4773]: I0122 12:17:39.587832 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e6c44d1-387c-490e-99d8-25077af18d64","Type":"ContainerStarted","Data":"acc0c166bd9567e1644b0bf83b7f0e2346ffc99b5f2c6008fca0b0f1710b4bec"}
Jan 22 12:17:40 crc kubenswrapper[4773]: I0122 12:17:40.602272 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e6c44d1-387c-490e-99d8-25077af18d64","Type":"ContainerStarted","Data":"3f3e8dc4381e974171ad605c515457cd51150a8e9e80e9ee5ffd69cda3af274a"}
Jan 22 12:17:41 crc kubenswrapper[4773]: I0122 12:17:41.615592 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 22 12:17:41 crc kubenswrapper[4773]: I0122 12:17:41.616126 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e6c44d1-387c-490e-99d8-25077af18d64","Type":"ContainerStarted","Data":"e0faf2eb9d8edd583d89d791aeb0e4e3f1dc6122464e056e074478bfc446242d"}
Jan 22 12:17:41 crc kubenswrapper[4773]: I0122 12:17:41.616426 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c5afcb43-61a0-47ad-923e-ca320d6ca49f" containerName="glance-log" containerID="cri-o://62a809250ea715b7545720b8c98b1d4eacbcabc63db058498f2e5801c049400f" gracePeriod=30
pod="openstack/glance-default-external-api-0" podUID="c5afcb43-61a0-47ad-923e-ca320d6ca49f" containerName="glance-log" containerID="cri-o://62a809250ea715b7545720b8c98b1d4eacbcabc63db058498f2e5801c049400f" gracePeriod=30 Jan 22 12:17:41 crc kubenswrapper[4773]: I0122 12:17:41.616498 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c5afcb43-61a0-47ad-923e-ca320d6ca49f" containerName="glance-httpd" containerID="cri-o://e0924ddb10c7e40d89685c54ed6aeb7d7a8edc28249c54bea1999a2b871f9f89" gracePeriod=30 Jan 22 12:17:41 crc kubenswrapper[4773]: I0122 12:17:41.658725 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.4815328770000002 podStartE2EDuration="5.658703482s" podCreationTimestamp="2026-01-22 12:17:36 +0000 UTC" firstStartedPulling="2026-01-22 12:17:37.463546513 +0000 UTC m=+1365.041662338" lastFinishedPulling="2026-01-22 12:17:40.640717118 +0000 UTC m=+1368.218832943" observedRunningTime="2026-01-22 12:17:41.650962934 +0000 UTC m=+1369.229078779" watchObservedRunningTime="2026-01-22 12:17:41.658703482 +0000 UTC m=+1369.236819297" Jan 22 12:17:42 crc kubenswrapper[4773]: I0122 12:17:42.631713 4773 generic.go:334] "Generic (PLEG): container finished" podID="c5afcb43-61a0-47ad-923e-ca320d6ca49f" containerID="62a809250ea715b7545720b8c98b1d4eacbcabc63db058498f2e5801c049400f" exitCode=143 Jan 22 12:17:42 crc kubenswrapper[4773]: I0122 12:17:42.631872 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c5afcb43-61a0-47ad-923e-ca320d6ca49f","Type":"ContainerDied","Data":"62a809250ea715b7545720b8c98b1d4eacbcabc63db058498f2e5801c049400f"} Jan 22 12:17:42 crc kubenswrapper[4773]: I0122 12:17:42.632472 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 22 12:17:42 crc kubenswrapper[4773]: I0122 12:17:42.650807 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 12:17:42 crc kubenswrapper[4773]: I0122 12:17:42.651314 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="66b1ac35-e977-43f0-8538-fb7da5bafde0" containerName="glance-httpd" containerID="cri-o://37b3c172cbc4ff835a36f59bab0eae5c4789ae4e7e6df666e420d804dfcde4db" gracePeriod=30 Jan 22 12:17:42 crc kubenswrapper[4773]: I0122 12:17:42.651522 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="66b1ac35-e977-43f0-8538-fb7da5bafde0" containerName="glance-log" containerID="cri-o://8b7e3bc24eebd0a78ec08ceea26182613e0d300d43dd831e2e3b571928f96d66" gracePeriod=30 Jan 22 12:17:43 crc kubenswrapper[4773]: I0122 12:17:43.647234 4773 generic.go:334] "Generic (PLEG): container finished" podID="66b1ac35-e977-43f0-8538-fb7da5bafde0" containerID="8b7e3bc24eebd0a78ec08ceea26182613e0d300d43dd831e2e3b571928f96d66" exitCode=143 Jan 22 12:17:43 crc kubenswrapper[4773]: I0122 12:17:43.647347 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"66b1ac35-e977-43f0-8538-fb7da5bafde0","Type":"ContainerDied","Data":"8b7e3bc24eebd0a78ec08ceea26182613e0d300d43dd831e2e3b571928f96d66"} Jan 22 12:17:43 crc kubenswrapper[4773]: I0122 12:17:43.967328 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mvmfj"] 
Jan 22 12:17:43 crc kubenswrapper[4773]: I0122 12:17:43.969834 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mvmfj" Jan 22 12:17:43 crc kubenswrapper[4773]: I0122 12:17:43.977786 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mvmfj"] Jan 22 12:17:44 crc kubenswrapper[4773]: I0122 12:17:44.146575 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbcxt\" (UniqueName: \"kubernetes.io/projected/35b8ff0c-2b27-46ce-8c24-32794fd745a1-kube-api-access-wbcxt\") pod \"redhat-operators-mvmfj\" (UID: \"35b8ff0c-2b27-46ce-8c24-32794fd745a1\") " pod="openshift-marketplace/redhat-operators-mvmfj" Jan 22 12:17:44 crc kubenswrapper[4773]: I0122 12:17:44.146620 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35b8ff0c-2b27-46ce-8c24-32794fd745a1-utilities\") pod \"redhat-operators-mvmfj\" (UID: \"35b8ff0c-2b27-46ce-8c24-32794fd745a1\") " pod="openshift-marketplace/redhat-operators-mvmfj" Jan 22 12:17:44 crc kubenswrapper[4773]: I0122 12:17:44.146646 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35b8ff0c-2b27-46ce-8c24-32794fd745a1-catalog-content\") pod \"redhat-operators-mvmfj\" (UID: \"35b8ff0c-2b27-46ce-8c24-32794fd745a1\") " pod="openshift-marketplace/redhat-operators-mvmfj" Jan 22 12:17:44 crc kubenswrapper[4773]: I0122 12:17:44.249297 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbcxt\" (UniqueName: \"kubernetes.io/projected/35b8ff0c-2b27-46ce-8c24-32794fd745a1-kube-api-access-wbcxt\") pod \"redhat-operators-mvmfj\" (UID: \"35b8ff0c-2b27-46ce-8c24-32794fd745a1\") " pod="openshift-marketplace/redhat-operators-mvmfj" Jan 22 12:17:44 crc kubenswrapper[4773]: I0122 12:17:44.249358 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35b8ff0c-2b27-46ce-8c24-32794fd745a1-utilities\") pod \"redhat-operators-mvmfj\" (UID: \"35b8ff0c-2b27-46ce-8c24-32794fd745a1\") " pod="openshift-marketplace/redhat-operators-mvmfj" Jan 22 12:17:44 crc kubenswrapper[4773]: I0122 12:17:44.249401 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35b8ff0c-2b27-46ce-8c24-32794fd745a1-catalog-content\") pod \"redhat-operators-mvmfj\" (UID: \"35b8ff0c-2b27-46ce-8c24-32794fd745a1\") " pod="openshift-marketplace/redhat-operators-mvmfj" Jan 22 12:17:44 crc kubenswrapper[4773]: I0122 12:17:44.249947 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35b8ff0c-2b27-46ce-8c24-32794fd745a1-catalog-content\") pod \"redhat-operators-mvmfj\" (UID: \"35b8ff0c-2b27-46ce-8c24-32794fd745a1\") " pod="openshift-marketplace/redhat-operators-mvmfj" Jan 22 12:17:44 crc kubenswrapper[4773]: I0122 12:17:44.249984 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35b8ff0c-2b27-46ce-8c24-32794fd745a1-utilities\") pod \"redhat-operators-mvmfj\" (UID: \"35b8ff0c-2b27-46ce-8c24-32794fd745a1\") " pod="openshift-marketplace/redhat-operators-mvmfj" Jan 22 12:17:44 crc 
kubenswrapper[4773]: I0122 12:17:44.290307 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbcxt\" (UniqueName: \"kubernetes.io/projected/35b8ff0c-2b27-46ce-8c24-32794fd745a1-kube-api-access-wbcxt\") pod \"redhat-operators-mvmfj\" (UID: \"35b8ff0c-2b27-46ce-8c24-32794fd745a1\") " pod="openshift-marketplace/redhat-operators-mvmfj" Jan 22 12:17:44 crc kubenswrapper[4773]: I0122 12:17:44.290832 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mvmfj" Jan 22 12:17:44 crc kubenswrapper[4773]: I0122 12:17:44.790173 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mvmfj"] Jan 22 12:17:44 crc kubenswrapper[4773]: I0122 12:17:44.994217 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:17:44 crc kubenswrapper[4773]: I0122 12:17:44.996351 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2e6c44d1-387c-490e-99d8-25077af18d64" containerName="ceilometer-central-agent" containerID="cri-o://f88c3e6422458580573cbc58ef0c8d6428aa854a203f09c4b3c9e5b2059f08ef" gracePeriod=30 Jan 22 12:17:44 crc kubenswrapper[4773]: I0122 12:17:44.996925 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2e6c44d1-387c-490e-99d8-25077af18d64" containerName="proxy-httpd" containerID="cri-o://e0faf2eb9d8edd583d89d791aeb0e4e3f1dc6122464e056e074478bfc446242d" gracePeriod=30 Jan 22 12:17:44 crc kubenswrapper[4773]: I0122 12:17:44.997124 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2e6c44d1-387c-490e-99d8-25077af18d64" containerName="ceilometer-notification-agent" containerID="cri-o://acc0c166bd9567e1644b0bf83b7f0e2346ffc99b5f2c6008fca0b0f1710b4bec" gracePeriod=30 Jan 22 12:17:44 crc kubenswrapper[4773]: I0122 12:17:44.997176 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2e6c44d1-387c-490e-99d8-25077af18d64" containerName="sg-core" containerID="cri-o://3f3e8dc4381e974171ad605c515457cd51150a8e9e80e9ee5ffd69cda3af274a" gracePeriod=30 Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.256860 4773 util.go:48] "No ready sandbox for pod can be found. 
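The exitCode=143 entries recorded above follow the usual 128+signal encoding: 143 - 128 = 15, i.e. the container died from the SIGTERM sent by the graceful stop (gracePeriod=30), while 0 is a clean exit and a small code such as the sg-core's 2 further below is an application-level failure. A tiny decoder:

    import signal

    # Decode the PLEG exit codes seen in this log (143, 0, 2).
    def describe(code: int) -> str:
        if code > 128:
            return f"killed by {signal.Signals(code - 128).name}"
        return f"exited with status {code}"

    for code in (143, 0, 2):
        print(code, "->", describe(code))   # 143 -> killed by SIGTERM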
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.384263 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-public-tls-certs\") pod \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") "
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.384369 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c5afcb43-61a0-47ad-923e-ca320d6ca49f-logs\") pod \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") "
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.384393 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-combined-ca-bundle\") pod \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") "
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.384469 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-config-data\") pod \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") "
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.384638 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wv5gm\" (UniqueName: \"kubernetes.io/projected/c5afcb43-61a0-47ad-923e-ca320d6ca49f-kube-api-access-wv5gm\") pod \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") "
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.384681 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c5afcb43-61a0-47ad-923e-ca320d6ca49f-httpd-run\") pod \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") "
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.384698 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-scripts\") pod \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") "
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.384758 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\" (UID: \"c5afcb43-61a0-47ad-923e-ca320d6ca49f\") "
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.387301 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5afcb43-61a0-47ad-923e-ca320d6ca49f-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c5afcb43-61a0-47ad-923e-ca320d6ca49f" (UID: "c5afcb43-61a0-47ad-923e-ca320d6ca49f"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.387594 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5afcb43-61a0-47ad-923e-ca320d6ca49f-logs" (OuterVolumeSpecName: "logs") pod "c5afcb43-61a0-47ad-923e-ca320d6ca49f" (UID: "c5afcb43-61a0-47ad-923e-ca320d6ca49f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.391702 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "c5afcb43-61a0-47ad-923e-ca320d6ca49f" (UID: "c5afcb43-61a0-47ad-923e-ca320d6ca49f"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.396297 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-scripts" (OuterVolumeSpecName: "scripts") pod "c5afcb43-61a0-47ad-923e-ca320d6ca49f" (UID: "c5afcb43-61a0-47ad-923e-ca320d6ca49f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.407249 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5afcb43-61a0-47ad-923e-ca320d6ca49f-kube-api-access-wv5gm" (OuterVolumeSpecName: "kube-api-access-wv5gm") pod "c5afcb43-61a0-47ad-923e-ca320d6ca49f" (UID: "c5afcb43-61a0-47ad-923e-ca320d6ca49f"). InnerVolumeSpecName "kube-api-access-wv5gm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.484603 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c5afcb43-61a0-47ad-923e-ca320d6ca49f" (UID: "c5afcb43-61a0-47ad-923e-ca320d6ca49f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.487848 4773 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" "
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.487890 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c5afcb43-61a0-47ad-923e-ca320d6ca49f-logs\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.487903 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.487918 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wv5gm\" (UniqueName: \"kubernetes.io/projected/c5afcb43-61a0-47ad-923e-ca320d6ca49f-kube-api-access-wv5gm\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.487931 4773 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c5afcb43-61a0-47ad-923e-ca320d6ca49f-httpd-run\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.487942 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.493380 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c5afcb43-61a0-47ad-923e-ca320d6ca49f" (UID: "c5afcb43-61a0-47ad-923e-ca320d6ca49f"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.523161 4773 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.563592 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-config-data" (OuterVolumeSpecName: "config-data") pod "c5afcb43-61a0-47ad-923e-ca320d6ca49f" (UID: "c5afcb43-61a0-47ad-923e-ca320d6ca49f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.589777 4773 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.589811 4773 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-public-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.589824 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5afcb43-61a0-47ad-923e-ca320d6ca49f-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.669794 4773 generic.go:334] "Generic (PLEG): container finished" podID="c5afcb43-61a0-47ad-923e-ca320d6ca49f" containerID="e0924ddb10c7e40d89685c54ed6aeb7d7a8edc28249c54bea1999a2b871f9f89" exitCode=0
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.669854 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c5afcb43-61a0-47ad-923e-ca320d6ca49f","Type":"ContainerDied","Data":"e0924ddb10c7e40d89685c54ed6aeb7d7a8edc28249c54bea1999a2b871f9f89"}
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.669883 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c5afcb43-61a0-47ad-923e-ca320d6ca49f","Type":"ContainerDied","Data":"226252d5dc51b11698c2f0c7c9c92dd73a7fe7d37ac97d38307ddbd6d85b67d2"}
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.669901 4773 scope.go:117] "RemoveContainer" containerID="e0924ddb10c7e40d89685c54ed6aeb7d7a8edc28249c54bea1999a2b871f9f89"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.670023 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.687849 4773 generic.go:334] "Generic (PLEG): container finished" podID="2e6c44d1-387c-490e-99d8-25077af18d64" containerID="e0faf2eb9d8edd583d89d791aeb0e4e3f1dc6122464e056e074478bfc446242d" exitCode=0
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.687889 4773 generic.go:334] "Generic (PLEG): container finished" podID="2e6c44d1-387c-490e-99d8-25077af18d64" containerID="3f3e8dc4381e974171ad605c515457cd51150a8e9e80e9ee5ffd69cda3af274a" exitCode=2
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.687920 4773 generic.go:334] "Generic (PLEG): container finished" podID="2e6c44d1-387c-490e-99d8-25077af18d64" containerID="acc0c166bd9567e1644b0bf83b7f0e2346ffc99b5f2c6008fca0b0f1710b4bec" exitCode=0
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.687930 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e6c44d1-387c-490e-99d8-25077af18d64","Type":"ContainerDied","Data":"e0faf2eb9d8edd583d89d791aeb0e4e3f1dc6122464e056e074478bfc446242d"}
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.687959 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e6c44d1-387c-490e-99d8-25077af18d64","Type":"ContainerDied","Data":"3f3e8dc4381e974171ad605c515457cd51150a8e9e80e9ee5ffd69cda3af274a"}
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.687970 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e6c44d1-387c-490e-99d8-25077af18d64","Type":"ContainerDied","Data":"acc0c166bd9567e1644b0bf83b7f0e2346ffc99b5f2c6008fca0b0f1710b4bec"}
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.689864 4773 generic.go:334] "Generic (PLEG): container finished" podID="35b8ff0c-2b27-46ce-8c24-32794fd745a1" containerID="d7a16cadbf308b4538c31fe143916568748fd2705d39fc7bdab52238289b82e0" exitCode=0
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.689889 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvmfj" event={"ID":"35b8ff0c-2b27-46ce-8c24-32794fd745a1","Type":"ContainerDied","Data":"d7a16cadbf308b4538c31fe143916568748fd2705d39fc7bdab52238289b82e0"}
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.689903 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvmfj" event={"ID":"35b8ff0c-2b27-46ce-8c24-32794fd745a1","Type":"ContainerStarted","Data":"dcb68d7f52919a5e641ceeadba039afe3ed49dd3cb5a998cf96c389e5fecabe4"}
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.691635 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.723709 4773 scope.go:117] "RemoveContainer" containerID="62a809250ea715b7545720b8c98b1d4eacbcabc63db058498f2e5801c049400f"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.730834 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.743880 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.757203 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 22 12:17:45 crc kubenswrapper[4773]: E0122 12:17:45.757736 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5afcb43-61a0-47ad-923e-ca320d6ca49f" containerName="glance-httpd"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.757755 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5afcb43-61a0-47ad-923e-ca320d6ca49f" containerName="glance-httpd"
Jan 22 12:17:45 crc kubenswrapper[4773]: E0122 12:17:45.757770 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5afcb43-61a0-47ad-923e-ca320d6ca49f" containerName="glance-log"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.757776 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5afcb43-61a0-47ad-923e-ca320d6ca49f" containerName="glance-log"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.757949 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5afcb43-61a0-47ad-923e-ca320d6ca49f" containerName="glance-log"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.757978 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5afcb43-61a0-47ad-923e-ca320d6ca49f" containerName="glance-httpd"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.759006 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.761012 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.761257 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.770551 4773 scope.go:117] "RemoveContainer" containerID="e0924ddb10c7e40d89685c54ed6aeb7d7a8edc28249c54bea1999a2b871f9f89"
Jan 22 12:17:45 crc kubenswrapper[4773]: E0122 12:17:45.771115 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0924ddb10c7e40d89685c54ed6aeb7d7a8edc28249c54bea1999a2b871f9f89\": container with ID starting with e0924ddb10c7e40d89685c54ed6aeb7d7a8edc28249c54bea1999a2b871f9f89 not found: ID does not exist" containerID="e0924ddb10c7e40d89685c54ed6aeb7d7a8edc28249c54bea1999a2b871f9f89"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.771154 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0924ddb10c7e40d89685c54ed6aeb7d7a8edc28249c54bea1999a2b871f9f89"} err="failed to get container status \"e0924ddb10c7e40d89685c54ed6aeb7d7a8edc28249c54bea1999a2b871f9f89\": rpc error: code = NotFound desc = could not find container \"e0924ddb10c7e40d89685c54ed6aeb7d7a8edc28249c54bea1999a2b871f9f89\": container with ID starting with e0924ddb10c7e40d89685c54ed6aeb7d7a8edc28249c54bea1999a2b871f9f89 not found: ID does not exist"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.771181 4773 scope.go:117] "RemoveContainer" containerID="62a809250ea715b7545720b8c98b1d4eacbcabc63db058498f2e5801c049400f"
Jan 22 12:17:45 crc kubenswrapper[4773]: E0122 12:17:45.780133 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62a809250ea715b7545720b8c98b1d4eacbcabc63db058498f2e5801c049400f\": container with ID starting with 62a809250ea715b7545720b8c98b1d4eacbcabc63db058498f2e5801c049400f not found: ID does not exist" containerID="62a809250ea715b7545720b8c98b1d4eacbcabc63db058498f2e5801c049400f"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.780181 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62a809250ea715b7545720b8c98b1d4eacbcabc63db058498f2e5801c049400f"} err="failed to get container status \"62a809250ea715b7545720b8c98b1d4eacbcabc63db058498f2e5801c049400f\": rpc error: code = NotFound desc = could not find container \"62a809250ea715b7545720b8c98b1d4eacbcabc63db058498f2e5801c049400f\": container with ID starting with 62a809250ea715b7545720b8c98b1d4eacbcabc63db058498f2e5801c049400f not found: ID does not exist"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.833014 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.895216 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-config-data\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.895450 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.895518 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f681589a-ad29-4485-9313-7e63da547635-logs\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.895588 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rr222\" (UniqueName: \"kubernetes.io/projected/f681589a-ad29-4485-9313-7e63da547635-kube-api-access-rr222\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.895612 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-scripts\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.895640 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.895722 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f681589a-ad29-4485-9313-7e63da547635-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
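The NotFound errors from ContainerStatus/DeleteContainer a few entries above are benign: RemoveContainer raced with a cleanup that had already deleted the container, and the kubelet treats "already gone" as the desired outcome. The same idempotent-delete pattern in miniature (illustrative names, not the kubelet's actual code):

    class NotFoundError(Exception):
        """Stand-in for the gRPC NotFound status a CRI runtime returns."""

    def remove_container(runtime, container_id: str) -> None:
        # A NotFound from the runtime means the container is already gone,
        # so the delete is treated as satisfied rather than as a failure.
        try:
            runtime.remove(container_id)
        except NotFoundError:
            pass  # desired state is "absent"; already true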
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.895756 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.997856 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f681589a-ad29-4485-9313-7e63da547635-logs\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.997950 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rr222\" (UniqueName: \"kubernetes.io/projected/f681589a-ad29-4485-9313-7e63da547635-kube-api-access-rr222\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.997983 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-scripts\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.998025 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.998123 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f681589a-ad29-4485-9313-7e63da547635-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.998164 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.998314 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-config-data\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.998425 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.998456 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.998460 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f681589a-ad29-4485-9313-7e63da547635-logs\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:45 crc kubenswrapper[4773]: I0122 12:17:45.998766 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f681589a-ad29-4485-9313-7e63da547635-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.003071 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.004605 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-config-data\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.005131 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-scripts\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.008423 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.017742 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rr222\" (UniqueName: \"kubernetes.io/projected/f681589a-ad29-4485-9313-7e63da547635-kube-api-access-rr222\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.036522 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " pod="openstack/glance-default-external-api-0"
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.085150 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.548133 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.671931 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5afcb43-61a0-47ad-923e-ca320d6ca49f" path="/var/lib/kubelet/pods/c5afcb43-61a0-47ad-923e-ca320d6ca49f/volumes"
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.706760 4773 generic.go:334] "Generic (PLEG): container finished" podID="66b1ac35-e977-43f0-8538-fb7da5bafde0" containerID="37b3c172cbc4ff835a36f59bab0eae5c4789ae4e7e6df666e420d804dfcde4db" exitCode=0
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.706943 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"66b1ac35-e977-43f0-8538-fb7da5bafde0","Type":"ContainerDied","Data":"37b3c172cbc4ff835a36f59bab0eae5c4789ae4e7e6df666e420d804dfcde4db"}
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.707187 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"66b1ac35-e977-43f0-8538-fb7da5bafde0","Type":"ContainerDied","Data":"a40e1ae31ca9ceacefb2b5069d33728f1efea6e27518d03d884ab8453179bfea"}
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.707208 4773 scope.go:117] "RemoveContainer" containerID="37b3c172cbc4ff835a36f59bab0eae5c4789ae4e7e6df666e420d804dfcde4db"
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.707009 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.711582 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"66b1ac35-e977-43f0-8538-fb7da5bafde0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") "
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.711699 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66b1ac35-e977-43f0-8538-fb7da5bafde0-logs\") pod \"66b1ac35-e977-43f0-8538-fb7da5bafde0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") "
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.711872 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-config-data\") pod \"66b1ac35-e977-43f0-8538-fb7da5bafde0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") "
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.711898 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvxtb\" (UniqueName: \"kubernetes.io/projected/66b1ac35-e977-43f0-8538-fb7da5bafde0-kube-api-access-tvxtb\") pod \"66b1ac35-e977-43f0-8538-fb7da5bafde0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") "
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.711978 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-combined-ca-bundle\") pod \"66b1ac35-e977-43f0-8538-fb7da5bafde0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") "
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.712050 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-internal-tls-certs\") pod \"66b1ac35-e977-43f0-8538-fb7da5bafde0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") "
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.712121 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-scripts\") pod \"66b1ac35-e977-43f0-8538-fb7da5bafde0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") "
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.712169 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/66b1ac35-e977-43f0-8538-fb7da5bafde0-httpd-run\") pod \"66b1ac35-e977-43f0-8538-fb7da5bafde0\" (UID: \"66b1ac35-e977-43f0-8538-fb7da5bafde0\") "
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.713251 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66b1ac35-e977-43f0-8538-fb7da5bafde0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "66b1ac35-e977-43f0-8538-fb7da5bafde0" (UID: "66b1ac35-e977-43f0-8538-fb7da5bafde0"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.779487 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66b1ac35-e977-43f0-8538-fb7da5bafde0-logs" (OuterVolumeSpecName: "logs") pod "66b1ac35-e977-43f0-8538-fb7da5bafde0" (UID: "66b1ac35-e977-43f0-8538-fb7da5bafde0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.780456 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "66b1ac35-e977-43f0-8538-fb7da5bafde0" (UID: "66b1ac35-e977-43f0-8538-fb7da5bafde0"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.780692 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66b1ac35-e977-43f0-8538-fb7da5bafde0-kube-api-access-tvxtb" (OuterVolumeSpecName: "kube-api-access-tvxtb") pod "66b1ac35-e977-43f0-8538-fb7da5bafde0" (UID: "66b1ac35-e977-43f0-8538-fb7da5bafde0"). InnerVolumeSpecName "kube-api-access-tvxtb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.780950 4773 scope.go:117] "RemoveContainer" containerID="8b7e3bc24eebd0a78ec08ceea26182613e0d300d43dd831e2e3b571928f96d66"
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.812943 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "66b1ac35-e977-43f0-8538-fb7da5bafde0" (UID: "66b1ac35-e977-43f0-8538-fb7da5bafde0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.814726 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-scripts" (OuterVolumeSpecName: "scripts") pod "66b1ac35-e977-43f0-8538-fb7da5bafde0" (UID: "66b1ac35-e977-43f0-8538-fb7da5bafde0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.817567 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.820264 4773 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.820301 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.820315 4773 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/66b1ac35-e977-43f0-8538-fb7da5bafde0-httpd-run\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.820336 4773 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" "
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.820346 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66b1ac35-e977-43f0-8538-fb7da5bafde0-logs\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.820358 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvxtb\" (UniqueName: \"kubernetes.io/projected/66b1ac35-e977-43f0-8538-fb7da5bafde0-kube-api-access-tvxtb\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.845545 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-config-data" (OuterVolumeSpecName: "config-data") pod "66b1ac35-e977-43f0-8538-fb7da5bafde0" (UID: "66b1ac35-e977-43f0-8538-fb7da5bafde0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.854433 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "66b1ac35-e977-43f0-8538-fb7da5bafde0" (UID: "66b1ac35-e977-43f0-8538-fb7da5bafde0"). InnerVolumeSpecName "combined-ca-bundle".
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.862838 4773 scope.go:117] "RemoveContainer" containerID="37b3c172cbc4ff835a36f59bab0eae5c4789ae4e7e6df666e420d804dfcde4db" Jan 22 12:17:46 crc kubenswrapper[4773]: E0122 12:17:46.864820 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37b3c172cbc4ff835a36f59bab0eae5c4789ae4e7e6df666e420d804dfcde4db\": container with ID starting with 37b3c172cbc4ff835a36f59bab0eae5c4789ae4e7e6df666e420d804dfcde4db not found: ID does not exist" containerID="37b3c172cbc4ff835a36f59bab0eae5c4789ae4e7e6df666e420d804dfcde4db" Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.864910 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37b3c172cbc4ff835a36f59bab0eae5c4789ae4e7e6df666e420d804dfcde4db"} err="failed to get container status \"37b3c172cbc4ff835a36f59bab0eae5c4789ae4e7e6df666e420d804dfcde4db\": rpc error: code = NotFound desc = could not find container \"37b3c172cbc4ff835a36f59bab0eae5c4789ae4e7e6df666e420d804dfcde4db\": container with ID starting with 37b3c172cbc4ff835a36f59bab0eae5c4789ae4e7e6df666e420d804dfcde4db not found: ID does not exist" Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.865064 4773 scope.go:117] "RemoveContainer" containerID="8b7e3bc24eebd0a78ec08ceea26182613e0d300d43dd831e2e3b571928f96d66" Jan 22 12:17:46 crc kubenswrapper[4773]: E0122 12:17:46.866816 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b7e3bc24eebd0a78ec08ceea26182613e0d300d43dd831e2e3b571928f96d66\": container with ID starting with 8b7e3bc24eebd0a78ec08ceea26182613e0d300d43dd831e2e3b571928f96d66 not found: ID does not exist" containerID="8b7e3bc24eebd0a78ec08ceea26182613e0d300d43dd831e2e3b571928f96d66" Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.866952 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b7e3bc24eebd0a78ec08ceea26182613e0d300d43dd831e2e3b571928f96d66"} err="failed to get container status \"8b7e3bc24eebd0a78ec08ceea26182613e0d300d43dd831e2e3b571928f96d66\": rpc error: code = NotFound desc = could not find container \"8b7e3bc24eebd0a78ec08ceea26182613e0d300d43dd831e2e3b571928f96d66\": container with ID starting with 8b7e3bc24eebd0a78ec08ceea26182613e0d300d43dd831e2e3b571928f96d66 not found: ID does not exist" Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.868780 4773 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.921730 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.921775 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66b1ac35-e977-43f0-8538-fb7da5bafde0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:46 crc kubenswrapper[4773]: I0122 12:17:46.921790 4773 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:47 crc 
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.063064 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.079197 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.124139 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 22 12:17:47 crc kubenswrapper[4773]: E0122 12:17:47.125276 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66b1ac35-e977-43f0-8538-fb7da5bafde0" containerName="glance-log"
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.125320 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="66b1ac35-e977-43f0-8538-fb7da5bafde0" containerName="glance-log"
Jan 22 12:17:47 crc kubenswrapper[4773]: E0122 12:17:47.125337 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66b1ac35-e977-43f0-8538-fb7da5bafde0" containerName="glance-httpd"
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.125346 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="66b1ac35-e977-43f0-8538-fb7da5bafde0" containerName="glance-httpd"
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.125610 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="66b1ac35-e977-43f0-8538-fb7da5bafde0" containerName="glance-log"
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.125628 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="66b1ac35-e977-43f0-8538-fb7da5bafde0" containerName="glance-httpd"
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.126936 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.133763 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.134032 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.167345 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.237257 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0"
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.237345 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9c73637-566a-47b5-bba6-97948a973a47-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0"
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.237376 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqqtd\" (UniqueName: \"kubernetes.io/projected/b9c73637-566a-47b5-bba6-97948a973a47-kube-api-access-jqqtd\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0"
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.237415 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0"
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.237484 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0"
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.237661 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0"
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.237781 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9c73637-566a-47b5-bba6-97948a973a47-logs\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0"
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.237835 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0"
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.339116 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9c73637-566a-47b5-bba6-97948a973a47-logs\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0"
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.339435 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0"
Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.339608 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0"
pod="openstack/glance-default-internal-api-0" Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.339778 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqqtd\" (UniqueName: \"kubernetes.io/projected/b9c73637-566a-47b5-bba6-97948a973a47-kube-api-access-jqqtd\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.339862 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.339928 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.340017 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.341810 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.344556 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9c73637-566a-47b5-bba6-97948a973a47-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.344674 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9c73637-566a-47b5-bba6-97948a973a47-logs\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.348556 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.353599 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:17:47 crc kubenswrapper[4773]: 
I0122 12:17:47.354098 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.354996 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.376630 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqqtd\" (UniqueName: \"kubernetes.io/projected/b9c73637-566a-47b5-bba6-97948a973a47-kube-api-access-jqqtd\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.403201 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " pod="openstack/glance-default-internal-api-0" Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.464323 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.803855 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f681589a-ad29-4485-9313-7e63da547635","Type":"ContainerStarted","Data":"1ace24924790e623340b7b164a17c7319a67eb86e4eb575bbc7e58973674c6d7"} Jan 22 12:17:47 crc kubenswrapper[4773]: I0122 12:17:47.807326 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvmfj" event={"ID":"35b8ff0c-2b27-46ce-8c24-32794fd745a1","Type":"ContainerStarted","Data":"38f370dad119a4a4221dddd0b7d8a2d9daa3e13f9e9aee5e2d344c1fcb19d839"} Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.132672 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.672756 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66b1ac35-e977-43f0-8538-fb7da5bafde0" path="/var/lib/kubelet/pods/66b1ac35-e977-43f0-8538-fb7da5bafde0/volumes" Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.687309 4773 util.go:48] "No ready sandbox for pod can be found. 
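The event={...} payload on the "SyncLoop (PLEG)" lines above is plain JSON: pod UID, event type, and the container (or sandbox) ID. A small decoder sketch under that assumption (the struct and pattern are mine, not kubelet's types):

    package main

    import (
    	"encoding/json"
    	"fmt"
    	"regexp"
    )

    // plegEvent matches the keys visible in the log's event payload.
    type plegEvent struct {
    	ID   string // pod UID
    	Type string // ContainerStarted, ContainerDied, ...
    	Data string // container or sandbox ID
    }

    var eventRe = regexp.MustCompile(`event=(\{.*?\})`)

    func main() {
    	line := `I0122 12:17:47.803855 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f681589a-ad29-4485-9313-7e63da547635","Type":"ContainerStarted","Data":"1ace24924790e623340b7b164a17c7319a67eb86e4eb575bbc7e58973674c6d7"}`
    	var ev plegEvent
    	if m := eventRe.FindStringSubmatch(line); m != nil {
    		if err := json.Unmarshal([]byte(m[1]), &ev); err == nil {
    			fmt.Printf("%s -> %s (%s)\n", ev.ID[:8], ev.Type, ev.Data[:12])
    		}
    	}
    }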
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.820902 4773 generic.go:334] "Generic (PLEG): container finished" podID="2e6c44d1-387c-490e-99d8-25077af18d64" containerID="f88c3e6422458580573cbc58ef0c8d6428aa854a203f09c4b3c9e5b2059f08ef" exitCode=0 Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.820974 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e6c44d1-387c-490e-99d8-25077af18d64","Type":"ContainerDied","Data":"f88c3e6422458580573cbc58ef0c8d6428aa854a203f09c4b3c9e5b2059f08ef"} Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.821014 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2e6c44d1-387c-490e-99d8-25077af18d64","Type":"ContainerDied","Data":"d40bcc8474dd6268ab77f51efc57af36e9d6ba594985bc3d706c19a1ca51dbf6"} Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.821016 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.821051 4773 scope.go:117] "RemoveContainer" containerID="e0faf2eb9d8edd583d89d791aeb0e4e3f1dc6122464e056e074478bfc446242d" Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.824320 4773 generic.go:334] "Generic (PLEG): container finished" podID="35b8ff0c-2b27-46ce-8c24-32794fd745a1" containerID="38f370dad119a4a4221dddd0b7d8a2d9daa3e13f9e9aee5e2d344c1fcb19d839" exitCode=0 Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.824384 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvmfj" event={"ID":"35b8ff0c-2b27-46ce-8c24-32794fd745a1","Type":"ContainerDied","Data":"38f370dad119a4a4221dddd0b7d8a2d9daa3e13f9e9aee5e2d344c1fcb19d839"} Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.828639 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b9c73637-566a-47b5-bba6-97948a973a47","Type":"ContainerStarted","Data":"225a2f34a32f0ca2dad3cbb6e121a6b3d564e544fe5bb8cd325dea73684c25dc"} Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.831361 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f681589a-ad29-4485-9313-7e63da547635","Type":"ContainerStarted","Data":"ec104a46c303372809d61350bbc1b44c16590837f54cef8d940afde155b334e5"} Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.831399 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f681589a-ad29-4485-9313-7e63da547635","Type":"ContainerStarted","Data":"b23e36a154e8bd280a582d1bff462b1409937c638edaa9eeeed72ca40585f7bc"} Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.844967 4773 scope.go:117] "RemoveContainer" containerID="3f3e8dc4381e974171ad605c515457cd51150a8e9e80e9ee5ffd69cda3af274a" Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.862437 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.862414012 podStartE2EDuration="3.862414012s" podCreationTimestamp="2026-01-22 12:17:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:17:48.857639418 +0000 UTC m=+1376.435755243" watchObservedRunningTime="2026-01-22 12:17:48.862414012 +0000 UTC m=+1376.440529837" Jan 22 12:17:48 crc 
kubenswrapper[4773]: I0122 12:17:48.873644 4773 scope.go:117] "RemoveContainer" containerID="acc0c166bd9567e1644b0bf83b7f0e2346ffc99b5f2c6008fca0b0f1710b4bec" Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.875330 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t75nw\" (UniqueName: \"kubernetes.io/projected/2e6c44d1-387c-490e-99d8-25077af18d64-kube-api-access-t75nw\") pod \"2e6c44d1-387c-490e-99d8-25077af18d64\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.875560 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e6c44d1-387c-490e-99d8-25077af18d64-log-httpd\") pod \"2e6c44d1-387c-490e-99d8-25077af18d64\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.875628 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e6c44d1-387c-490e-99d8-25077af18d64-run-httpd\") pod \"2e6c44d1-387c-490e-99d8-25077af18d64\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.875677 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-combined-ca-bundle\") pod \"2e6c44d1-387c-490e-99d8-25077af18d64\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.875748 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-config-data\") pod \"2e6c44d1-387c-490e-99d8-25077af18d64\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.875785 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-scripts\") pod \"2e6c44d1-387c-490e-99d8-25077af18d64\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.875850 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-sg-core-conf-yaml\") pod \"2e6c44d1-387c-490e-99d8-25077af18d64\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.876783 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e6c44d1-387c-490e-99d8-25077af18d64-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2e6c44d1-387c-490e-99d8-25077af18d64" (UID: "2e6c44d1-387c-490e-99d8-25077af18d64"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.877080 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e6c44d1-387c-490e-99d8-25077af18d64-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2e6c44d1-387c-490e-99d8-25077af18d64" (UID: "2e6c44d1-387c-490e-99d8-25077af18d64"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.881741 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e6c44d1-387c-490e-99d8-25077af18d64-kube-api-access-t75nw" (OuterVolumeSpecName: "kube-api-access-t75nw") pod "2e6c44d1-387c-490e-99d8-25077af18d64" (UID: "2e6c44d1-387c-490e-99d8-25077af18d64"). InnerVolumeSpecName "kube-api-access-t75nw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.891496 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-scripts" (OuterVolumeSpecName: "scripts") pod "2e6c44d1-387c-490e-99d8-25077af18d64" (UID: "2e6c44d1-387c-490e-99d8-25077af18d64"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.902753 4773 scope.go:117] "RemoveContainer" containerID="f88c3e6422458580573cbc58ef0c8d6428aa854a203f09c4b3c9e5b2059f08ef" Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.948609 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2e6c44d1-387c-490e-99d8-25077af18d64" (UID: "2e6c44d1-387c-490e-99d8-25077af18d64"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.978809 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2e6c44d1-387c-490e-99d8-25077af18d64" (UID: "2e6c44d1-387c-490e-99d8-25077af18d64"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:48 crc kubenswrapper[4773]: W0122 12:17:48.980684 4773 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/2e6c44d1-387c-490e-99d8-25077af18d64/volumes/kubernetes.io~secret/combined-ca-bundle Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.980729 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2e6c44d1-387c-490e-99d8-25077af18d64" (UID: "2e6c44d1-387c-490e-99d8-25077af18d64"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.982178 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-combined-ca-bundle\") pod \"2e6c44d1-387c-490e-99d8-25077af18d64\" (UID: \"2e6c44d1-387c-490e-99d8-25077af18d64\") " Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.982979 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.983001 4773 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.983012 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t75nw\" (UniqueName: \"kubernetes.io/projected/2e6c44d1-387c-490e-99d8-25077af18d64-kube-api-access-t75nw\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.983024 4773 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e6c44d1-387c-490e-99d8-25077af18d64-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.983036 4773 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2e6c44d1-387c-490e-99d8-25077af18d64-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:48 crc kubenswrapper[4773]: I0122 12:17:48.983046 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.013955 4773 scope.go:117] "RemoveContainer" containerID="e0faf2eb9d8edd583d89d791aeb0e4e3f1dc6122464e056e074478bfc446242d" Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.014060 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-config-data" (OuterVolumeSpecName: "config-data") pod "2e6c44d1-387c-490e-99d8-25077af18d64" (UID: "2e6c44d1-387c-490e-99d8-25077af18d64"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:49 crc kubenswrapper[4773]: E0122 12:17:49.021332 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0faf2eb9d8edd583d89d791aeb0e4e3f1dc6122464e056e074478bfc446242d\": container with ID starting with e0faf2eb9d8edd583d89d791aeb0e4e3f1dc6122464e056e074478bfc446242d not found: ID does not exist" containerID="e0faf2eb9d8edd583d89d791aeb0e4e3f1dc6122464e056e074478bfc446242d" Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.021381 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0faf2eb9d8edd583d89d791aeb0e4e3f1dc6122464e056e074478bfc446242d"} err="failed to get container status \"e0faf2eb9d8edd583d89d791aeb0e4e3f1dc6122464e056e074478bfc446242d\": rpc error: code = NotFound desc = could not find container \"e0faf2eb9d8edd583d89d791aeb0e4e3f1dc6122464e056e074478bfc446242d\": container with ID starting with e0faf2eb9d8edd583d89d791aeb0e4e3f1dc6122464e056e074478bfc446242d not found: ID does not exist" Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.021410 4773 scope.go:117] "RemoveContainer" containerID="3f3e8dc4381e974171ad605c515457cd51150a8e9e80e9ee5ffd69cda3af274a" Jan 22 12:17:49 crc kubenswrapper[4773]: E0122 12:17:49.022698 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f3e8dc4381e974171ad605c515457cd51150a8e9e80e9ee5ffd69cda3af274a\": container with ID starting with 3f3e8dc4381e974171ad605c515457cd51150a8e9e80e9ee5ffd69cda3af274a not found: ID does not exist" containerID="3f3e8dc4381e974171ad605c515457cd51150a8e9e80e9ee5ffd69cda3af274a" Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.022727 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f3e8dc4381e974171ad605c515457cd51150a8e9e80e9ee5ffd69cda3af274a"} err="failed to get container status \"3f3e8dc4381e974171ad605c515457cd51150a8e9e80e9ee5ffd69cda3af274a\": rpc error: code = NotFound desc = could not find container \"3f3e8dc4381e974171ad605c515457cd51150a8e9e80e9ee5ffd69cda3af274a\": container with ID starting with 3f3e8dc4381e974171ad605c515457cd51150a8e9e80e9ee5ffd69cda3af274a not found: ID does not exist" Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.022746 4773 scope.go:117] "RemoveContainer" containerID="acc0c166bd9567e1644b0bf83b7f0e2346ffc99b5f2c6008fca0b0f1710b4bec" Jan 22 12:17:49 crc kubenswrapper[4773]: E0122 12:17:49.023401 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acc0c166bd9567e1644b0bf83b7f0e2346ffc99b5f2c6008fca0b0f1710b4bec\": container with ID starting with acc0c166bd9567e1644b0bf83b7f0e2346ffc99b5f2c6008fca0b0f1710b4bec not found: ID does not exist" containerID="acc0c166bd9567e1644b0bf83b7f0e2346ffc99b5f2c6008fca0b0f1710b4bec" Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.023466 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acc0c166bd9567e1644b0bf83b7f0e2346ffc99b5f2c6008fca0b0f1710b4bec"} err="failed to get container status \"acc0c166bd9567e1644b0bf83b7f0e2346ffc99b5f2c6008fca0b0f1710b4bec\": rpc error: code = NotFound desc = could not find container \"acc0c166bd9567e1644b0bf83b7f0e2346ffc99b5f2c6008fca0b0f1710b4bec\": container with ID starting with acc0c166bd9567e1644b0bf83b7f0e2346ffc99b5f2c6008fca0b0f1710b4bec 
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.023466 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acc0c166bd9567e1644b0bf83b7f0e2346ffc99b5f2c6008fca0b0f1710b4bec"} err="failed to get container status \"acc0c166bd9567e1644b0bf83b7f0e2346ffc99b5f2c6008fca0b0f1710b4bec\": rpc error: code = NotFound desc = could not find container \"acc0c166bd9567e1644b0bf83b7f0e2346ffc99b5f2c6008fca0b0f1710b4bec\": container with ID starting with acc0c166bd9567e1644b0bf83b7f0e2346ffc99b5f2c6008fca0b0f1710b4bec not found: ID does not exist"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.023488 4773 scope.go:117] "RemoveContainer" containerID="f88c3e6422458580573cbc58ef0c8d6428aa854a203f09c4b3c9e5b2059f08ef"
Jan 22 12:17:49 crc kubenswrapper[4773]: E0122 12:17:49.023976 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f88c3e6422458580573cbc58ef0c8d6428aa854a203f09c4b3c9e5b2059f08ef\": container with ID starting with f88c3e6422458580573cbc58ef0c8d6428aa854a203f09c4b3c9e5b2059f08ef not found: ID does not exist" containerID="f88c3e6422458580573cbc58ef0c8d6428aa854a203f09c4b3c9e5b2059f08ef"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.024006 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f88c3e6422458580573cbc58ef0c8d6428aa854a203f09c4b3c9e5b2059f08ef"} err="failed to get container status \"f88c3e6422458580573cbc58ef0c8d6428aa854a203f09c4b3c9e5b2059f08ef\": rpc error: code = NotFound desc = could not find container \"f88c3e6422458580573cbc58ef0c8d6428aa854a203f09c4b3c9e5b2059f08ef\": container with ID starting with f88c3e6422458580573cbc58ef0c8d6428aa854a203f09c4b3c9e5b2059f08ef not found: ID does not exist"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.084808 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e6c44d1-387c-490e-99d8-25077af18d64-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.164276 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.185182 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.203517 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:17:49 crc kubenswrapper[4773]: E0122 12:17:49.203974 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e6c44d1-387c-490e-99d8-25077af18d64" containerName="proxy-httpd"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.203992 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e6c44d1-387c-490e-99d8-25077af18d64" containerName="proxy-httpd"
Jan 22 12:17:49 crc kubenswrapper[4773]: E0122 12:17:49.204010 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e6c44d1-387c-490e-99d8-25077af18d64" containerName="sg-core"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.204017 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e6c44d1-387c-490e-99d8-25077af18d64" containerName="sg-core"
Jan 22 12:17:49 crc kubenswrapper[4773]: E0122 12:17:49.204029 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e6c44d1-387c-490e-99d8-25077af18d64" containerName="ceilometer-notification-agent"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.204035 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e6c44d1-387c-490e-99d8-25077af18d64" containerName="ceilometer-notification-agent"
Jan 22 12:17:49 crc kubenswrapper[4773]: E0122 12:17:49.204048 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e6c44d1-387c-490e-99d8-25077af18d64" containerName="ceilometer-central-agent"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.204055 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e6c44d1-387c-490e-99d8-25077af18d64" containerName="ceilometer-central-agent"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.204217 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e6c44d1-387c-490e-99d8-25077af18d64" containerName="ceilometer-central-agent"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.204235 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e6c44d1-387c-490e-99d8-25077af18d64" containerName="ceilometer-notification-agent"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.204244 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e6c44d1-387c-490e-99d8-25077af18d64" containerName="sg-core"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.204257 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e6c44d1-387c-490e-99d8-25077af18d64" containerName="proxy-httpd"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.205824 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.208125 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.214027 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.250079 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.396525 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkmxb\" (UniqueName: \"kubernetes.io/projected/d428f354-8bb3-4be8-9108-407bb678e12b-kube-api-access-gkmxb\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.396599 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d428f354-8bb3-4be8-9108-407bb678e12b-log-httpd\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.396633 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-config-data\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.396819 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-scripts\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.396891 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.396913 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d428f354-8bb3-4be8-9108-407bb678e12b-run-httpd\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.396973 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.498361 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.498401 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d428f354-8bb3-4be8-9108-407bb678e12b-run-httpd\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.498465 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.498537 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkmxb\" (UniqueName: \"kubernetes.io/projected/d428f354-8bb3-4be8-9108-407bb678e12b-kube-api-access-gkmxb\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.498569 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d428f354-8bb3-4be8-9108-407bb678e12b-log-httpd\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.498588 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-config-data\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.498621 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-scripts\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.499546 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d428f354-8bb3-4be8-9108-407bb678e12b-log-httpd\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.499830 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d428f354-8bb3-4be8-9108-407bb678e12b-run-httpd\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.503484 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.503515 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-config-data\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.503484 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-scripts\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.510233 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.518232 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkmxb\" (UniqueName: \"kubernetes.io/projected/d428f354-8bb3-4be8-9108-407bb678e12b-kube-api-access-gkmxb\") pod \"ceilometer-0\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") " pod="openstack/ceilometer-0"
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.846313 4773 generic.go:334] "Generic (PLEG): container finished" podID="e18e0db7-6f23-4114-82cd-3d1d393db415" containerID="27c3e0c464e09904962671874cb61cd552b7a21244651c606261999643bca822" exitCode=0 Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.846537 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-n2bkh" event={"ID":"e18e0db7-6f23-4114-82cd-3d1d393db415","Type":"ContainerDied","Data":"27c3e0c464e09904962671874cb61cd552b7a21244651c606261999643bca822"} Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.849225 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvmfj" event={"ID":"35b8ff0c-2b27-46ce-8c24-32794fd745a1","Type":"ContainerStarted","Data":"725c252fdefeb7e5f7b42f821862d39bc045416978fed416610658e475ae8d85"} Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.862016 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b9c73637-566a-47b5-bba6-97948a973a47","Type":"ContainerStarted","Data":"6e5058f7647d7d88bf8a21cccbc0b8b01c2c40459ed5ee5f2691f3f5b1856157"} Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.862081 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b9c73637-566a-47b5-bba6-97948a973a47","Type":"ContainerStarted","Data":"3dfcb4bd6224b403fd1ea1d5e784918a520d4eaddf99c8f2c147ba29dd4f733e"} Jan 22 12:17:49 crc kubenswrapper[4773]: I0122 12:17:49.913871 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.913840955 podStartE2EDuration="2.913840955s" podCreationTimestamp="2026-01-22 12:17:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:17:49.9033604 +0000 UTC m=+1377.481476235" watchObservedRunningTime="2026-01-22 12:17:49.913840955 +0000 UTC m=+1377.491956780" Jan 22 12:17:50 crc kubenswrapper[4773]: I0122 12:17:50.024798 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mvmfj" podStartSLOduration=3.473061506 podStartE2EDuration="7.024781456s" podCreationTimestamp="2026-01-22 12:17:43 +0000 UTC" firstStartedPulling="2026-01-22 12:17:45.691427511 +0000 UTC m=+1373.269543336" lastFinishedPulling="2026-01-22 12:17:49.243147461 +0000 UTC m=+1376.821263286" observedRunningTime="2026-01-22 12:17:49.941961486 +0000 UTC m=+1377.520077311" watchObservedRunningTime="2026-01-22 12:17:50.024781456 +0000 UTC m=+1377.602897271" Jan 22 12:17:50 crc kubenswrapper[4773]: I0122 12:17:50.026489 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:17:50 crc kubenswrapper[4773]: I0122 12:17:50.668696 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e6c44d1-387c-490e-99d8-25077af18d64" path="/var/lib/kubelet/pods/2e6c44d1-387c-490e-99d8-25077af18d64/volumes" Jan 22 12:17:50 crc kubenswrapper[4773]: I0122 12:17:50.874662 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d428f354-8bb3-4be8-9108-407bb678e12b","Type":"ContainerStarted","Data":"6e142c1a82433b35b2c5fb8e1274496feb8364d5cdaeb4183cae070a2144dc94"} Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.239380 4773 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-n2bkh" Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.339105 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e18e0db7-6f23-4114-82cd-3d1d393db415-scripts\") pod \"e18e0db7-6f23-4114-82cd-3d1d393db415\" (UID: \"e18e0db7-6f23-4114-82cd-3d1d393db415\") " Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.339183 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9wwnr\" (UniqueName: \"kubernetes.io/projected/e18e0db7-6f23-4114-82cd-3d1d393db415-kube-api-access-9wwnr\") pod \"e18e0db7-6f23-4114-82cd-3d1d393db415\" (UID: \"e18e0db7-6f23-4114-82cd-3d1d393db415\") " Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.339390 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e18e0db7-6f23-4114-82cd-3d1d393db415-combined-ca-bundle\") pod \"e18e0db7-6f23-4114-82cd-3d1d393db415\" (UID: \"e18e0db7-6f23-4114-82cd-3d1d393db415\") " Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.339460 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e18e0db7-6f23-4114-82cd-3d1d393db415-config-data\") pod \"e18e0db7-6f23-4114-82cd-3d1d393db415\" (UID: \"e18e0db7-6f23-4114-82cd-3d1d393db415\") " Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.355350 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e18e0db7-6f23-4114-82cd-3d1d393db415-scripts" (OuterVolumeSpecName: "scripts") pod "e18e0db7-6f23-4114-82cd-3d1d393db415" (UID: "e18e0db7-6f23-4114-82cd-3d1d393db415"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.355461 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e18e0db7-6f23-4114-82cd-3d1d393db415-kube-api-access-9wwnr" (OuterVolumeSpecName: "kube-api-access-9wwnr") pod "e18e0db7-6f23-4114-82cd-3d1d393db415" (UID: "e18e0db7-6f23-4114-82cd-3d1d393db415"). InnerVolumeSpecName "kube-api-access-9wwnr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.372176 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e18e0db7-6f23-4114-82cd-3d1d393db415-config-data" (OuterVolumeSpecName: "config-data") pod "e18e0db7-6f23-4114-82cd-3d1d393db415" (UID: "e18e0db7-6f23-4114-82cd-3d1d393db415"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.376327 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e18e0db7-6f23-4114-82cd-3d1d393db415-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e18e0db7-6f23-4114-82cd-3d1d393db415" (UID: "e18e0db7-6f23-4114-82cd-3d1d393db415"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.441623 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e18e0db7-6f23-4114-82cd-3d1d393db415-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.441660 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e18e0db7-6f23-4114-82cd-3d1d393db415-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.441668 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e18e0db7-6f23-4114-82cd-3d1d393db415-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.441681 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9wwnr\" (UniqueName: \"kubernetes.io/projected/e18e0db7-6f23-4114-82cd-3d1d393db415-kube-api-access-9wwnr\") on node \"crc\" DevicePath \"\"" Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.884032 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-n2bkh" event={"ID":"e18e0db7-6f23-4114-82cd-3d1d393db415","Type":"ContainerDied","Data":"0a9c7ab24425325496a715763ac3b01e7149358b0496c174d13c72d28ff15e0e"} Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.884079 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a9c7ab24425325496a715763ac3b01e7149358b0496c174d13c72d28ff15e0e" Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.884138 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-n2bkh" Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.991014 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 12:17:51 crc kubenswrapper[4773]: E0122 12:17:51.991885 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e18e0db7-6f23-4114-82cd-3d1d393db415" containerName="nova-cell0-conductor-db-sync" Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.991912 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="e18e0db7-6f23-4114-82cd-3d1d393db415" containerName="nova-cell0-conductor-db-sync" Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.992129 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="e18e0db7-6f23-4114-82cd-3d1d393db415" containerName="nova-cell0-conductor-db-sync" Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.992907 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.994849 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-xjgc7" Jan 22 12:17:51 crc kubenswrapper[4773]: I0122 12:17:51.995840 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 22 12:17:52 crc kubenswrapper[4773]: I0122 12:17:52.004422 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 12:17:52 crc kubenswrapper[4773]: I0122 12:17:52.154071 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1683acba-c129-4a7c-866c-421cdb0e6505-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"1683acba-c129-4a7c-866c-421cdb0e6505\") " pod="openstack/nova-cell0-conductor-0" Jan 22 12:17:52 crc kubenswrapper[4773]: I0122 12:17:52.154242 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1683acba-c129-4a7c-866c-421cdb0e6505-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"1683acba-c129-4a7c-866c-421cdb0e6505\") " pod="openstack/nova-cell0-conductor-0" Jan 22 12:17:52 crc kubenswrapper[4773]: I0122 12:17:52.154423 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qg94q\" (UniqueName: \"kubernetes.io/projected/1683acba-c129-4a7c-866c-421cdb0e6505-kube-api-access-qg94q\") pod \"nova-cell0-conductor-0\" (UID: \"1683acba-c129-4a7c-866c-421cdb0e6505\") " pod="openstack/nova-cell0-conductor-0" Jan 22 12:17:52 crc kubenswrapper[4773]: I0122 12:17:52.256462 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qg94q\" (UniqueName: \"kubernetes.io/projected/1683acba-c129-4a7c-866c-421cdb0e6505-kube-api-access-qg94q\") pod \"nova-cell0-conductor-0\" (UID: \"1683acba-c129-4a7c-866c-421cdb0e6505\") " pod="openstack/nova-cell0-conductor-0" Jan 22 12:17:52 crc kubenswrapper[4773]: I0122 12:17:52.256995 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1683acba-c129-4a7c-866c-421cdb0e6505-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"1683acba-c129-4a7c-866c-421cdb0e6505\") " pod="openstack/nova-cell0-conductor-0" Jan 22 12:17:52 crc kubenswrapper[4773]: I0122 12:17:52.257847 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1683acba-c129-4a7c-866c-421cdb0e6505-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"1683acba-c129-4a7c-866c-421cdb0e6505\") " pod="openstack/nova-cell0-conductor-0" Jan 22 12:17:52 crc kubenswrapper[4773]: I0122 12:17:52.261962 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1683acba-c129-4a7c-866c-421cdb0e6505-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"1683acba-c129-4a7c-866c-421cdb0e6505\") " pod="openstack/nova-cell0-conductor-0" Jan 22 12:17:52 crc kubenswrapper[4773]: I0122 12:17:52.262785 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1683acba-c129-4a7c-866c-421cdb0e6505-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"1683acba-c129-4a7c-866c-421cdb0e6505\") " pod="openstack/nova-cell0-conductor-0" Jan 22 12:17:52 crc kubenswrapper[4773]: I0122 12:17:52.280313 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qg94q\" (UniqueName: \"kubernetes.io/projected/1683acba-c129-4a7c-866c-421cdb0e6505-kube-api-access-qg94q\") pod \"nova-cell0-conductor-0\" (UID: \"1683acba-c129-4a7c-866c-421cdb0e6505\") " pod="openstack/nova-cell0-conductor-0" Jan 22 12:17:52 crc kubenswrapper[4773]: I0122 12:17:52.514117 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 22 12:17:52 crc kubenswrapper[4773]: I0122 12:17:52.894906 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d428f354-8bb3-4be8-9108-407bb678e12b","Type":"ContainerStarted","Data":"2c8adec33450b05d1ad0f50c408bdd0def6742b06830e97106fd7c0e2ae463d6"} Jan 22 12:17:53 crc kubenswrapper[4773]: I0122 12:17:53.053988 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 12:17:53 crc kubenswrapper[4773]: I0122 12:17:53.928683 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"1683acba-c129-4a7c-866c-421cdb0e6505","Type":"ContainerStarted","Data":"8f2f6fe57896d8e3136683fd067e394563605022d36d5828817ffbe5148b30a0"} Jan 22 12:17:53 crc kubenswrapper[4773]: I0122 12:17:53.929534 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"1683acba-c129-4a7c-866c-421cdb0e6505","Type":"ContainerStarted","Data":"61976de158339a4cc841c341aaa4bdc66e39f398875b71d0a1ee61f7639234b8"} Jan 22 12:17:53 crc kubenswrapper[4773]: I0122 12:17:53.929784 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Jan 22 12:17:53 crc kubenswrapper[4773]: I0122 12:17:53.954100 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.954075036 podStartE2EDuration="2.954075036s" podCreationTimestamp="2026-01-22 12:17:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:17:53.945794503 +0000 UTC m=+1381.523910338" watchObservedRunningTime="2026-01-22 12:17:53.954075036 +0000 UTC m=+1381.532190861" Jan 22 12:17:54 crc kubenswrapper[4773]: I0122 12:17:54.291904 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mvmfj" Jan 22 12:17:54 crc kubenswrapper[4773]: I0122 12:17:54.291970 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mvmfj" Jan 22 12:17:55 crc kubenswrapper[4773]: I0122 12:17:55.343835 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-mvmfj" podUID="35b8ff0c-2b27-46ce-8c24-32794fd745a1" containerName="registry-server" probeResult="failure" output=< Jan 22 12:17:55 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 12:17:55 crc kubenswrapper[4773]: > Jan 22 12:17:56 crc kubenswrapper[4773]: I0122 12:17:56.086051 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 22 12:17:56 crc kubenswrapper[4773]: I0122 12:17:56.087374 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 22 12:17:56 crc kubenswrapper[4773]: I0122 12:17:56.124863 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 22 12:17:56 crc kubenswrapper[4773]: I0122 12:17:56.132040 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 22 12:17:56 crc kubenswrapper[4773]: I0122 12:17:56.962316 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d428f354-8bb3-4be8-9108-407bb678e12b","Type":"ContainerStarted","Data":"581d8807aec14b7661f74e6aef0378067e0264ee2d530d5ccb033ffd34fffe26"} Jan 22 12:17:56 crc kubenswrapper[4773]: I0122 12:17:56.962654 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 22 12:17:56 crc kubenswrapper[4773]: I0122 12:17:56.962677 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 22 12:17:56 crc kubenswrapper[4773]: I0122 12:17:56.962689 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d428f354-8bb3-4be8-9108-407bb678e12b","Type":"ContainerStarted","Data":"84af1d03e405cd95144d60b6c27083ec211a353857ee7124c9819d5d535feb60"} Jan 22 12:17:57 crc kubenswrapper[4773]: I0122 12:17:57.466336 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 22 12:17:57 crc kubenswrapper[4773]: I0122 12:17:57.466393 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 22 12:17:57 crc kubenswrapper[4773]: I0122 12:17:57.498890 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 22 12:17:57 crc kubenswrapper[4773]: I0122 12:17:57.521771 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 22 12:17:57 crc kubenswrapper[4773]: I0122 12:17:57.972845 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 22 12:17:57 crc kubenswrapper[4773]: I0122 12:17:57.973431 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 22 12:17:58 crc kubenswrapper[4773]: I0122 12:17:58.697630 4773 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","poda3132a41-c2bd-4cbb-b319-25982363decb"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort poda3132a41-c2bd-4cbb-b319-25982363decb] : Timed out while waiting for systemd to remove kubepods-besteffort-poda3132a41_c2bd_4cbb_b319_25982363decb.slice" Jan 22 12:17:58 crc kubenswrapper[4773]: I0122 12:17:58.988701 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d428f354-8bb3-4be8-9108-407bb678e12b","Type":"ContainerStarted","Data":"8139cdac837c382d73bbe0222e30ffe30349a6852bc6d291b80b1d6ffd2635ea"} Jan 22 12:17:59 crc kubenswrapper[4773]: I0122 12:17:59.016857 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.913566575 podStartE2EDuration="10.016832587s" podCreationTimestamp="2026-01-22 12:17:49 +0000 UTC" firstStartedPulling="2026-01-22 12:17:50.03203936 
+0000 UTC m=+1377.610155185" lastFinishedPulling="2026-01-22 12:17:58.135305372 +0000 UTC m=+1385.713421197" observedRunningTime="2026-01-22 12:17:59.0080522 +0000 UTC m=+1386.586168055" watchObservedRunningTime="2026-01-22 12:17:59.016832587 +0000 UTC m=+1386.594948412" Jan 22 12:17:59 crc kubenswrapper[4773]: I0122 12:17:59.243149 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 22 12:17:59 crc kubenswrapper[4773]: I0122 12:17:59.243576 4773 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 12:17:59 crc kubenswrapper[4773]: I0122 12:17:59.244229 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 22 12:17:59 crc kubenswrapper[4773]: I0122 12:17:59.997235 4773 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 12:17:59 crc kubenswrapper[4773]: I0122 12:17:59.997271 4773 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 12:17:59 crc kubenswrapper[4773]: I0122 12:17:59.997912 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 22 12:18:00 crc kubenswrapper[4773]: I0122 12:18:00.304171 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 22 12:18:00 crc kubenswrapper[4773]: I0122 12:18:00.342211 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 22 12:18:02 crc kubenswrapper[4773]: I0122 12:18:02.541802 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.118144 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-b7znb"] Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.119836 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-b7znb" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.123840 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.124084 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.137758 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-b7znb"] Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.148147 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8d8e91e-77e9-430e-b7c3-da6898073d0a-config-data\") pod \"nova-cell0-cell-mapping-b7znb\" (UID: \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\") " pod="openstack/nova-cell0-cell-mapping-b7znb" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.148212 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktsg2\" (UniqueName: \"kubernetes.io/projected/b8d8e91e-77e9-430e-b7c3-da6898073d0a-kube-api-access-ktsg2\") pod \"nova-cell0-cell-mapping-b7znb\" (UID: \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\") " pod="openstack/nova-cell0-cell-mapping-b7znb" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.148260 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8d8e91e-77e9-430e-b7c3-da6898073d0a-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-b7znb\" (UID: \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\") " pod="openstack/nova-cell0-cell-mapping-b7znb" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.148391 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8d8e91e-77e9-430e-b7c3-da6898073d0a-scripts\") pod \"nova-cell0-cell-mapping-b7znb\" (UID: \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\") " pod="openstack/nova-cell0-cell-mapping-b7znb" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.250457 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8d8e91e-77e9-430e-b7c3-da6898073d0a-config-data\") pod \"nova-cell0-cell-mapping-b7znb\" (UID: \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\") " pod="openstack/nova-cell0-cell-mapping-b7znb" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.250523 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktsg2\" (UniqueName: \"kubernetes.io/projected/b8d8e91e-77e9-430e-b7c3-da6898073d0a-kube-api-access-ktsg2\") pod \"nova-cell0-cell-mapping-b7znb\" (UID: \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\") " pod="openstack/nova-cell0-cell-mapping-b7znb" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.250573 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8d8e91e-77e9-430e-b7c3-da6898073d0a-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-b7znb\" (UID: \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\") " pod="openstack/nova-cell0-cell-mapping-b7znb" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.250630 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/b8d8e91e-77e9-430e-b7c3-da6898073d0a-scripts\") pod \"nova-cell0-cell-mapping-b7znb\" (UID: \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\") " pod="openstack/nova-cell0-cell-mapping-b7znb" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.258433 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8d8e91e-77e9-430e-b7c3-da6898073d0a-config-data\") pod \"nova-cell0-cell-mapping-b7znb\" (UID: \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\") " pod="openstack/nova-cell0-cell-mapping-b7znb" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.274055 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8d8e91e-77e9-430e-b7c3-da6898073d0a-scripts\") pod \"nova-cell0-cell-mapping-b7znb\" (UID: \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\") " pod="openstack/nova-cell0-cell-mapping-b7znb" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.275577 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8d8e91e-77e9-430e-b7c3-da6898073d0a-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-b7znb\" (UID: \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\") " pod="openstack/nova-cell0-cell-mapping-b7znb" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.311926 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktsg2\" (UniqueName: \"kubernetes.io/projected/b8d8e91e-77e9-430e-b7c3-da6898073d0a-kube-api-access-ktsg2\") pod \"nova-cell0-cell-mapping-b7znb\" (UID: \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\") " pod="openstack/nova-cell0-cell-mapping-b7znb" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.331437 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.334053 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.337529 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.344004 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.377329 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.386578 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.390520 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.437497 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.439135 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.440218 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-b7znb" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.452854 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.458362 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f03f64d9-1e8a-43ae-bc50-762a379cf20b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f03f64d9-1e8a-43ae-bc50-762a379cf20b\") " pod="openstack/nova-scheduler-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.458795 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bda04167-ca9d-4b3f-a3b3-9f504d52539f-config-data\") pod \"nova-api-0\" (UID: \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\") " pod="openstack/nova-api-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.459895 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bda04167-ca9d-4b3f-a3b3-9f504d52539f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\") " pod="openstack/nova-api-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.460065 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7zxh\" (UniqueName: \"kubernetes.io/projected/bda04167-ca9d-4b3f-a3b3-9f504d52539f-kube-api-access-g7zxh\") pod \"nova-api-0\" (UID: \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\") " pod="openstack/nova-api-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.460218 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bda04167-ca9d-4b3f-a3b3-9f504d52539f-logs\") pod \"nova-api-0\" (UID: \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\") " pod="openstack/nova-api-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.460336 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f03f64d9-1e8a-43ae-bc50-762a379cf20b-config-data\") pod \"nova-scheduler-0\" (UID: \"f03f64d9-1e8a-43ae-bc50-762a379cf20b\") " pod="openstack/nova-scheduler-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.460469 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwt7g\" (UniqueName: \"kubernetes.io/projected/f03f64d9-1e8a-43ae-bc50-762a379cf20b-kube-api-access-bwt7g\") pod \"nova-scheduler-0\" (UID: \"f03f64d9-1e8a-43ae-bc50-762a379cf20b\") " pod="openstack/nova-scheduler-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.476473 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.511484 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.564106 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwt7g\" (UniqueName: \"kubernetes.io/projected/f03f64d9-1e8a-43ae-bc50-762a379cf20b-kube-api-access-bwt7g\") pod \"nova-scheduler-0\" (UID: \"f03f64d9-1e8a-43ae-bc50-762a379cf20b\") " 
pod="openstack/nova-scheduler-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.564191 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22d6745b-199e-4c87-835a-e3bc633b67cb-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"22d6745b-199e-4c87-835a-e3bc633b67cb\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.564278 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f03f64d9-1e8a-43ae-bc50-762a379cf20b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f03f64d9-1e8a-43ae-bc50-762a379cf20b\") " pod="openstack/nova-scheduler-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.564395 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvtcj\" (UniqueName: \"kubernetes.io/projected/22d6745b-199e-4c87-835a-e3bc633b67cb-kube-api-access-lvtcj\") pod \"nova-cell1-novncproxy-0\" (UID: \"22d6745b-199e-4c87-835a-e3bc633b67cb\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.564431 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22d6745b-199e-4c87-835a-e3bc633b67cb-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"22d6745b-199e-4c87-835a-e3bc633b67cb\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.564459 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bda04167-ca9d-4b3f-a3b3-9f504d52539f-config-data\") pod \"nova-api-0\" (UID: \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\") " pod="openstack/nova-api-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.564571 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bda04167-ca9d-4b3f-a3b3-9f504d52539f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\") " pod="openstack/nova-api-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.564605 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7zxh\" (UniqueName: \"kubernetes.io/projected/bda04167-ca9d-4b3f-a3b3-9f504d52539f-kube-api-access-g7zxh\") pod \"nova-api-0\" (UID: \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\") " pod="openstack/nova-api-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.564666 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bda04167-ca9d-4b3f-a3b3-9f504d52539f-logs\") pod \"nova-api-0\" (UID: \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\") " pod="openstack/nova-api-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.564690 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f03f64d9-1e8a-43ae-bc50-762a379cf20b-config-data\") pod \"nova-scheduler-0\" (UID: \"f03f64d9-1e8a-43ae-bc50-762a379cf20b\") " pod="openstack/nova-scheduler-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.569643 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/bda04167-ca9d-4b3f-a3b3-9f504d52539f-logs\") pod \"nova-api-0\" (UID: \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\") " pod="openstack/nova-api-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.572841 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bda04167-ca9d-4b3f-a3b3-9f504d52539f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\") " pod="openstack/nova-api-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.589239 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bda04167-ca9d-4b3f-a3b3-9f504d52539f-config-data\") pod \"nova-api-0\" (UID: \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\") " pod="openstack/nova-api-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.612634 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f03f64d9-1e8a-43ae-bc50-762a379cf20b-config-data\") pod \"nova-scheduler-0\" (UID: \"f03f64d9-1e8a-43ae-bc50-762a379cf20b\") " pod="openstack/nova-scheduler-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.616570 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.617519 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f03f64d9-1e8a-43ae-bc50-762a379cf20b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f03f64d9-1e8a-43ae-bc50-762a379cf20b\") " pod="openstack/nova-scheduler-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.618139 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.623088 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7zxh\" (UniqueName: \"kubernetes.io/projected/bda04167-ca9d-4b3f-a3b3-9f504d52539f-kube-api-access-g7zxh\") pod \"nova-api-0\" (UID: \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\") " pod="openstack/nova-api-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.629330 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwt7g\" (UniqueName: \"kubernetes.io/projected/f03f64d9-1e8a-43ae-bc50-762a379cf20b-kube-api-access-bwt7g\") pod \"nova-scheduler-0\" (UID: \"f03f64d9-1e8a-43ae-bc50-762a379cf20b\") " pod="openstack/nova-scheduler-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.630802 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.674001 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvtcj\" (UniqueName: \"kubernetes.io/projected/22d6745b-199e-4c87-835a-e3bc633b67cb-kube-api-access-lvtcj\") pod \"nova-cell1-novncproxy-0\" (UID: \"22d6745b-199e-4c87-835a-e3bc633b67cb\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.674042 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22d6745b-199e-4c87-835a-e3bc633b67cb-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"22d6745b-199e-4c87-835a-e3bc633b67cb\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.674086 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c3c2963-896c-4841-9148-16e573d4b678-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3c3c2963-896c-4841-9148-16e573d4b678\") " pod="openstack/nova-metadata-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.674159 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vrzm\" (UniqueName: \"kubernetes.io/projected/3c3c2963-896c-4841-9148-16e573d4b678-kube-api-access-5vrzm\") pod \"nova-metadata-0\" (UID: \"3c3c2963-896c-4841-9148-16e573d4b678\") " pod="openstack/nova-metadata-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.674201 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c3c2963-896c-4841-9148-16e573d4b678-logs\") pod \"nova-metadata-0\" (UID: \"3c3c2963-896c-4841-9148-16e573d4b678\") " pod="openstack/nova-metadata-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.674251 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22d6745b-199e-4c87-835a-e3bc633b67cb-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"22d6745b-199e-4c87-835a-e3bc633b67cb\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.674803 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c3c2963-896c-4841-9148-16e573d4b678-config-data\") pod \"nova-metadata-0\" (UID: 
\"3c3c2963-896c-4841-9148-16e573d4b678\") " pod="openstack/nova-metadata-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.681615 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22d6745b-199e-4c87-835a-e3bc633b67cb-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"22d6745b-199e-4c87-835a-e3bc633b67cb\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.691157 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22d6745b-199e-4c87-835a-e3bc633b67cb-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"22d6745b-199e-4c87-835a-e3bc633b67cb\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.696355 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.701092 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvtcj\" (UniqueName: \"kubernetes.io/projected/22d6745b-199e-4c87-835a-e3bc633b67cb-kube-api-access-lvtcj\") pod \"nova-cell1-novncproxy-0\" (UID: \"22d6745b-199e-4c87-835a-e3bc633b67cb\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.772593 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.776310 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-647df7b8c5-vsznr"] Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.778343 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.780082 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c3c2963-896c-4841-9148-16e573d4b678-config-data\") pod \"nova-metadata-0\" (UID: \"3c3c2963-896c-4841-9148-16e573d4b678\") " pod="openstack/nova-metadata-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.780162 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c3c2963-896c-4841-9148-16e573d4b678-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3c3c2963-896c-4841-9148-16e573d4b678\") " pod="openstack/nova-metadata-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.780224 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vrzm\" (UniqueName: \"kubernetes.io/projected/3c3c2963-896c-4841-9148-16e573d4b678-kube-api-access-5vrzm\") pod \"nova-metadata-0\" (UID: \"3c3c2963-896c-4841-9148-16e573d4b678\") " pod="openstack/nova-metadata-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.780275 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c3c2963-896c-4841-9148-16e573d4b678-logs\") pod \"nova-metadata-0\" (UID: \"3c3c2963-896c-4841-9148-16e573d4b678\") " pod="openstack/nova-metadata-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.780709 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c3c2963-896c-4841-9148-16e573d4b678-logs\") pod \"nova-metadata-0\" (UID: \"3c3c2963-896c-4841-9148-16e573d4b678\") " pod="openstack/nova-metadata-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.784807 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-647df7b8c5-vsznr"] Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.788516 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.791195 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c3c2963-896c-4841-9148-16e573d4b678-config-data\") pod \"nova-metadata-0\" (UID: \"3c3c2963-896c-4841-9148-16e573d4b678\") " pod="openstack/nova-metadata-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.797498 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c3c2963-896c-4841-9148-16e573d4b678-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3c3c2963-896c-4841-9148-16e573d4b678\") " pod="openstack/nova-metadata-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.803719 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.825015 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vrzm\" (UniqueName: \"kubernetes.io/projected/3c3c2963-896c-4841-9148-16e573d4b678-kube-api-access-5vrzm\") pod \"nova-metadata-0\" (UID: \"3c3c2963-896c-4841-9148-16e573d4b678\") " pod="openstack/nova-metadata-0" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.884605 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-config\") pod \"dnsmasq-dns-647df7b8c5-vsznr\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.884900 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-ovsdbserver-nb\") pod \"dnsmasq-dns-647df7b8c5-vsznr\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.884966 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-ovsdbserver-sb\") pod \"dnsmasq-dns-647df7b8c5-vsznr\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.884985 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnjjb\" (UniqueName: \"kubernetes.io/projected/62748add-a2e1-49d7-bce6-3ce301867493-kube-api-access-pnjjb\") pod \"dnsmasq-dns-647df7b8c5-vsznr\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.885025 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-dns-swift-storage-0\") pod \"dnsmasq-dns-647df7b8c5-vsznr\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.885207 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-dns-svc\") pod \"dnsmasq-dns-647df7b8c5-vsznr\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.986870 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-config\") pod \"dnsmasq-dns-647df7b8c5-vsznr\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.987042 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-ovsdbserver-nb\") pod \"dnsmasq-dns-647df7b8c5-vsznr\" (UID: 
\"62748add-a2e1-49d7-bce6-3ce301867493\") " pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.987078 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-ovsdbserver-sb\") pod \"dnsmasq-dns-647df7b8c5-vsznr\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.987103 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnjjb\" (UniqueName: \"kubernetes.io/projected/62748add-a2e1-49d7-bce6-3ce301867493-kube-api-access-pnjjb\") pod \"dnsmasq-dns-647df7b8c5-vsznr\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.987134 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-dns-swift-storage-0\") pod \"dnsmasq-dns-647df7b8c5-vsznr\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.987188 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-dns-svc\") pod \"dnsmasq-dns-647df7b8c5-vsznr\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.988010 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-config\") pod \"dnsmasq-dns-647df7b8c5-vsznr\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.988573 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-ovsdbserver-sb\") pod \"dnsmasq-dns-647df7b8c5-vsznr\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.988713 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-ovsdbserver-nb\") pod \"dnsmasq-dns-647df7b8c5-vsznr\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.989368 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-dns-svc\") pod \"dnsmasq-dns-647df7b8c5-vsznr\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.989731 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-dns-swift-storage-0\") pod \"dnsmasq-dns-647df7b8c5-vsznr\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 
Jan 22 12:18:03 crc kubenswrapper[4773]: I0122 12:18:03.996505 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.019537 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnjjb\" (UniqueName: \"kubernetes.io/projected/62748add-a2e1-49d7-bce6-3ce301867493-kube-api-access-pnjjb\") pod \"dnsmasq-dns-647df7b8c5-vsznr\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " pod="openstack/dnsmasq-dns-647df7b8c5-vsznr"
Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.073967 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.076404 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.076450 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5"
Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.077185 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e8c9c494f3234f8ef97d1206589812616f50e7a5ed49d844fa29e97c95590447"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.077243 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://e8c9c494f3234f8ef97d1206589812616f50e7a5ed49d844fa29e97c95590447" gracePeriod=600
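Editor's note: the liveness failure above is a plain HTTP GET against http://127.0.0.1:8798/health getting connection refused, after which the kubelet kills the container with its 600s grace period so it can be restarted. A minimal Go sketch of the kind of health endpoint such a probe expects; this is a hypothetical handler, not the machine-config-daemon's real one:

package main

import (
	"log"
	"net/http"
)

func main() {
	mux := http.NewServeMux()
	// An HTTP probe only needs a 2xx answer on /health to pass.
	mux.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	})
	// Bound to 127.0.0.1:8798, matching the probe URL in the log; while this
	// listener is down, the probe sees "connect: connection refused".
	log.Fatal(http.ListenAndServe("127.0.0.1:8798", mux))
}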
Need to start a new one" pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.348971 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-b7znb"] Jan 22 12:18:04 crc kubenswrapper[4773]: W0122 12:18:04.357100 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8d8e91e_77e9_430e_b7c3_da6898073d0a.slice/crio-af2eac9bd539470b28a19c72ea215eab8cb641d1275dd8b27f587189c4dda089 WatchSource:0}: Error finding container af2eac9bd539470b28a19c72ea215eab8cb641d1275dd8b27f587189c4dda089: Status 404 returned error can't find the container with id af2eac9bd539470b28a19c72ea215eab8cb641d1275dd8b27f587189c4dda089 Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.387029 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mvmfj" Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.478034 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jh5jz"] Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.479585 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-jh5jz" Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.483039 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.483449 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.489643 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mvmfj" Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.495220 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jh5jz"] Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.619564 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9031d071-6292-4367-abab-a0be539a3914-config-data\") pod \"nova-cell1-conductor-db-sync-jh5jz\" (UID: \"9031d071-6292-4367-abab-a0be539a3914\") " pod="openstack/nova-cell1-conductor-db-sync-jh5jz" Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.619981 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9031d071-6292-4367-abab-a0be539a3914-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-jh5jz\" (UID: \"9031d071-6292-4367-abab-a0be539a3914\") " pod="openstack/nova-cell1-conductor-db-sync-jh5jz" Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.620052 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z5wk\" (UniqueName: \"kubernetes.io/projected/9031d071-6292-4367-abab-a0be539a3914-kube-api-access-7z5wk\") pod \"nova-cell1-conductor-db-sync-jh5jz\" (UID: \"9031d071-6292-4367-abab-a0be539a3914\") " pod="openstack/nova-cell1-conductor-db-sync-jh5jz" Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.620091 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/9031d071-6292-4367-abab-a0be539a3914-scripts\") pod \"nova-cell1-conductor-db-sync-jh5jz\" (UID: \"9031d071-6292-4367-abab-a0be539a3914\") " pod="openstack/nova-cell1-conductor-db-sync-jh5jz" Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.726474 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mvmfj"] Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.729907 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9031d071-6292-4367-abab-a0be539a3914-config-data\") pod \"nova-cell1-conductor-db-sync-jh5jz\" (UID: \"9031d071-6292-4367-abab-a0be539a3914\") " pod="openstack/nova-cell1-conductor-db-sync-jh5jz" Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.730085 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9031d071-6292-4367-abab-a0be539a3914-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-jh5jz\" (UID: \"9031d071-6292-4367-abab-a0be539a3914\") " pod="openstack/nova-cell1-conductor-db-sync-jh5jz" Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.730228 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z5wk\" (UniqueName: \"kubernetes.io/projected/9031d071-6292-4367-abab-a0be539a3914-kube-api-access-7z5wk\") pod \"nova-cell1-conductor-db-sync-jh5jz\" (UID: \"9031d071-6292-4367-abab-a0be539a3914\") " pod="openstack/nova-cell1-conductor-db-sync-jh5jz" Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.730299 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9031d071-6292-4367-abab-a0be539a3914-scripts\") pod \"nova-cell1-conductor-db-sync-jh5jz\" (UID: \"9031d071-6292-4367-abab-a0be539a3914\") " pod="openstack/nova-cell1-conductor-db-sync-jh5jz" Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.750144 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9031d071-6292-4367-abab-a0be539a3914-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-jh5jz\" (UID: \"9031d071-6292-4367-abab-a0be539a3914\") " pod="openstack/nova-cell1-conductor-db-sync-jh5jz" Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.750272 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9031d071-6292-4367-abab-a0be539a3914-config-data\") pod \"nova-cell1-conductor-db-sync-jh5jz\" (UID: \"9031d071-6292-4367-abab-a0be539a3914\") " pod="openstack/nova-cell1-conductor-db-sync-jh5jz" Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.762648 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9031d071-6292-4367-abab-a0be539a3914-scripts\") pod \"nova-cell1-conductor-db-sync-jh5jz\" (UID: \"9031d071-6292-4367-abab-a0be539a3914\") " pod="openstack/nova-cell1-conductor-db-sync-jh5jz" Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.769354 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.770117 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z5wk\" (UniqueName: 
\"kubernetes.io/projected/9031d071-6292-4367-abab-a0be539a3914-kube-api-access-7z5wk\") pod \"nova-cell1-conductor-db-sync-jh5jz\" (UID: \"9031d071-6292-4367-abab-a0be539a3914\") " pod="openstack/nova-cell1-conductor-db-sync-jh5jz" Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.792415 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.804489 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 12:18:04 crc kubenswrapper[4773]: W0122 12:18:04.806506 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod22d6745b_199e_4c87_835a_e3bc633b67cb.slice/crio-7df95a5bf4b203f5d192f97d14c8c24918a7e9a036ef96cce7c94976d790b113 WatchSource:0}: Error finding container 7df95a5bf4b203f5d192f97d14c8c24918a7e9a036ef96cce7c94976d790b113: Status 404 returned error can't find the container with id 7df95a5bf4b203f5d192f97d14c8c24918a7e9a036ef96cce7c94976d790b113 Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.834253 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-jh5jz" Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.868395 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 12:18:04 crc kubenswrapper[4773]: W0122 12:18:04.880021 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c3c2963_896c_4841_9148_16e573d4b678.slice/crio-18a585ebf71b6a87b5fd46f5d62fe20ba12c63f90657e748293ee794d200cc53 WatchSource:0}: Error finding container 18a585ebf71b6a87b5fd46f5d62fe20ba12c63f90657e748293ee794d200cc53: Status 404 returned error can't find the container with id 18a585ebf71b6a87b5fd46f5d62fe20ba12c63f90657e748293ee794d200cc53 Jan 22 12:18:04 crc kubenswrapper[4773]: I0122 12:18:04.971831 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-647df7b8c5-vsznr"] Jan 22 12:18:04 crc kubenswrapper[4773]: W0122 12:18:04.990450 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62748add_a2e1_49d7_bce6_3ce301867493.slice/crio-6eb0742fb70addcd4c0e63a2b1709e4c9b8a2b580328e69ad9aeff6f61e940b2 WatchSource:0}: Error finding container 6eb0742fb70addcd4c0e63a2b1709e4c9b8a2b580328e69ad9aeff6f61e940b2: Status 404 returned error can't find the container with id 6eb0742fb70addcd4c0e63a2b1709e4c9b8a2b580328e69ad9aeff6f61e940b2 Jan 22 12:18:05 crc kubenswrapper[4773]: I0122 12:18:05.102960 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"22d6745b-199e-4c87-835a-e3bc633b67cb","Type":"ContainerStarted","Data":"7df95a5bf4b203f5d192f97d14c8c24918a7e9a036ef96cce7c94976d790b113"} Jan 22 12:18:05 crc kubenswrapper[4773]: I0122 12:18:05.119157 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3c3c2963-896c-4841-9148-16e573d4b678","Type":"ContainerStarted","Data":"18a585ebf71b6a87b5fd46f5d62fe20ba12c63f90657e748293ee794d200cc53"} Jan 22 12:18:05 crc kubenswrapper[4773]: I0122 12:18:05.121470 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" 
event={"ID":"62748add-a2e1-49d7-bce6-3ce301867493","Type":"ContainerStarted","Data":"6eb0742fb70addcd4c0e63a2b1709e4c9b8a2b580328e69ad9aeff6f61e940b2"} Jan 22 12:18:05 crc kubenswrapper[4773]: I0122 12:18:05.146364 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="e8c9c494f3234f8ef97d1206589812616f50e7a5ed49d844fa29e97c95590447" exitCode=0 Jan 22 12:18:05 crc kubenswrapper[4773]: I0122 12:18:05.147595 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"e8c9c494f3234f8ef97d1206589812616f50e7a5ed49d844fa29e97c95590447"} Jan 22 12:18:05 crc kubenswrapper[4773]: I0122 12:18:05.147658 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad"} Jan 22 12:18:05 crc kubenswrapper[4773]: I0122 12:18:05.147688 4773 scope.go:117] "RemoveContainer" containerID="7e399a29260ad477e9daad28c8b04af4cd0be5f90f0da32bc9266d74b4dcc71d" Jan 22 12:18:05 crc kubenswrapper[4773]: I0122 12:18:05.150033 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bda04167-ca9d-4b3f-a3b3-9f504d52539f","Type":"ContainerStarted","Data":"73d859f0b20d7b964f88dc1cc74fabc2b009e99bc8b3fa164453548f45a1bbba"} Jan 22 12:18:05 crc kubenswrapper[4773]: I0122 12:18:05.153701 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-b7znb" event={"ID":"b8d8e91e-77e9-430e-b7c3-da6898073d0a","Type":"ContainerStarted","Data":"9d84b246a68941e41889b1877df1080c54dc73d0b7df6ecbeb2af3a992b9f448"} Jan 22 12:18:05 crc kubenswrapper[4773]: I0122 12:18:05.153838 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-b7znb" event={"ID":"b8d8e91e-77e9-430e-b7c3-da6898073d0a","Type":"ContainerStarted","Data":"af2eac9bd539470b28a19c72ea215eab8cb641d1275dd8b27f587189c4dda089"} Jan 22 12:18:05 crc kubenswrapper[4773]: I0122 12:18:05.156849 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f03f64d9-1e8a-43ae-bc50-762a379cf20b","Type":"ContainerStarted","Data":"49bde181871f7e942e66a46636b36a91fd714453e257ff6d98df64a4a8f1d8c8"} Jan 22 12:18:05 crc kubenswrapper[4773]: I0122 12:18:05.265506 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-b7znb" podStartSLOduration=2.265476804 podStartE2EDuration="2.265476804s" podCreationTimestamp="2026-01-22 12:18:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:18:05.229605755 +0000 UTC m=+1392.807721580" watchObservedRunningTime="2026-01-22 12:18:05.265476804 +0000 UTC m=+1392.843592629" Jan 22 12:18:05 crc kubenswrapper[4773]: I0122 12:18:05.421560 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jh5jz"] Jan 22 12:18:06 crc kubenswrapper[4773]: I0122 12:18:06.190709 4773 generic.go:334] "Generic (PLEG): container finished" podID="62748add-a2e1-49d7-bce6-3ce301867493" containerID="f51b77c36373f75f69e2c76f7e1cbb23b3add72fc954a2dbb38ddedc16f4bf93" exitCode=0 Jan 22 12:18:06 crc kubenswrapper[4773]: I0122 12:18:06.191124 
4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" event={"ID":"62748add-a2e1-49d7-bce6-3ce301867493","Type":"ContainerDied","Data":"f51b77c36373f75f69e2c76f7e1cbb23b3add72fc954a2dbb38ddedc16f4bf93"} Jan 22 12:18:06 crc kubenswrapper[4773]: I0122 12:18:06.203397 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-jh5jz" event={"ID":"9031d071-6292-4367-abab-a0be539a3914","Type":"ContainerStarted","Data":"9d65f24b9bce874786b86a52e97303e7993818c6f384ef400144e67856e5049f"} Jan 22 12:18:06 crc kubenswrapper[4773]: I0122 12:18:06.203483 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-jh5jz" event={"ID":"9031d071-6292-4367-abab-a0be539a3914","Type":"ContainerStarted","Data":"c5324fb6aeb0e489c860941f8d4eca074945cf3411bcad28d4271c99be1e2879"} Jan 22 12:18:06 crc kubenswrapper[4773]: I0122 12:18:06.207823 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mvmfj" podUID="35b8ff0c-2b27-46ce-8c24-32794fd745a1" containerName="registry-server" containerID="cri-o://725c252fdefeb7e5f7b42f821862d39bc045416978fed416610658e475ae8d85" gracePeriod=2 Jan 22 12:18:06 crc kubenswrapper[4773]: I0122 12:18:06.250662 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-jh5jz" podStartSLOduration=2.250643305 podStartE2EDuration="2.250643305s" podCreationTimestamp="2026-01-22 12:18:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:18:06.247497406 +0000 UTC m=+1393.825613241" watchObservedRunningTime="2026-01-22 12:18:06.250643305 +0000 UTC m=+1393.828759130" Jan 22 12:18:07 crc kubenswrapper[4773]: I0122 12:18:07.226094 4773 generic.go:334] "Generic (PLEG): container finished" podID="35b8ff0c-2b27-46ce-8c24-32794fd745a1" containerID="725c252fdefeb7e5f7b42f821862d39bc045416978fed416610658e475ae8d85" exitCode=0 Jan 22 12:18:07 crc kubenswrapper[4773]: I0122 12:18:07.226757 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvmfj" event={"ID":"35b8ff0c-2b27-46ce-8c24-32794fd745a1","Type":"ContainerDied","Data":"725c252fdefeb7e5f7b42f821862d39bc045416978fed416610658e475ae8d85"} Jan 22 12:18:07 crc kubenswrapper[4773]: I0122 12:18:07.665548 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 12:18:07 crc kubenswrapper[4773]: I0122 12:18:07.697456 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 12:18:08 crc kubenswrapper[4773]: I0122 12:18:08.766111 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mvmfj" Jan 22 12:18:08 crc kubenswrapper[4773]: I0122 12:18:08.865749 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbcxt\" (UniqueName: \"kubernetes.io/projected/35b8ff0c-2b27-46ce-8c24-32794fd745a1-kube-api-access-wbcxt\") pod \"35b8ff0c-2b27-46ce-8c24-32794fd745a1\" (UID: \"35b8ff0c-2b27-46ce-8c24-32794fd745a1\") " Jan 22 12:18:08 crc kubenswrapper[4773]: I0122 12:18:08.866108 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35b8ff0c-2b27-46ce-8c24-32794fd745a1-catalog-content\") pod \"35b8ff0c-2b27-46ce-8c24-32794fd745a1\" (UID: \"35b8ff0c-2b27-46ce-8c24-32794fd745a1\") " Jan 22 12:18:08 crc kubenswrapper[4773]: I0122 12:18:08.866130 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35b8ff0c-2b27-46ce-8c24-32794fd745a1-utilities\") pod \"35b8ff0c-2b27-46ce-8c24-32794fd745a1\" (UID: \"35b8ff0c-2b27-46ce-8c24-32794fd745a1\") " Jan 22 12:18:08 crc kubenswrapper[4773]: I0122 12:18:08.867328 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35b8ff0c-2b27-46ce-8c24-32794fd745a1-utilities" (OuterVolumeSpecName: "utilities") pod "35b8ff0c-2b27-46ce-8c24-32794fd745a1" (UID: "35b8ff0c-2b27-46ce-8c24-32794fd745a1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:18:08 crc kubenswrapper[4773]: I0122 12:18:08.889407 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35b8ff0c-2b27-46ce-8c24-32794fd745a1-kube-api-access-wbcxt" (OuterVolumeSpecName: "kube-api-access-wbcxt") pod "35b8ff0c-2b27-46ce-8c24-32794fd745a1" (UID: "35b8ff0c-2b27-46ce-8c24-32794fd745a1"). InnerVolumeSpecName "kube-api-access-wbcxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:18:08 crc kubenswrapper[4773]: I0122 12:18:08.968533 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbcxt\" (UniqueName: \"kubernetes.io/projected/35b8ff0c-2b27-46ce-8c24-32794fd745a1-kube-api-access-wbcxt\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:08 crc kubenswrapper[4773]: I0122 12:18:08.968560 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35b8ff0c-2b27-46ce-8c24-32794fd745a1-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:08 crc kubenswrapper[4773]: I0122 12:18:08.985864 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35b8ff0c-2b27-46ce-8c24-32794fd745a1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "35b8ff0c-2b27-46ce-8c24-32794fd745a1" (UID: "35b8ff0c-2b27-46ce-8c24-32794fd745a1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.071055 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35b8ff0c-2b27-46ce-8c24-32794fd745a1-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.256744 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" event={"ID":"62748add-a2e1-49d7-bce6-3ce301867493","Type":"ContainerStarted","Data":"9ad4648c049a10c681cba63fa794e244bec1c0c21ef84df78273c43f319ce9f6"} Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.257218 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.258579 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3c3c2963-896c-4841-9148-16e573d4b678","Type":"ContainerStarted","Data":"b686d246d0a8cf9de82f312f514ede249020f303b6d960c4a02d1e1a09deb7c8"} Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.258612 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3c3c2963-896c-4841-9148-16e573d4b678","Type":"ContainerStarted","Data":"f0bd586169e1175bd72e0ef2b569aa17aa4b2ac6f2536ea8fb680e61c23f04a1"} Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.258744 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3c3c2963-896c-4841-9148-16e573d4b678" containerName="nova-metadata-log" containerID="cri-o://f0bd586169e1175bd72e0ef2b569aa17aa4b2ac6f2536ea8fb680e61c23f04a1" gracePeriod=30 Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.258848 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3c3c2963-896c-4841-9148-16e573d4b678" containerName="nova-metadata-metadata" containerID="cri-o://b686d246d0a8cf9de82f312f514ede249020f303b6d960c4a02d1e1a09deb7c8" gracePeriod=30 Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.264214 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bda04167-ca9d-4b3f-a3b3-9f504d52539f","Type":"ContainerStarted","Data":"f63370a477f68606030ea8df66a044976dbdc9dcb3a2cddbd3a295382a8b62cc"} Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.264265 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bda04167-ca9d-4b3f-a3b3-9f504d52539f","Type":"ContainerStarted","Data":"bea10f58e6a51db6feed23f6a77e5897dc2fe883d0eee03f34a3cd0bf08561b6"} Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.268625 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mvmfj" Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.268616 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvmfj" event={"ID":"35b8ff0c-2b27-46ce-8c24-32794fd745a1","Type":"ContainerDied","Data":"dcb68d7f52919a5e641ceeadba039afe3ed49dd3cb5a998cf96c389e5fecabe4"} Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.268780 4773 scope.go:117] "RemoveContainer" containerID="725c252fdefeb7e5f7b42f821862d39bc045416978fed416610658e475ae8d85" Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.275891 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f03f64d9-1e8a-43ae-bc50-762a379cf20b","Type":"ContainerStarted","Data":"8f1f477ab6b3649fabc5fecec0621a818d3d044e54901e2192ef9e84575498b4"} Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.281979 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"22d6745b-199e-4c87-835a-e3bc633b67cb","Type":"ContainerStarted","Data":"75756eaa172956b31b3f1c131ef68ec0ea677b6a7e06b13fb7b69fdee88d5e0b"} Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.282122 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="22d6745b-199e-4c87-835a-e3bc633b67cb" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://75756eaa172956b31b3f1c131ef68ec0ea677b6a7e06b13fb7b69fdee88d5e0b" gracePeriod=30 Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.291740 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" podStartSLOduration=6.291722721 podStartE2EDuration="6.291722721s" podCreationTimestamp="2026-01-22 12:18:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:18:09.280525686 +0000 UTC m=+1396.858641511" watchObservedRunningTime="2026-01-22 12:18:09.291722721 +0000 UTC m=+1396.869838546" Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.311923 4773 scope.go:117] "RemoveContainer" containerID="38f370dad119a4a4221dddd0b7d8a2d9daa3e13f9e9aee5e2d344c1fcb19d839" Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.313746 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.568395425 podStartE2EDuration="6.31372426s" podCreationTimestamp="2026-01-22 12:18:03 +0000 UTC" firstStartedPulling="2026-01-22 12:18:04.810080875 +0000 UTC m=+1392.388196700" lastFinishedPulling="2026-01-22 12:18:08.5554097 +0000 UTC m=+1396.133525535" observedRunningTime="2026-01-22 12:18:09.305715834 +0000 UTC m=+1396.883831659" watchObservedRunningTime="2026-01-22 12:18:09.31372426 +0000 UTC m=+1396.891840085" Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.344895 4773 scope.go:117] "RemoveContainer" containerID="d7a16cadbf308b4538c31fe143916568748fd2705d39fc7bdab52238289b82e0" Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.358989 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.688116361 podStartE2EDuration="6.358966632s" podCreationTimestamp="2026-01-22 12:18:03 +0000 UTC" firstStartedPulling="2026-01-22 12:18:04.88350298 +0000 UTC m=+1392.461618805" lastFinishedPulling="2026-01-22 12:18:08.554353251 +0000 UTC m=+1396.132469076" 
observedRunningTime="2026-01-22 12:18:09.340771651 +0000 UTC m=+1396.918887476" watchObservedRunningTime="2026-01-22 12:18:09.358966632 +0000 UTC m=+1396.937082457" Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.369517 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.64113543 podStartE2EDuration="6.369491908s" podCreationTimestamp="2026-01-22 12:18:03 +0000 UTC" firstStartedPulling="2026-01-22 12:18:04.81631537 +0000 UTC m=+1392.394431195" lastFinishedPulling="2026-01-22 12:18:08.544671848 +0000 UTC m=+1396.122787673" observedRunningTime="2026-01-22 12:18:09.356511043 +0000 UTC m=+1396.934626878" watchObservedRunningTime="2026-01-22 12:18:09.369491908 +0000 UTC m=+1396.947607733" Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.418186 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mvmfj"] Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.428358 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mvmfj"] Jan 22 12:18:09 crc kubenswrapper[4773]: I0122 12:18:09.431988 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.7096432679999998 podStartE2EDuration="6.431965466s" podCreationTimestamp="2026-01-22 12:18:03 +0000 UTC" firstStartedPulling="2026-01-22 12:18:04.822416942 +0000 UTC m=+1392.400532767" lastFinishedPulling="2026-01-22 12:18:08.54473914 +0000 UTC m=+1396.122854965" observedRunningTime="2026-01-22 12:18:09.396753905 +0000 UTC m=+1396.974869740" watchObservedRunningTime="2026-01-22 12:18:09.431965466 +0000 UTC m=+1397.010081291" Jan 22 12:18:10 crc kubenswrapper[4773]: I0122 12:18:10.296642 4773 generic.go:334] "Generic (PLEG): container finished" podID="3c3c2963-896c-4841-9148-16e573d4b678" containerID="f0bd586169e1175bd72e0ef2b569aa17aa4b2ac6f2536ea8fb680e61c23f04a1" exitCode=143 Jan 22 12:18:10 crc kubenswrapper[4773]: I0122 12:18:10.296758 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3c3c2963-896c-4841-9148-16e573d4b678","Type":"ContainerDied","Data":"f0bd586169e1175bd72e0ef2b569aa17aa4b2ac6f2536ea8fb680e61c23f04a1"} Jan 22 12:18:10 crc kubenswrapper[4773]: I0122 12:18:10.670501 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35b8ff0c-2b27-46ce-8c24-32794fd745a1" path="/var/lib/kubelet/pods/35b8ff0c-2b27-46ce-8c24-32794fd745a1/volumes" Jan 22 12:18:13 crc kubenswrapper[4773]: I0122 12:18:13.774609 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 12:18:13 crc kubenswrapper[4773]: I0122 12:18:13.774969 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 12:18:13 crc kubenswrapper[4773]: I0122 12:18:13.788918 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 22 12:18:13 crc kubenswrapper[4773]: I0122 12:18:13.789013 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 22 12:18:13 crc kubenswrapper[4773]: I0122 12:18:13.805485 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:13 crc kubenswrapper[4773]: I0122 12:18:13.822805 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/nova-scheduler-0" Jan 22 12:18:13 crc kubenswrapper[4773]: I0122 12:18:13.997527 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 12:18:13 crc kubenswrapper[4773]: I0122 12:18:13.997928 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.131273 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.234760 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75dbb546bf-xw2mz"] Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.235467 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" podUID="68b61a4c-4e01-4588-877f-91017ce2df24" containerName="dnsmasq-dns" containerID="cri-o://7efeecf21222a49c18fffb381c026b88c867704337a91777a215f1b5469f21b0" gracePeriod=10 Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.345135 4773 generic.go:334] "Generic (PLEG): container finished" podID="b8d8e91e-77e9-430e-b7c3-da6898073d0a" containerID="9d84b246a68941e41889b1877df1080c54dc73d0b7df6ecbeb2af3a992b9f448" exitCode=0 Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.345251 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-b7znb" event={"ID":"b8d8e91e-77e9-430e-b7c3-da6898073d0a","Type":"ContainerDied","Data":"9d84b246a68941e41889b1877df1080c54dc73d0b7df6ecbeb2af3a992b9f448"} Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.352555 4773 generic.go:334] "Generic (PLEG): container finished" podID="9031d071-6292-4367-abab-a0be539a3914" containerID="9d65f24b9bce874786b86a52e97303e7993818c6f384ef400144e67856e5049f" exitCode=0 Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.354237 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-jh5jz" event={"ID":"9031d071-6292-4367-abab-a0be539a3914","Type":"ContainerDied","Data":"9d65f24b9bce874786b86a52e97303e7993818c6f384ef400144e67856e5049f"} Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.410168 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.788312 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.811865 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-config\") pod \"68b61a4c-4e01-4588-877f-91017ce2df24\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.812236 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-dns-swift-storage-0\") pod \"68b61a4c-4e01-4588-877f-91017ce2df24\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.812432 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-dns-svc\") pod \"68b61a4c-4e01-4588-877f-91017ce2df24\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.812620 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9vvc\" (UniqueName: \"kubernetes.io/projected/68b61a4c-4e01-4588-877f-91017ce2df24-kube-api-access-k9vvc\") pod \"68b61a4c-4e01-4588-877f-91017ce2df24\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.812856 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-ovsdbserver-nb\") pod \"68b61a4c-4e01-4588-877f-91017ce2df24\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.813352 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-ovsdbserver-sb\") pod \"68b61a4c-4e01-4588-877f-91017ce2df24\" (UID: \"68b61a4c-4e01-4588-877f-91017ce2df24\") " Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.827187 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68b61a4c-4e01-4588-877f-91017ce2df24-kube-api-access-k9vvc" (OuterVolumeSpecName: "kube-api-access-k9vvc") pod "68b61a4c-4e01-4588-877f-91017ce2df24" (UID: "68b61a4c-4e01-4588-877f-91017ce2df24"). InnerVolumeSpecName "kube-api-access-k9vvc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.857484 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="bda04167-ca9d-4b3f-a3b3-9f504d52539f" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.189:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.857832 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="bda04167-ca9d-4b3f-a3b3-9f504d52539f" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.189:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.910268 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "68b61a4c-4e01-4588-877f-91017ce2df24" (UID: "68b61a4c-4e01-4588-877f-91017ce2df24"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.924036 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "68b61a4c-4e01-4588-877f-91017ce2df24" (UID: "68b61a4c-4e01-4588-877f-91017ce2df24"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.924804 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-config" (OuterVolumeSpecName: "config") pod "68b61a4c-4e01-4588-877f-91017ce2df24" (UID: "68b61a4c-4e01-4588-877f-91017ce2df24"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.931576 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.931606 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9vvc\" (UniqueName: \"kubernetes.io/projected/68b61a4c-4e01-4588-877f-91017ce2df24-kube-api-access-k9vvc\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.931616 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.931624 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.943818 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "68b61a4c-4e01-4588-877f-91017ce2df24" (UID: "68b61a4c-4e01-4588-877f-91017ce2df24"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:18:14 crc kubenswrapper[4773]: I0122 12:18:14.960943 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "68b61a4c-4e01-4588-877f-91017ce2df24" (UID: "68b61a4c-4e01-4588-877f-91017ce2df24"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:18:15 crc kubenswrapper[4773]: I0122 12:18:15.033982 4773 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:15 crc kubenswrapper[4773]: I0122 12:18:15.034031 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/68b61a4c-4e01-4588-877f-91017ce2df24-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:15 crc kubenswrapper[4773]: I0122 12:18:15.364204 4773 generic.go:334] "Generic (PLEG): container finished" podID="68b61a4c-4e01-4588-877f-91017ce2df24" containerID="7efeecf21222a49c18fffb381c026b88c867704337a91777a215f1b5469f21b0" exitCode=0 Jan 22 12:18:15 crc kubenswrapper[4773]: I0122 12:18:15.365338 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" Jan 22 12:18:15 crc kubenswrapper[4773]: I0122 12:18:15.371375 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" event={"ID":"68b61a4c-4e01-4588-877f-91017ce2df24","Type":"ContainerDied","Data":"7efeecf21222a49c18fffb381c026b88c867704337a91777a215f1b5469f21b0"} Jan 22 12:18:15 crc kubenswrapper[4773]: I0122 12:18:15.371449 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75dbb546bf-xw2mz" event={"ID":"68b61a4c-4e01-4588-877f-91017ce2df24","Type":"ContainerDied","Data":"39aa190fa5efab3796562777b79a946e8515dbe0fc6fe8c0d92af46e9e73b659"} Jan 22 12:18:15 crc kubenswrapper[4773]: I0122 12:18:15.371473 4773 scope.go:117] "RemoveContainer" containerID="7efeecf21222a49c18fffb381c026b88c867704337a91777a215f1b5469f21b0" Jan 22 12:18:15 crc kubenswrapper[4773]: I0122 12:18:15.440151 4773 scope.go:117] "RemoveContainer" containerID="55059cd4b5ef17b22e2ad90464ec26946314dff7aa14d8ab5c318202f7f753b0" Jan 22 12:18:15 crc kubenswrapper[4773]: I0122 12:18:15.455794 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75dbb546bf-xw2mz"] Jan 22 12:18:15 crc kubenswrapper[4773]: I0122 12:18:15.465787 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75dbb546bf-xw2mz"] Jan 22 12:18:15 crc kubenswrapper[4773]: I0122 12:18:15.483462 4773 scope.go:117] "RemoveContainer" containerID="7efeecf21222a49c18fffb381c026b88c867704337a91777a215f1b5469f21b0" Jan 22 12:18:15 crc kubenswrapper[4773]: E0122 12:18:15.488770 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7efeecf21222a49c18fffb381c026b88c867704337a91777a215f1b5469f21b0\": container with ID starting with 7efeecf21222a49c18fffb381c026b88c867704337a91777a215f1b5469f21b0 not found: ID does not exist" containerID="7efeecf21222a49c18fffb381c026b88c867704337a91777a215f1b5469f21b0" Jan 22 12:18:15 crc kubenswrapper[4773]: I0122 12:18:15.488936 4773 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7efeecf21222a49c18fffb381c026b88c867704337a91777a215f1b5469f21b0"} err="failed to get container status \"7efeecf21222a49c18fffb381c026b88c867704337a91777a215f1b5469f21b0\": rpc error: code = NotFound desc = could not find container \"7efeecf21222a49c18fffb381c026b88c867704337a91777a215f1b5469f21b0\": container with ID starting with 7efeecf21222a49c18fffb381c026b88c867704337a91777a215f1b5469f21b0 not found: ID does not exist" Jan 22 12:18:15 crc kubenswrapper[4773]: I0122 12:18:15.488967 4773 scope.go:117] "RemoveContainer" containerID="55059cd4b5ef17b22e2ad90464ec26946314dff7aa14d8ab5c318202f7f753b0" Jan 22 12:18:15 crc kubenswrapper[4773]: E0122 12:18:15.491684 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55059cd4b5ef17b22e2ad90464ec26946314dff7aa14d8ab5c318202f7f753b0\": container with ID starting with 55059cd4b5ef17b22e2ad90464ec26946314dff7aa14d8ab5c318202f7f753b0 not found: ID does not exist" containerID="55059cd4b5ef17b22e2ad90464ec26946314dff7aa14d8ab5c318202f7f753b0" Jan 22 12:18:15 crc kubenswrapper[4773]: I0122 12:18:15.491732 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55059cd4b5ef17b22e2ad90464ec26946314dff7aa14d8ab5c318202f7f753b0"} err="failed to get container status \"55059cd4b5ef17b22e2ad90464ec26946314dff7aa14d8ab5c318202f7f753b0\": rpc error: code = NotFound desc = could not find container \"55059cd4b5ef17b22e2ad90464ec26946314dff7aa14d8ab5c318202f7f753b0\": container with ID starting with 55059cd4b5ef17b22e2ad90464ec26946314dff7aa14d8ab5c318202f7f753b0 not found: ID does not exist" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.093920 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-b7znb" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.102765 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-jh5jz" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.185383 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8d8e91e-77e9-430e-b7c3-da6898073d0a-config-data\") pod \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\" (UID: \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\") " Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.185499 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktsg2\" (UniqueName: \"kubernetes.io/projected/b8d8e91e-77e9-430e-b7c3-da6898073d0a-kube-api-access-ktsg2\") pod \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\" (UID: \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\") " Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.185565 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9031d071-6292-4367-abab-a0be539a3914-scripts\") pod \"9031d071-6292-4367-abab-a0be539a3914\" (UID: \"9031d071-6292-4367-abab-a0be539a3914\") " Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.185627 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8d8e91e-77e9-430e-b7c3-da6898073d0a-scripts\") pod \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\" (UID: \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\") " Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.185691 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9031d071-6292-4367-abab-a0be539a3914-config-data\") pod \"9031d071-6292-4367-abab-a0be539a3914\" (UID: \"9031d071-6292-4367-abab-a0be539a3914\") " Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.185757 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7z5wk\" (UniqueName: \"kubernetes.io/projected/9031d071-6292-4367-abab-a0be539a3914-kube-api-access-7z5wk\") pod \"9031d071-6292-4367-abab-a0be539a3914\" (UID: \"9031d071-6292-4367-abab-a0be539a3914\") " Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.185827 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8d8e91e-77e9-430e-b7c3-da6898073d0a-combined-ca-bundle\") pod \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\" (UID: \"b8d8e91e-77e9-430e-b7c3-da6898073d0a\") " Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.185873 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9031d071-6292-4367-abab-a0be539a3914-combined-ca-bundle\") pod \"9031d071-6292-4367-abab-a0be539a3914\" (UID: \"9031d071-6292-4367-abab-a0be539a3914\") " Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.191414 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9031d071-6292-4367-abab-a0be539a3914-kube-api-access-7z5wk" (OuterVolumeSpecName: "kube-api-access-7z5wk") pod "9031d071-6292-4367-abab-a0be539a3914" (UID: "9031d071-6292-4367-abab-a0be539a3914"). InnerVolumeSpecName "kube-api-access-7z5wk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.191492 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8d8e91e-77e9-430e-b7c3-da6898073d0a-kube-api-access-ktsg2" (OuterVolumeSpecName: "kube-api-access-ktsg2") pod "b8d8e91e-77e9-430e-b7c3-da6898073d0a" (UID: "b8d8e91e-77e9-430e-b7c3-da6898073d0a"). InnerVolumeSpecName "kube-api-access-ktsg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.208628 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9031d071-6292-4367-abab-a0be539a3914-scripts" (OuterVolumeSpecName: "scripts") pod "9031d071-6292-4367-abab-a0be539a3914" (UID: "9031d071-6292-4367-abab-a0be539a3914"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.217260 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8d8e91e-77e9-430e-b7c3-da6898073d0a-scripts" (OuterVolumeSpecName: "scripts") pod "b8d8e91e-77e9-430e-b7c3-da6898073d0a" (UID: "b8d8e91e-77e9-430e-b7c3-da6898073d0a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.237687 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9031d071-6292-4367-abab-a0be539a3914-config-data" (OuterVolumeSpecName: "config-data") pod "9031d071-6292-4367-abab-a0be539a3914" (UID: "9031d071-6292-4367-abab-a0be539a3914"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.237846 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8d8e91e-77e9-430e-b7c3-da6898073d0a-config-data" (OuterVolumeSpecName: "config-data") pod "b8d8e91e-77e9-430e-b7c3-da6898073d0a" (UID: "b8d8e91e-77e9-430e-b7c3-da6898073d0a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.256986 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8d8e91e-77e9-430e-b7c3-da6898073d0a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b8d8e91e-77e9-430e-b7c3-da6898073d0a" (UID: "b8d8e91e-77e9-430e-b7c3-da6898073d0a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.257344 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9031d071-6292-4367-abab-a0be539a3914-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9031d071-6292-4367-abab-a0be539a3914" (UID: "9031d071-6292-4367-abab-a0be539a3914"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.288692 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8d8e91e-77e9-430e-b7c3-da6898073d0a-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.288728 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9031d071-6292-4367-abab-a0be539a3914-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.288740 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7z5wk\" (UniqueName: \"kubernetes.io/projected/9031d071-6292-4367-abab-a0be539a3914-kube-api-access-7z5wk\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.288749 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8d8e91e-77e9-430e-b7c3-da6898073d0a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.288757 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9031d071-6292-4367-abab-a0be539a3914-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.288765 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8d8e91e-77e9-430e-b7c3-da6898073d0a-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.288773 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktsg2\" (UniqueName: \"kubernetes.io/projected/b8d8e91e-77e9-430e-b7c3-da6898073d0a-kube-api-access-ktsg2\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.288780 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9031d071-6292-4367-abab-a0be539a3914-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.371886 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-jh5jz" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.371884 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-jh5jz" event={"ID":"9031d071-6292-4367-abab-a0be539a3914","Type":"ContainerDied","Data":"c5324fb6aeb0e489c860941f8d4eca074945cf3411bcad28d4271c99be1e2879"} Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.372434 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5324fb6aeb0e489c860941f8d4eca074945cf3411bcad28d4271c99be1e2879" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.374013 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-b7znb" event={"ID":"b8d8e91e-77e9-430e-b7c3-da6898073d0a","Type":"ContainerDied","Data":"af2eac9bd539470b28a19c72ea215eab8cb641d1275dd8b27f587189c4dda089"} Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.374045 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af2eac9bd539470b28a19c72ea215eab8cb641d1275dd8b27f587189c4dda089" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.374092 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-b7znb" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.504836 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 12:18:16 crc kubenswrapper[4773]: E0122 12:18:16.505439 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35b8ff0c-2b27-46ce-8c24-32794fd745a1" containerName="registry-server" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.505460 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="35b8ff0c-2b27-46ce-8c24-32794fd745a1" containerName="registry-server" Jan 22 12:18:16 crc kubenswrapper[4773]: E0122 12:18:16.505497 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35b8ff0c-2b27-46ce-8c24-32794fd745a1" containerName="extract-content" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.505505 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="35b8ff0c-2b27-46ce-8c24-32794fd745a1" containerName="extract-content" Jan 22 12:18:16 crc kubenswrapper[4773]: E0122 12:18:16.505517 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8d8e91e-77e9-430e-b7c3-da6898073d0a" containerName="nova-manage" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.505524 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8d8e91e-77e9-430e-b7c3-da6898073d0a" containerName="nova-manage" Jan 22 12:18:16 crc kubenswrapper[4773]: E0122 12:18:16.505537 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68b61a4c-4e01-4588-877f-91017ce2df24" containerName="init" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.505543 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="68b61a4c-4e01-4588-877f-91017ce2df24" containerName="init" Jan 22 12:18:16 crc kubenswrapper[4773]: E0122 12:18:16.505565 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68b61a4c-4e01-4588-877f-91017ce2df24" containerName="dnsmasq-dns" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.505572 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="68b61a4c-4e01-4588-877f-91017ce2df24" containerName="dnsmasq-dns" Jan 22 12:18:16 crc kubenswrapper[4773]: E0122 12:18:16.505586 4773 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="35b8ff0c-2b27-46ce-8c24-32794fd745a1" containerName="extract-utilities" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.505593 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="35b8ff0c-2b27-46ce-8c24-32794fd745a1" containerName="extract-utilities" Jan 22 12:18:16 crc kubenswrapper[4773]: E0122 12:18:16.505606 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9031d071-6292-4367-abab-a0be539a3914" containerName="nova-cell1-conductor-db-sync" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.505613 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="9031d071-6292-4367-abab-a0be539a3914" containerName="nova-cell1-conductor-db-sync" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.505826 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="9031d071-6292-4367-abab-a0be539a3914" containerName="nova-cell1-conductor-db-sync" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.505839 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8d8e91e-77e9-430e-b7c3-da6898073d0a" containerName="nova-manage" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.505863 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="68b61a4c-4e01-4588-877f-91017ce2df24" containerName="dnsmasq-dns" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.505884 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="35b8ff0c-2b27-46ce-8c24-32794fd745a1" containerName="registry-server" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.506786 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.515737 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.516516 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.572377 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.572624 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="f03f64d9-1e8a-43ae-bc50-762a379cf20b" containerName="nova-scheduler-scheduler" containerID="cri-o://8f1f477ab6b3649fabc5fecec0621a818d3d044e54901e2192ef9e84575498b4" gracePeriod=30 Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.584404 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.584676 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="bda04167-ca9d-4b3f-a3b3-9f504d52539f" containerName="nova-api-log" containerID="cri-o://bea10f58e6a51db6feed23f6a77e5897dc2fe883d0eee03f34a3cd0bf08561b6" gracePeriod=30 Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.584764 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="bda04167-ca9d-4b3f-a3b3-9f504d52539f" containerName="nova-api-api" containerID="cri-o://f63370a477f68606030ea8df66a044976dbdc9dcb3a2cddbd3a295382a8b62cc" gracePeriod=30 Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.599604 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-9w2pz\" (UniqueName: \"kubernetes.io/projected/b9cded2e-ee73-4606-8df8-f52bb4bb139d-kube-api-access-9w2pz\") pod \"nova-cell1-conductor-0\" (UID: \"b9cded2e-ee73-4606-8df8-f52bb4bb139d\") " pod="openstack/nova-cell1-conductor-0" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.599698 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9cded2e-ee73-4606-8df8-f52bb4bb139d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"b9cded2e-ee73-4606-8df8-f52bb4bb139d\") " pod="openstack/nova-cell1-conductor-0" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.599753 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9cded2e-ee73-4606-8df8-f52bb4bb139d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"b9cded2e-ee73-4606-8df8-f52bb4bb139d\") " pod="openstack/nova-cell1-conductor-0" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.670263 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68b61a4c-4e01-4588-877f-91017ce2df24" path="/var/lib/kubelet/pods/68b61a4c-4e01-4588-877f-91017ce2df24/volumes" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.702507 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9w2pz\" (UniqueName: \"kubernetes.io/projected/b9cded2e-ee73-4606-8df8-f52bb4bb139d-kube-api-access-9w2pz\") pod \"nova-cell1-conductor-0\" (UID: \"b9cded2e-ee73-4606-8df8-f52bb4bb139d\") " pod="openstack/nova-cell1-conductor-0" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.702588 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9cded2e-ee73-4606-8df8-f52bb4bb139d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"b9cded2e-ee73-4606-8df8-f52bb4bb139d\") " pod="openstack/nova-cell1-conductor-0" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.702650 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9cded2e-ee73-4606-8df8-f52bb4bb139d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"b9cded2e-ee73-4606-8df8-f52bb4bb139d\") " pod="openstack/nova-cell1-conductor-0" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.716945 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9cded2e-ee73-4606-8df8-f52bb4bb139d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"b9cded2e-ee73-4606-8df8-f52bb4bb139d\") " pod="openstack/nova-cell1-conductor-0" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.717928 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9cded2e-ee73-4606-8df8-f52bb4bb139d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"b9cded2e-ee73-4606-8df8-f52bb4bb139d\") " pod="openstack/nova-cell1-conductor-0" Jan 22 12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.721892 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9w2pz\" (UniqueName: \"kubernetes.io/projected/b9cded2e-ee73-4606-8df8-f52bb4bb139d-kube-api-access-9w2pz\") pod \"nova-cell1-conductor-0\" (UID: \"b9cded2e-ee73-4606-8df8-f52bb4bb139d\") " pod="openstack/nova-cell1-conductor-0" Jan 22 
12:18:16 crc kubenswrapper[4773]: I0122 12:18:16.827212 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Jan 22 12:18:17 crc kubenswrapper[4773]: W0122 12:18:17.332523 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9cded2e_ee73_4606_8df8_f52bb4bb139d.slice/crio-2e0e5eb9032308c422b9fede32596dee75d172876679f67fbf73f3085790fb72 WatchSource:0}: Error finding container 2e0e5eb9032308c422b9fede32596dee75d172876679f67fbf73f3085790fb72: Status 404 returned error can't find the container with id 2e0e5eb9032308c422b9fede32596dee75d172876679f67fbf73f3085790fb72
Jan 22 12:18:17 crc kubenswrapper[4773]: I0122 12:18:17.333879 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Jan 22 12:18:17 crc kubenswrapper[4773]: I0122 12:18:17.384411 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"b9cded2e-ee73-4606-8df8-f52bb4bb139d","Type":"ContainerStarted","Data":"2e0e5eb9032308c422b9fede32596dee75d172876679f67fbf73f3085790fb72"}
Jan 22 12:18:17 crc kubenswrapper[4773]: I0122 12:18:17.387740 4773 generic.go:334] "Generic (PLEG): container finished" podID="bda04167-ca9d-4b3f-a3b3-9f504d52539f" containerID="bea10f58e6a51db6feed23f6a77e5897dc2fe883d0eee03f34a3cd0bf08561b6" exitCode=143
Jan 22 12:18:17 crc kubenswrapper[4773]: I0122 12:18:17.387811 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bda04167-ca9d-4b3f-a3b3-9f504d52539f","Type":"ContainerDied","Data":"bea10f58e6a51db6feed23f6a77e5897dc2fe883d0eee03f34a3cd0bf08561b6"}
Jan 22 12:18:18 crc kubenswrapper[4773]: I0122 12:18:18.397794 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"b9cded2e-ee73-4606-8df8-f52bb4bb139d","Type":"ContainerStarted","Data":"43101b119afcdef21c580975d02a69a753cc73834041f83f630e3b6a0cf4ac01"}
Jan 22 12:18:18 crc kubenswrapper[4773]: I0122 12:18:18.399982 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Jan 22 12:18:18 crc kubenswrapper[4773]: I0122 12:18:18.422100 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.422080972 podStartE2EDuration="2.422080972s" podCreationTimestamp="2026-01-22 12:18:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:18:18.413991295 +0000 UTC m=+1405.992107140" watchObservedRunningTime="2026-01-22 12:18:18.422080972 +0000 UTC m=+1406.000196797"
Jan 22 12:18:18 crc kubenswrapper[4773]: E0122 12:18:18.791780 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f1f477ab6b3649fabc5fecec0621a818d3d044e54901e2192ef9e84575498b4" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 22 12:18:18 crc kubenswrapper[4773]: E0122 12:18:18.794342 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f1f477ab6b3649fabc5fecec0621a818d3d044e54901e2192ef9e84575498b4" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 22 12:18:18 crc kubenswrapper[4773]: E0122 12:18:18.796151 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f1f477ab6b3649fabc5fecec0621a818d3d044e54901e2192ef9e84575498b4" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Jan 22 12:18:18 crc kubenswrapper[4773]: E0122 12:18:18.796240 4773 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="f03f64d9-1e8a-43ae-bc50-762a379cf20b" containerName="nova-scheduler-scheduler"
Jan 22 12:18:19 crc kubenswrapper[4773]: I0122 12:18:19.683973 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Jan 22 12:18:20 crc kubenswrapper[4773]: I0122 12:18:20.960165 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 22 12:18:20 crc kubenswrapper[4773]: I0122 12:18:20.980016 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwt7g\" (UniqueName: \"kubernetes.io/projected/f03f64d9-1e8a-43ae-bc50-762a379cf20b-kube-api-access-bwt7g\") pod \"f03f64d9-1e8a-43ae-bc50-762a379cf20b\" (UID: \"f03f64d9-1e8a-43ae-bc50-762a379cf20b\") "
Jan 22 12:18:20 crc kubenswrapper[4773]: I0122 12:18:20.980174 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f03f64d9-1e8a-43ae-bc50-762a379cf20b-config-data\") pod \"f03f64d9-1e8a-43ae-bc50-762a379cf20b\" (UID: \"f03f64d9-1e8a-43ae-bc50-762a379cf20b\") "
Jan 22 12:18:20 crc kubenswrapper[4773]: I0122 12:18:20.980389 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f03f64d9-1e8a-43ae-bc50-762a379cf20b-combined-ca-bundle\") pod \"f03f64d9-1e8a-43ae-bc50-762a379cf20b\" (UID: \"f03f64d9-1e8a-43ae-bc50-762a379cf20b\") "
Jan 22 12:18:20 crc kubenswrapper[4773]: I0122 12:18:20.985248 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f03f64d9-1e8a-43ae-bc50-762a379cf20b-kube-api-access-bwt7g" (OuterVolumeSpecName: "kube-api-access-bwt7g") pod "f03f64d9-1e8a-43ae-bc50-762a379cf20b" (UID: "f03f64d9-1e8a-43ae-bc50-762a379cf20b"). InnerVolumeSpecName "kube-api-access-bwt7g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.028564 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f03f64d9-1e8a-43ae-bc50-762a379cf20b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f03f64d9-1e8a-43ae-bc50-762a379cf20b" (UID: "f03f64d9-1e8a-43ae-bc50-762a379cf20b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.032392 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f03f64d9-1e8a-43ae-bc50-762a379cf20b-config-data" (OuterVolumeSpecName: "config-data") pod "f03f64d9-1e8a-43ae-bc50-762a379cf20b" (UID: "f03f64d9-1e8a-43ae-bc50-762a379cf20b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.083386 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f03f64d9-1e8a-43ae-bc50-762a379cf20b-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.083425 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f03f64d9-1e8a-43ae-bc50-762a379cf20b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.083437 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwt7g\" (UniqueName: \"kubernetes.io/projected/f03f64d9-1e8a-43ae-bc50-762a379cf20b-kube-api-access-bwt7g\") on node \"crc\" DevicePath \"\""
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.428864 4773 generic.go:334] "Generic (PLEG): container finished" podID="bda04167-ca9d-4b3f-a3b3-9f504d52539f" containerID="f63370a477f68606030ea8df66a044976dbdc9dcb3a2cddbd3a295382a8b62cc" exitCode=0
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.428997 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bda04167-ca9d-4b3f-a3b3-9f504d52539f","Type":"ContainerDied","Data":"f63370a477f68606030ea8df66a044976dbdc9dcb3a2cddbd3a295382a8b62cc"}
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.430641 4773 generic.go:334] "Generic (PLEG): container finished" podID="f03f64d9-1e8a-43ae-bc50-762a379cf20b" containerID="8f1f477ab6b3649fabc5fecec0621a818d3d044e54901e2192ef9e84575498b4" exitCode=0
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.430665 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f03f64d9-1e8a-43ae-bc50-762a379cf20b","Type":"ContainerDied","Data":"8f1f477ab6b3649fabc5fecec0621a818d3d044e54901e2192ef9e84575498b4"}
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.430685 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f03f64d9-1e8a-43ae-bc50-762a379cf20b","Type":"ContainerDied","Data":"49bde181871f7e942e66a46636b36a91fd714453e257ff6d98df64a4a8f1d8c8"}
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.430702 4773 scope.go:117] "RemoveContainer" containerID="8f1f477ab6b3649fabc5fecec0621a818d3d044e54901e2192ef9e84575498b4"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.430730 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.437719 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.455236 4773 scope.go:117] "RemoveContainer" containerID="8f1f477ab6b3649fabc5fecec0621a818d3d044e54901e2192ef9e84575498b4"
Jan 22 12:18:21 crc kubenswrapper[4773]: E0122 12:18:21.455687 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f1f477ab6b3649fabc5fecec0621a818d3d044e54901e2192ef9e84575498b4\": container with ID starting with 8f1f477ab6b3649fabc5fecec0621a818d3d044e54901e2192ef9e84575498b4 not found: ID does not exist" containerID="8f1f477ab6b3649fabc5fecec0621a818d3d044e54901e2192ef9e84575498b4"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.455781 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f1f477ab6b3649fabc5fecec0621a818d3d044e54901e2192ef9e84575498b4"} err="failed to get container status \"8f1f477ab6b3649fabc5fecec0621a818d3d044e54901e2192ef9e84575498b4\": rpc error: code = NotFound desc = could not find container \"8f1f477ab6b3649fabc5fecec0621a818d3d044e54901e2192ef9e84575498b4\": container with ID starting with 8f1f477ab6b3649fabc5fecec0621a818d3d044e54901e2192ef9e84575498b4 not found: ID does not exist"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.491006 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7zxh\" (UniqueName: \"kubernetes.io/projected/bda04167-ca9d-4b3f-a3b3-9f504d52539f-kube-api-access-g7zxh\") pod \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\" (UID: \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\") "
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.491107 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bda04167-ca9d-4b3f-a3b3-9f504d52539f-logs\") pod \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\" (UID: \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\") "
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.491149 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bda04167-ca9d-4b3f-a3b3-9f504d52539f-combined-ca-bundle\") pod \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\" (UID: \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\") "
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.491265 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bda04167-ca9d-4b3f-a3b3-9f504d52539f-config-data\") pod \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\" (UID: \"bda04167-ca9d-4b3f-a3b3-9f504d52539f\") "
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.492169 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bda04167-ca9d-4b3f-a3b3-9f504d52539f-logs" (OuterVolumeSpecName: "logs") pod "bda04167-ca9d-4b3f-a3b3-9f504d52539f" (UID: "bda04167-ca9d-4b3f-a3b3-9f504d52539f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.492949 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.504709 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.519246 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bda04167-ca9d-4b3f-a3b3-9f504d52539f-kube-api-access-g7zxh" (OuterVolumeSpecName: "kube-api-access-g7zxh") pod "bda04167-ca9d-4b3f-a3b3-9f504d52539f" (UID: "bda04167-ca9d-4b3f-a3b3-9f504d52539f"). InnerVolumeSpecName "kube-api-access-g7zxh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.527171 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Jan 22 12:18:21 crc kubenswrapper[4773]: E0122 12:18:21.527574 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f03f64d9-1e8a-43ae-bc50-762a379cf20b" containerName="nova-scheduler-scheduler"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.527589 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f03f64d9-1e8a-43ae-bc50-762a379cf20b" containerName="nova-scheduler-scheduler"
Jan 22 12:18:21 crc kubenswrapper[4773]: E0122 12:18:21.527601 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bda04167-ca9d-4b3f-a3b3-9f504d52539f" containerName="nova-api-api"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.527607 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="bda04167-ca9d-4b3f-a3b3-9f504d52539f" containerName="nova-api-api"
Jan 22 12:18:21 crc kubenswrapper[4773]: E0122 12:18:21.527630 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bda04167-ca9d-4b3f-a3b3-9f504d52539f" containerName="nova-api-log"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.527635 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="bda04167-ca9d-4b3f-a3b3-9f504d52539f" containerName="nova-api-log"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.527793 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="bda04167-ca9d-4b3f-a3b3-9f504d52539f" containerName="nova-api-api"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.527805 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="bda04167-ca9d-4b3f-a3b3-9f504d52539f" containerName="nova-api-log"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.527824 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f03f64d9-1e8a-43ae-bc50-762a379cf20b" containerName="nova-scheduler-scheduler"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.528395 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.534339 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.537915 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bda04167-ca9d-4b3f-a3b3-9f504d52539f-config-data" (OuterVolumeSpecName: "config-data") pod "bda04167-ca9d-4b3f-a3b3-9f504d52539f" (UID: "bda04167-ca9d-4b3f-a3b3-9f504d52539f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.538977 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bda04167-ca9d-4b3f-a3b3-9f504d52539f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bda04167-ca9d-4b3f-a3b3-9f504d52539f" (UID: "bda04167-ca9d-4b3f-a3b3-9f504d52539f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.554457 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.593051 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8a347dd-daa2-4e18-abec-6c0134da885c-config-data\") pod \"nova-scheduler-0\" (UID: \"a8a347dd-daa2-4e18-abec-6c0134da885c\") " pod="openstack/nova-scheduler-0"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.593268 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8a347dd-daa2-4e18-abec-6c0134da885c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a8a347dd-daa2-4e18-abec-6c0134da885c\") " pod="openstack/nova-scheduler-0"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.593557 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hblqn\" (UniqueName: \"kubernetes.io/projected/a8a347dd-daa2-4e18-abec-6c0134da885c-kube-api-access-hblqn\") pod \"nova-scheduler-0\" (UID: \"a8a347dd-daa2-4e18-abec-6c0134da885c\") " pod="openstack/nova-scheduler-0"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.593801 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bda04167-ca9d-4b3f-a3b3-9f504d52539f-logs\") on node \"crc\" DevicePath \"\""
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.593823 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bda04167-ca9d-4b3f-a3b3-9f504d52539f-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.593835 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bda04167-ca9d-4b3f-a3b3-9f504d52539f-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.593848 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7zxh\" (UniqueName: \"kubernetes.io/projected/bda04167-ca9d-4b3f-a3b3-9f504d52539f-kube-api-access-g7zxh\") on node \"crc\" DevicePath \"\""
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.695819 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8a347dd-daa2-4e18-abec-6c0134da885c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a8a347dd-daa2-4e18-abec-6c0134da885c\") " pod="openstack/nova-scheduler-0"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.695949 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hblqn\" (UniqueName: \"kubernetes.io/projected/a8a347dd-daa2-4e18-abec-6c0134da885c-kube-api-access-hblqn\") pod \"nova-scheduler-0\" (UID: \"a8a347dd-daa2-4e18-abec-6c0134da885c\") " pod="openstack/nova-scheduler-0"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.696036 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8a347dd-daa2-4e18-abec-6c0134da885c-config-data\") pod \"nova-scheduler-0\" (UID: \"a8a347dd-daa2-4e18-abec-6c0134da885c\") " pod="openstack/nova-scheduler-0"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.700363 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8a347dd-daa2-4e18-abec-6c0134da885c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a8a347dd-daa2-4e18-abec-6c0134da885c\") " pod="openstack/nova-scheduler-0"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.700381 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8a347dd-daa2-4e18-abec-6c0134da885c-config-data\") pod \"nova-scheduler-0\" (UID: \"a8a347dd-daa2-4e18-abec-6c0134da885c\") " pod="openstack/nova-scheduler-0"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.715710 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hblqn\" (UniqueName: \"kubernetes.io/projected/a8a347dd-daa2-4e18-abec-6c0134da885c-kube-api-access-hblqn\") pod \"nova-scheduler-0\" (UID: \"a8a347dd-daa2-4e18-abec-6c0134da885c\") " pod="openstack/nova-scheduler-0"
Jan 22 12:18:21 crc kubenswrapper[4773]: I0122 12:18:21.896956 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.356991 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 22 12:18:22 crc kubenswrapper[4773]: W0122 12:18:22.377372 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8a347dd_daa2_4e18_abec_6c0134da885c.slice/crio-56c05f7db53467182e1c2f2c4a13fa61df7442d9087ce4f6dfa52f2bd83bd417 WatchSource:0}: Error finding container 56c05f7db53467182e1c2f2c4a13fa61df7442d9087ce4f6dfa52f2bd83bd417: Status 404 returned error can't find the container with id 56c05f7db53467182e1c2f2c4a13fa61df7442d9087ce4f6dfa52f2bd83bd417
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.442105 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a8a347dd-daa2-4e18-abec-6c0134da885c","Type":"ContainerStarted","Data":"56c05f7db53467182e1c2f2c4a13fa61df7442d9087ce4f6dfa52f2bd83bd417"}
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.444053 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bda04167-ca9d-4b3f-a3b3-9f504d52539f","Type":"ContainerDied","Data":"73d859f0b20d7b964f88dc1cc74fabc2b009e99bc8b3fa164453548f45a1bbba"}
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.444103 4773 scope.go:117] "RemoveContainer" containerID="f63370a477f68606030ea8df66a044976dbdc9dcb3a2cddbd3a295382a8b62cc"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.444106 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.483264 4773 scope.go:117] "RemoveContainer" containerID="bea10f58e6a51db6feed23f6a77e5897dc2fe883d0eee03f34a3cd0bf08561b6"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.487506 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.501421 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.514830 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.516651 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.519042 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.523243 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.613788 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fb7e130-2923-482e-8861-6c40e010a0fb-logs\") pod \"nova-api-0\" (UID: \"6fb7e130-2923-482e-8861-6c40e010a0fb\") " pod="openstack/nova-api-0"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.613994 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fb7e130-2923-482e-8861-6c40e010a0fb-config-data\") pod \"nova-api-0\" (UID: \"6fb7e130-2923-482e-8861-6c40e010a0fb\") " pod="openstack/nova-api-0"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.614051 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pthrl\" (UniqueName: \"kubernetes.io/projected/6fb7e130-2923-482e-8861-6c40e010a0fb-kube-api-access-pthrl\") pod \"nova-api-0\" (UID: \"6fb7e130-2923-482e-8861-6c40e010a0fb\") " pod="openstack/nova-api-0"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.614107 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb7e130-2923-482e-8861-6c40e010a0fb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6fb7e130-2923-482e-8861-6c40e010a0fb\") " pod="openstack/nova-api-0"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.671250 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bda04167-ca9d-4b3f-a3b3-9f504d52539f" path="/var/lib/kubelet/pods/bda04167-ca9d-4b3f-a3b3-9f504d52539f/volumes"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.671900 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f03f64d9-1e8a-43ae-bc50-762a379cf20b" path="/var/lib/kubelet/pods/f03f64d9-1e8a-43ae-bc50-762a379cf20b/volumes"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.716745 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pthrl\" (UniqueName: \"kubernetes.io/projected/6fb7e130-2923-482e-8861-6c40e010a0fb-kube-api-access-pthrl\") pod \"nova-api-0\" (UID: \"6fb7e130-2923-482e-8861-6c40e010a0fb\") " pod="openstack/nova-api-0"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.716875 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb7e130-2923-482e-8861-6c40e010a0fb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6fb7e130-2923-482e-8861-6c40e010a0fb\") " pod="openstack/nova-api-0"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.716952 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fb7e130-2923-482e-8861-6c40e010a0fb-logs\") pod \"nova-api-0\" (UID: \"6fb7e130-2923-482e-8861-6c40e010a0fb\") " pod="openstack/nova-api-0"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.717136 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fb7e130-2923-482e-8861-6c40e010a0fb-config-data\") pod \"nova-api-0\" (UID: \"6fb7e130-2923-482e-8861-6c40e010a0fb\") " pod="openstack/nova-api-0"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.718047 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fb7e130-2923-482e-8861-6c40e010a0fb-logs\") pod \"nova-api-0\" (UID: \"6fb7e130-2923-482e-8861-6c40e010a0fb\") " pod="openstack/nova-api-0"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.723111 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fb7e130-2923-482e-8861-6c40e010a0fb-config-data\") pod \"nova-api-0\" (UID: \"6fb7e130-2923-482e-8861-6c40e010a0fb\") " pod="openstack/nova-api-0"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.727353 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb7e130-2923-482e-8861-6c40e010a0fb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6fb7e130-2923-482e-8861-6c40e010a0fb\") " pod="openstack/nova-api-0"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.735895 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pthrl\" (UniqueName: \"kubernetes.io/projected/6fb7e130-2923-482e-8861-6c40e010a0fb-kube-api-access-pthrl\") pod \"nova-api-0\" (UID: \"6fb7e130-2923-482e-8861-6c40e010a0fb\") " pod="openstack/nova-api-0"
Jan 22 12:18:22 crc kubenswrapper[4773]: I0122 12:18:22.865395 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 22 12:18:23 crc kubenswrapper[4773]: I0122 12:18:23.376140 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 22 12:18:23 crc kubenswrapper[4773]: W0122 12:18:23.385628 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6fb7e130_2923_482e_8861_6c40e010a0fb.slice/crio-a68f167d6f99a60d2051bee02e2de05e33505b0f82324ed05aaaf1b3d22f8d20 WatchSource:0}: Error finding container a68f167d6f99a60d2051bee02e2de05e33505b0f82324ed05aaaf1b3d22f8d20: Status 404 returned error can't find the container with id a68f167d6f99a60d2051bee02e2de05e33505b0f82324ed05aaaf1b3d22f8d20
Jan 22 12:18:23 crc kubenswrapper[4773]: I0122 12:18:23.461354 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6fb7e130-2923-482e-8861-6c40e010a0fb","Type":"ContainerStarted","Data":"a68f167d6f99a60d2051bee02e2de05e33505b0f82324ed05aaaf1b3d22f8d20"}
Jan 22 12:18:23 crc kubenswrapper[4773]: I0122 12:18:23.462995 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a8a347dd-daa2-4e18-abec-6c0134da885c","Type":"ContainerStarted","Data":"8d6f13cdc3374231179277db35da7509699182837d8a80a3601dd2bd4ba9a53a"}
Jan 22 12:18:23 crc kubenswrapper[4773]: I0122 12:18:23.487842 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.487820698 podStartE2EDuration="2.487820698s" podCreationTimestamp="2026-01-22 12:18:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:18:23.481635674 +0000 UTC m=+1411.059751509" watchObservedRunningTime="2026-01-22 12:18:23.487820698 +0000 UTC m=+1411.065936523"
Jan 22 12:18:23 crc kubenswrapper[4773]: I0122 12:18:23.838295 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 22 12:18:23 crc kubenswrapper[4773]: I0122 12:18:23.838905 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="8ab88165-fbbf-403c-b1c2-ecb80db3e8d1" containerName="kube-state-metrics" containerID="cri-o://e9f730470eabafffce1e8c26b31435cfc09dae14559dc09b36395af2ed3705f0" gracePeriod=30
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.450706 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.479518 4773 generic.go:334] "Generic (PLEG): container finished" podID="8ab88165-fbbf-403c-b1c2-ecb80db3e8d1" containerID="e9f730470eabafffce1e8c26b31435cfc09dae14559dc09b36395af2ed3705f0" exitCode=2
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.479577 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8ab88165-fbbf-403c-b1c2-ecb80db3e8d1","Type":"ContainerDied","Data":"e9f730470eabafffce1e8c26b31435cfc09dae14559dc09b36395af2ed3705f0"}
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.479611 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8ab88165-fbbf-403c-b1c2-ecb80db3e8d1","Type":"ContainerDied","Data":"f9a751b1105a66273c07a4766f5f5965d779bee639ff5c2cdd843861e10f31a7"}
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.479632 4773 scope.go:117] "RemoveContainer" containerID="e9f730470eabafffce1e8c26b31435cfc09dae14559dc09b36395af2ed3705f0"
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.479793 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.483840 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6fb7e130-2923-482e-8861-6c40e010a0fb","Type":"ContainerStarted","Data":"12a444f35fa6de0e33e245e405626c8d10a6c4f754a855d6bdbc0aed9fedc82e"}
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.483877 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6fb7e130-2923-482e-8861-6c40e010a0fb","Type":"ContainerStarted","Data":"89c821cc95285388855eaa6ba9c7d09721141cc7a2707ff6a7762af3e3f66e34"}
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.508348 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.508332812 podStartE2EDuration="2.508332812s" podCreationTimestamp="2026-01-22 12:18:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:18:24.508065575 +0000 UTC m=+1412.086181400" watchObservedRunningTime="2026-01-22 12:18:24.508332812 +0000 UTC m=+1412.086448637"
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.536450 4773 scope.go:117] "RemoveContainer" containerID="e9f730470eabafffce1e8c26b31435cfc09dae14559dc09b36395af2ed3705f0"
Jan 22 12:18:24 crc kubenswrapper[4773]: E0122 12:18:24.537213 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9f730470eabafffce1e8c26b31435cfc09dae14559dc09b36395af2ed3705f0\": container with ID starting with e9f730470eabafffce1e8c26b31435cfc09dae14559dc09b36395af2ed3705f0 not found: ID does not exist" containerID="e9f730470eabafffce1e8c26b31435cfc09dae14559dc09b36395af2ed3705f0"
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.537298 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9f730470eabafffce1e8c26b31435cfc09dae14559dc09b36395af2ed3705f0"} err="failed to get container status \"e9f730470eabafffce1e8c26b31435cfc09dae14559dc09b36395af2ed3705f0\": rpc error: code = NotFound desc = could not find container \"e9f730470eabafffce1e8c26b31435cfc09dae14559dc09b36395af2ed3705f0\": container with ID starting with e9f730470eabafffce1e8c26b31435cfc09dae14559dc09b36395af2ed3705f0 not found: ID does not exist"
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.554727 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vzdfr\" (UniqueName: \"kubernetes.io/projected/8ab88165-fbbf-403c-b1c2-ecb80db3e8d1-kube-api-access-vzdfr\") pod \"8ab88165-fbbf-403c-b1c2-ecb80db3e8d1\" (UID: \"8ab88165-fbbf-403c-b1c2-ecb80db3e8d1\") "
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.559258 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ab88165-fbbf-403c-b1c2-ecb80db3e8d1-kube-api-access-vzdfr" (OuterVolumeSpecName: "kube-api-access-vzdfr") pod "8ab88165-fbbf-403c-b1c2-ecb80db3e8d1" (UID: "8ab88165-fbbf-403c-b1c2-ecb80db3e8d1"). InnerVolumeSpecName "kube-api-access-vzdfr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.660511 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vzdfr\" (UniqueName: \"kubernetes.io/projected/8ab88165-fbbf-403c-b1c2-ecb80db3e8d1-kube-api-access-vzdfr\") on node \"crc\" DevicePath \"\""
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.809625 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.816966 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.826677 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 22 12:18:24 crc kubenswrapper[4773]: E0122 12:18:24.827088 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ab88165-fbbf-403c-b1c2-ecb80db3e8d1" containerName="kube-state-metrics"
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.827106 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ab88165-fbbf-403c-b1c2-ecb80db3e8d1" containerName="kube-state-metrics"
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.827264 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ab88165-fbbf-403c-b1c2-ecb80db3e8d1" containerName="kube-state-metrics"
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.827929 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.830781 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc"
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.830830 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config"
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.847914 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.966063 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/fd4905ba-f2f9-42c0-a21b-fabf4046af68-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\") " pod="openstack/kube-state-metrics-0"
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.966121 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4905ba-f2f9-42c0-a21b-fabf4046af68-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\") " pod="openstack/kube-state-metrics-0"
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.966217 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd4905ba-f2f9-42c0-a21b-fabf4046af68-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\") " pod="openstack/kube-state-metrics-0"
Jan 22 12:18:24 crc kubenswrapper[4773]: I0122 12:18:24.966254 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pgw5\" (UniqueName: \"kubernetes.io/projected/fd4905ba-f2f9-42c0-a21b-fabf4046af68-kube-api-access-2pgw5\") pod \"kube-state-metrics-0\" (UID: \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\") " pod="openstack/kube-state-metrics-0"
Jan 22 12:18:25 crc kubenswrapper[4773]: I0122 12:18:25.067956 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd4905ba-f2f9-42c0-a21b-fabf4046af68-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\") " pod="openstack/kube-state-metrics-0"
Jan 22 12:18:25 crc kubenswrapper[4773]: I0122 12:18:25.068370 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pgw5\" (UniqueName: \"kubernetes.io/projected/fd4905ba-f2f9-42c0-a21b-fabf4046af68-kube-api-access-2pgw5\") pod \"kube-state-metrics-0\" (UID: \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\") " pod="openstack/kube-state-metrics-0"
Jan 22 12:18:25 crc kubenswrapper[4773]: I0122 12:18:25.068532 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/fd4905ba-f2f9-42c0-a21b-fabf4046af68-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\") " pod="openstack/kube-state-metrics-0"
Jan 22 12:18:25 crc kubenswrapper[4773]: I0122 12:18:25.068593 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4905ba-f2f9-42c0-a21b-fabf4046af68-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\") " pod="openstack/kube-state-metrics-0"
Jan 22 12:18:25 crc kubenswrapper[4773]: I0122 12:18:25.073675 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/fd4905ba-f2f9-42c0-a21b-fabf4046af68-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\") " pod="openstack/kube-state-metrics-0"
Jan 22 12:18:25 crc kubenswrapper[4773]: I0122 12:18:25.073975 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4905ba-f2f9-42c0-a21b-fabf4046af68-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\") " pod="openstack/kube-state-metrics-0"
Jan 22 12:18:25 crc kubenswrapper[4773]: I0122 12:18:25.074125 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd4905ba-f2f9-42c0-a21b-fabf4046af68-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\") " pod="openstack/kube-state-metrics-0"
Jan 22 12:18:25 crc kubenswrapper[4773]: I0122 12:18:25.085392 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pgw5\" (UniqueName: \"kubernetes.io/projected/fd4905ba-f2f9-42c0-a21b-fabf4046af68-kube-api-access-2pgw5\") pod \"kube-state-metrics-0\" (UID: \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\") " pod="openstack/kube-state-metrics-0"
Jan 22 12:18:25 crc kubenswrapper[4773]: I0122 12:18:25.146815 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 22 12:18:25 crc kubenswrapper[4773]: I0122 12:18:25.617328 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 22 12:18:25 crc kubenswrapper[4773]: W0122 12:18:25.621588 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd4905ba_f2f9_42c0_a21b_fabf4046af68.slice/crio-cdb59e48e39a3e500054d64b9cd5f42dbe1b8100767be6c020c66e522d3c311d WatchSource:0}: Error finding container cdb59e48e39a3e500054d64b9cd5f42dbe1b8100767be6c020c66e522d3c311d: Status 404 returned error can't find the container with id cdb59e48e39a3e500054d64b9cd5f42dbe1b8100767be6c020c66e522d3c311d
Jan 22 12:18:25 crc kubenswrapper[4773]: I0122 12:18:25.932409 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:18:25 crc kubenswrapper[4773]: I0122 12:18:25.932680 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d428f354-8bb3-4be8-9108-407bb678e12b" containerName="ceilometer-central-agent" containerID="cri-o://2c8adec33450b05d1ad0f50c408bdd0def6742b06830e97106fd7c0e2ae463d6" gracePeriod=30
Jan 22 12:18:25 crc kubenswrapper[4773]: I0122 12:18:25.933104 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d428f354-8bb3-4be8-9108-407bb678e12b" containerName="proxy-httpd" containerID="cri-o://8139cdac837c382d73bbe0222e30ffe30349a6852bc6d291b80b1d6ffd2635ea" gracePeriod=30
Jan 22 12:18:25 crc kubenswrapper[4773]: I0122 12:18:25.933148 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d428f354-8bb3-4be8-9108-407bb678e12b" containerName="sg-core" containerID="cri-o://581d8807aec14b7661f74e6aef0378067e0264ee2d530d5ccb033ffd34fffe26" gracePeriod=30
Jan 22 12:18:25 crc kubenswrapper[4773]: I0122 12:18:25.933182 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d428f354-8bb3-4be8-9108-407bb678e12b" containerName="ceilometer-notification-agent" containerID="cri-o://84af1d03e405cd95144d60b6c27083ec211a353857ee7124c9819d5d535feb60" gracePeriod=30
Jan 22 12:18:26 crc kubenswrapper[4773]: I0122 12:18:26.502768 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"fd4905ba-f2f9-42c0-a21b-fabf4046af68","Type":"ContainerStarted","Data":"55704dc681b7f0b63c484b59d3778f9c323873b4e2e6cccd2b1dc30aecbc8f1e"}
Jan 22 12:18:26 crc kubenswrapper[4773]: I0122 12:18:26.502827 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"fd4905ba-f2f9-42c0-a21b-fabf4046af68","Type":"ContainerStarted","Data":"cdb59e48e39a3e500054d64b9cd5f42dbe1b8100767be6c020c66e522d3c311d"}
Jan 22 12:18:26 crc kubenswrapper[4773]: I0122 12:18:26.502867 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Jan 22 12:18:26 crc kubenswrapper[4773]: I0122 12:18:26.505276 4773 generic.go:334] "Generic (PLEG): container finished" podID="d428f354-8bb3-4be8-9108-407bb678e12b" containerID="8139cdac837c382d73bbe0222e30ffe30349a6852bc6d291b80b1d6ffd2635ea" exitCode=0
Jan 22 12:18:26 crc kubenswrapper[4773]: I0122 12:18:26.505341 4773 generic.go:334] "Generic (PLEG): container finished" podID="d428f354-8bb3-4be8-9108-407bb678e12b" containerID="581d8807aec14b7661f74e6aef0378067e0264ee2d530d5ccb033ffd34fffe26" exitCode=2
Jan 22 12:18:26 crc kubenswrapper[4773]: I0122 12:18:26.505354 4773 generic.go:334] "Generic (PLEG): container finished" podID="d428f354-8bb3-4be8-9108-407bb678e12b" containerID="2c8adec33450b05d1ad0f50c408bdd0def6742b06830e97106fd7c0e2ae463d6" exitCode=0
Jan 22 12:18:26 crc kubenswrapper[4773]: I0122 12:18:26.505375 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d428f354-8bb3-4be8-9108-407bb678e12b","Type":"ContainerDied","Data":"8139cdac837c382d73bbe0222e30ffe30349a6852bc6d291b80b1d6ffd2635ea"}
Jan 22 12:18:26 crc kubenswrapper[4773]: I0122 12:18:26.505395 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d428f354-8bb3-4be8-9108-407bb678e12b","Type":"ContainerDied","Data":"581d8807aec14b7661f74e6aef0378067e0264ee2d530d5ccb033ffd34fffe26"}
Jan 22 12:18:26 crc kubenswrapper[4773]: I0122 12:18:26.505407 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d428f354-8bb3-4be8-9108-407bb678e12b","Type":"ContainerDied","Data":"2c8adec33450b05d1ad0f50c408bdd0def6742b06830e97106fd7c0e2ae463d6"}
Jan 22 12:18:26 crc kubenswrapper[4773]: I0122 12:18:26.519628 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.154452112 podStartE2EDuration="2.519607723s" podCreationTimestamp="2026-01-22 12:18:24 +0000 UTC" firstStartedPulling="2026-01-22 12:18:25.62357762 +0000 UTC m=+1413.201693445" lastFinishedPulling="2026-01-22 12:18:25.988733221 +0000 UTC m=+1413.566849056" observedRunningTime="2026-01-22 12:18:26.51627612 +0000 UTC m=+1414.094391945" watchObservedRunningTime="2026-01-22 12:18:26.519607723 +0000 UTC m=+1414.097723548"
Jan 22 12:18:26 crc kubenswrapper[4773]: I0122 12:18:26.667365 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ab88165-fbbf-403c-b1c2-ecb80db3e8d1" path="/var/lib/kubelet/pods/8ab88165-fbbf-403c-b1c2-ecb80db3e8d1/volumes"
Jan 22 12:18:26 crc kubenswrapper[4773]: I0122 12:18:26.862680 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Jan 22 12:18:26 crc kubenswrapper[4773]: I0122 12:18:26.898045 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Jan 22 12:18:28 crc kubenswrapper[4773]: I0122 12:18:28.981116 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.161324 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-scripts\") pod \"d428f354-8bb3-4be8-9108-407bb678e12b\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") "
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.161758 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gkmxb\" (UniqueName: \"kubernetes.io/projected/d428f354-8bb3-4be8-9108-407bb678e12b-kube-api-access-gkmxb\") pod \"d428f354-8bb3-4be8-9108-407bb678e12b\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") "
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.161809 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-sg-core-conf-yaml\") pod \"d428f354-8bb3-4be8-9108-407bb678e12b\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") "
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.161859 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d428f354-8bb3-4be8-9108-407bb678e12b-run-httpd\") pod \"d428f354-8bb3-4be8-9108-407bb678e12b\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") "
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.161889 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-combined-ca-bundle\") pod \"d428f354-8bb3-4be8-9108-407bb678e12b\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") "
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.161974 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-config-data\") pod \"d428f354-8bb3-4be8-9108-407bb678e12b\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") "
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.162042 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d428f354-8bb3-4be8-9108-407bb678e12b-log-httpd\") pod \"d428f354-8bb3-4be8-9108-407bb678e12b\" (UID: \"d428f354-8bb3-4be8-9108-407bb678e12b\") "
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.162360 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d428f354-8bb3-4be8-9108-407bb678e12b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d428f354-8bb3-4be8-9108-407bb678e12b" (UID: "d428f354-8bb3-4be8-9108-407bb678e12b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.162799 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d428f354-8bb3-4be8-9108-407bb678e12b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d428f354-8bb3-4be8-9108-407bb678e12b" (UID: "d428f354-8bb3-4be8-9108-407bb678e12b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.162940 4773 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d428f354-8bb3-4be8-9108-407bb678e12b-run-httpd\") on node \"crc\" DevicePath \"\""
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.162966 4773 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d428f354-8bb3-4be8-9108-407bb678e12b-log-httpd\") on node \"crc\" DevicePath \"\""
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.171707 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d428f354-8bb3-4be8-9108-407bb678e12b-kube-api-access-gkmxb" (OuterVolumeSpecName: "kube-api-access-gkmxb") pod "d428f354-8bb3-4be8-9108-407bb678e12b" (UID: "d428f354-8bb3-4be8-9108-407bb678e12b"). InnerVolumeSpecName "kube-api-access-gkmxb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.173367 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-scripts" (OuterVolumeSpecName: "scripts") pod "d428f354-8bb3-4be8-9108-407bb678e12b" (UID: "d428f354-8bb3-4be8-9108-407bb678e12b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.189817 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d428f354-8bb3-4be8-9108-407bb678e12b" (UID: "d428f354-8bb3-4be8-9108-407bb678e12b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.247491 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d428f354-8bb3-4be8-9108-407bb678e12b" (UID: "d428f354-8bb3-4be8-9108-407bb678e12b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.261245 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-config-data" (OuterVolumeSpecName: "config-data") pod "d428f354-8bb3-4be8-9108-407bb678e12b" (UID: "d428f354-8bb3-4be8-9108-407bb678e12b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.264755 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.264783 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gkmxb\" (UniqueName: \"kubernetes.io/projected/d428f354-8bb3-4be8-9108-407bb678e12b-kube-api-access-gkmxb\") on node \"crc\" DevicePath \"\""
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.264797 4773 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.264807 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.264816 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d428f354-8bb3-4be8-9108-407bb678e12b-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.541051 4773 generic.go:334] "Generic (PLEG): container finished" podID="d428f354-8bb3-4be8-9108-407bb678e12b" containerID="84af1d03e405cd95144d60b6c27083ec211a353857ee7124c9819d5d535feb60" exitCode=0
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.541096 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d428f354-8bb3-4be8-9108-407bb678e12b","Type":"ContainerDied","Data":"84af1d03e405cd95144d60b6c27083ec211a353857ee7124c9819d5d535feb60"}
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.541138 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d428f354-8bb3-4be8-9108-407bb678e12b","Type":"ContainerDied","Data":"6e142c1a82433b35b2c5fb8e1274496feb8364d5cdaeb4183cae070a2144dc94"}
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.541156 4773 scope.go:117] "RemoveContainer" containerID="8139cdac837c382d73bbe0222e30ffe30349a6852bc6d291b80b1d6ffd2635ea"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.541367 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.578795 4773 scope.go:117] "RemoveContainer" containerID="581d8807aec14b7661f74e6aef0378067e0264ee2d530d5ccb033ffd34fffe26"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.591239 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.601496 4773 scope.go:117] "RemoveContainer" containerID="84af1d03e405cd95144d60b6c27083ec211a353857ee7124c9819d5d535feb60"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.603994 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.623996 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:18:29 crc kubenswrapper[4773]: E0122 12:18:29.624493 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d428f354-8bb3-4be8-9108-407bb678e12b" containerName="ceilometer-central-agent"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.624517 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d428f354-8bb3-4be8-9108-407bb678e12b" containerName="ceilometer-central-agent"
Jan 22 12:18:29 crc kubenswrapper[4773]: E0122 12:18:29.624541 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d428f354-8bb3-4be8-9108-407bb678e12b" containerName="proxy-httpd"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.624550 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d428f354-8bb3-4be8-9108-407bb678e12b" containerName="proxy-httpd"
Jan 22 12:18:29 crc kubenswrapper[4773]: E0122 12:18:29.624566 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d428f354-8bb3-4be8-9108-407bb678e12b" containerName="ceilometer-notification-agent"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.624574 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d428f354-8bb3-4be8-9108-407bb678e12b" containerName="ceilometer-notification-agent"
Jan 22 12:18:29 crc kubenswrapper[4773]: E0122 12:18:29.624597 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d428f354-8bb3-4be8-9108-407bb678e12b" containerName="sg-core"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.624603 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d428f354-8bb3-4be8-9108-407bb678e12b" containerName="sg-core"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.628381 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="d428f354-8bb3-4be8-9108-407bb678e12b" containerName="sg-core"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.628428 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="d428f354-8bb3-4be8-9108-407bb678e12b" containerName="ceilometer-notification-agent"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.628460 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="d428f354-8bb3-4be8-9108-407bb678e12b" containerName="ceilometer-central-agent"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.628478 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="d428f354-8bb3-4be8-9108-407bb678e12b" containerName="proxy-httpd"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.633589 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.635881 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.636435 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.636676 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.659783 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.672378 4773 scope.go:117] "RemoveContainer" containerID="2c8adec33450b05d1ad0f50c408bdd0def6742b06830e97106fd7c0e2ae463d6"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.673020 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.673092 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-scripts\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.673328 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.673483 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c8e311c-c20f-426d-8add-079480b5bc8d-run-httpd\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.673523 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-config-data\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.673633 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c8e311c-c20f-426d-8add-079480b5bc8d-log-httpd\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.673684 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.673793 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26h2h\" (UniqueName: \"kubernetes.io/projected/1c8e311c-c20f-426d-8add-079480b5bc8d-kube-api-access-26h2h\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.693718 4773 scope.go:117] "RemoveContainer" containerID="8139cdac837c382d73bbe0222e30ffe30349a6852bc6d291b80b1d6ffd2635ea"
Jan 22 12:18:29 crc kubenswrapper[4773]: E0122 12:18:29.694324 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8139cdac837c382d73bbe0222e30ffe30349a6852bc6d291b80b1d6ffd2635ea\": container with ID starting with 8139cdac837c382d73bbe0222e30ffe30349a6852bc6d291b80b1d6ffd2635ea not found: ID does not exist" containerID="8139cdac837c382d73bbe0222e30ffe30349a6852bc6d291b80b1d6ffd2635ea"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.694366 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8139cdac837c382d73bbe0222e30ffe30349a6852bc6d291b80b1d6ffd2635ea"} err="failed to get container status \"8139cdac837c382d73bbe0222e30ffe30349a6852bc6d291b80b1d6ffd2635ea\": rpc error: code = NotFound desc = could not find container \"8139cdac837c382d73bbe0222e30ffe30349a6852bc6d291b80b1d6ffd2635ea\": container with ID starting with 8139cdac837c382d73bbe0222e30ffe30349a6852bc6d291b80b1d6ffd2635ea not found: ID does not exist"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.694392 4773 scope.go:117] "RemoveContainer" containerID="581d8807aec14b7661f74e6aef0378067e0264ee2d530d5ccb033ffd34fffe26"
Jan 22 12:18:29 crc kubenswrapper[4773]: E0122 12:18:29.694742 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"581d8807aec14b7661f74e6aef0378067e0264ee2d530d5ccb033ffd34fffe26\": container with ID starting with 581d8807aec14b7661f74e6aef0378067e0264ee2d530d5ccb033ffd34fffe26 not found: ID does not exist" containerID="581d8807aec14b7661f74e6aef0378067e0264ee2d530d5ccb033ffd34fffe26"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.694769 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"581d8807aec14b7661f74e6aef0378067e0264ee2d530d5ccb033ffd34fffe26"} err="failed to get container status \"581d8807aec14b7661f74e6aef0378067e0264ee2d530d5ccb033ffd34fffe26\": rpc error: code = NotFound desc = could not find container \"581d8807aec14b7661f74e6aef0378067e0264ee2d530d5ccb033ffd34fffe26\": container with ID starting with 581d8807aec14b7661f74e6aef0378067e0264ee2d530d5ccb033ffd34fffe26 not found: ID does not exist"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.694784 4773 scope.go:117] "RemoveContainer" containerID="84af1d03e405cd95144d60b6c27083ec211a353857ee7124c9819d5d535feb60"
Jan 22 12:18:29 crc kubenswrapper[4773]: E0122 12:18:29.695122 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84af1d03e405cd95144d60b6c27083ec211a353857ee7124c9819d5d535feb60\": container with ID starting with 84af1d03e405cd95144d60b6c27083ec211a353857ee7124c9819d5d535feb60 not found: ID does not exist" containerID="84af1d03e405cd95144d60b6c27083ec211a353857ee7124c9819d5d535feb60"
Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.695186 4773 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"84af1d03e405cd95144d60b6c27083ec211a353857ee7124c9819d5d535feb60"} err="failed to get container status \"84af1d03e405cd95144d60b6c27083ec211a353857ee7124c9819d5d535feb60\": rpc error: code = NotFound desc = could not find container \"84af1d03e405cd95144d60b6c27083ec211a353857ee7124c9819d5d535feb60\": container with ID starting with 84af1d03e405cd95144d60b6c27083ec211a353857ee7124c9819d5d535feb60 not found: ID does not exist" Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.695216 4773 scope.go:117] "RemoveContainer" containerID="2c8adec33450b05d1ad0f50c408bdd0def6742b06830e97106fd7c0e2ae463d6" Jan 22 12:18:29 crc kubenswrapper[4773]: E0122 12:18:29.695516 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c8adec33450b05d1ad0f50c408bdd0def6742b06830e97106fd7c0e2ae463d6\": container with ID starting with 2c8adec33450b05d1ad0f50c408bdd0def6742b06830e97106fd7c0e2ae463d6 not found: ID does not exist" containerID="2c8adec33450b05d1ad0f50c408bdd0def6742b06830e97106fd7c0e2ae463d6" Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.695540 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c8adec33450b05d1ad0f50c408bdd0def6742b06830e97106fd7c0e2ae463d6"} err="failed to get container status \"2c8adec33450b05d1ad0f50c408bdd0def6742b06830e97106fd7c0e2ae463d6\": rpc error: code = NotFound desc = could not find container \"2c8adec33450b05d1ad0f50c408bdd0def6742b06830e97106fd7c0e2ae463d6\": container with ID starting with 2c8adec33450b05d1ad0f50c408bdd0def6742b06830e97106fd7c0e2ae463d6 not found: ID does not exist" Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.774733 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0" Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.774793 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c8e311c-c20f-426d-8add-079480b5bc8d-run-httpd\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0" Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.774815 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-config-data\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0" Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.774855 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c8e311c-c20f-426d-8add-079480b5bc8d-log-httpd\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0" Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.774881 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0" Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.774907 4773 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26h2h\" (UniqueName: \"kubernetes.io/projected/1c8e311c-c20f-426d-8add-079480b5bc8d-kube-api-access-26h2h\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0" Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.775022 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0" Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.775055 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-scripts\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0" Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.775939 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c8e311c-c20f-426d-8add-079480b5bc8d-log-httpd\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0" Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.775968 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c8e311c-c20f-426d-8add-079480b5bc8d-run-httpd\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0" Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.780252 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-scripts\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0" Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.780537 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-config-data\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0" Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.782986 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0" Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.785961 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0" Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.789961 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0" Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.791974 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-26h2h\" (UniqueName: \"kubernetes.io/projected/1c8e311c-c20f-426d-8add-079480b5bc8d-kube-api-access-26h2h\") pod \"ceilometer-0\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " pod="openstack/ceilometer-0" Jan 22 12:18:29 crc kubenswrapper[4773]: I0122 12:18:29.967581 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 12:18:30 crc kubenswrapper[4773]: I0122 12:18:30.447802 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:18:30 crc kubenswrapper[4773]: W0122 12:18:30.450154 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c8e311c_c20f_426d_8add_079480b5bc8d.slice/crio-32c583e3cec0e24af9f10b34261bb48d378f7896283234345e44e5e636715e76 WatchSource:0}: Error finding container 32c583e3cec0e24af9f10b34261bb48d378f7896283234345e44e5e636715e76: Status 404 returned error can't find the container with id 32c583e3cec0e24af9f10b34261bb48d378f7896283234345e44e5e636715e76 Jan 22 12:18:30 crc kubenswrapper[4773]: I0122 12:18:30.555129 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c8e311c-c20f-426d-8add-079480b5bc8d","Type":"ContainerStarted","Data":"32c583e3cec0e24af9f10b34261bb48d378f7896283234345e44e5e636715e76"} Jan 22 12:18:30 crc kubenswrapper[4773]: I0122 12:18:30.671060 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d428f354-8bb3-4be8-9108-407bb678e12b" path="/var/lib/kubelet/pods/d428f354-8bb3-4be8-9108-407bb678e12b/volumes" Jan 22 12:18:31 crc kubenswrapper[4773]: I0122 12:18:31.566608 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c8e311c-c20f-426d-8add-079480b5bc8d","Type":"ContainerStarted","Data":"754e0b1d96c9549b2336853c30a790b2617f69aa9d79d86e7eeb32fbe4ae6117"} Jan 22 12:18:31 crc kubenswrapper[4773]: I0122 12:18:31.897869 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 22 12:18:31 crc kubenswrapper[4773]: I0122 12:18:31.935542 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 22 12:18:32 crc kubenswrapper[4773]: I0122 12:18:32.585131 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c8e311c-c20f-426d-8add-079480b5bc8d","Type":"ContainerStarted","Data":"b98da3d075719d522706593b4cd0faf390bf75e9fc1ef759a35c1e0e0784cd95"} Jan 22 12:18:32 crc kubenswrapper[4773]: I0122 12:18:32.585184 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c8e311c-c20f-426d-8add-079480b5bc8d","Type":"ContainerStarted","Data":"31b886221b6af0e453facaea09203a62c99d45621deaf1934faec4158a444273"} Jan 22 12:18:32 crc kubenswrapper[4773]: I0122 12:18:32.616004 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 22 12:18:32 crc kubenswrapper[4773]: I0122 12:18:32.867067 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 12:18:32 crc kubenswrapper[4773]: I0122 12:18:32.867610 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 12:18:33 crc kubenswrapper[4773]: I0122 12:18:33.908506 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" 
podUID="6fb7e130-2923-482e-8861-6c40e010a0fb" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.197:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 12:18:33 crc kubenswrapper[4773]: I0122 12:18:33.949538 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6fb7e130-2923-482e-8861-6c40e010a0fb" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.197:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 12:18:34 crc kubenswrapper[4773]: I0122 12:18:34.603937 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c8e311c-c20f-426d-8add-079480b5bc8d","Type":"ContainerStarted","Data":"e8d8fde545d219dc14a1f6374f5728798c2315e9a9921483b739f9ffb46c7e42"} Jan 22 12:18:34 crc kubenswrapper[4773]: I0122 12:18:34.604170 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 22 12:18:35 crc kubenswrapper[4773]: I0122 12:18:35.158570 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 22 12:18:35 crc kubenswrapper[4773]: I0122 12:18:35.204643 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.964060151 podStartE2EDuration="6.204617259s" podCreationTimestamp="2026-01-22 12:18:29 +0000 UTC" firstStartedPulling="2026-01-22 12:18:30.453978566 +0000 UTC m=+1418.032094391" lastFinishedPulling="2026-01-22 12:18:33.694535674 +0000 UTC m=+1421.272651499" observedRunningTime="2026-01-22 12:18:34.637646702 +0000 UTC m=+1422.215762517" watchObservedRunningTime="2026-01-22 12:18:35.204617259 +0000 UTC m=+1422.782733094" Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.654242 4773 generic.go:334] "Generic (PLEG): container finished" podID="3c3c2963-896c-4841-9148-16e573d4b678" containerID="b686d246d0a8cf9de82f312f514ede249020f303b6d960c4a02d1e1a09deb7c8" exitCode=137 Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.654308 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3c3c2963-896c-4841-9148-16e573d4b678","Type":"ContainerDied","Data":"b686d246d0a8cf9de82f312f514ede249020f303b6d960c4a02d1e1a09deb7c8"} Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.657153 4773 generic.go:334] "Generic (PLEG): container finished" podID="22d6745b-199e-4c87-835a-e3bc633b67cb" containerID="75756eaa172956b31b3f1c131ef68ec0ea677b6a7e06b13fb7b69fdee88d5e0b" exitCode=137 Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.657192 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"22d6745b-199e-4c87-835a-e3bc633b67cb","Type":"ContainerDied","Data":"75756eaa172956b31b3f1c131ef68ec0ea677b6a7e06b13fb7b69fdee88d5e0b"} Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.801188 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.807491 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.811487 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c3c2963-896c-4841-9148-16e573d4b678-logs\") pod \"3c3c2963-896c-4841-9148-16e573d4b678\" (UID: \"3c3c2963-896c-4841-9148-16e573d4b678\") " Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.811539 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22d6745b-199e-4c87-835a-e3bc633b67cb-combined-ca-bundle\") pod \"22d6745b-199e-4c87-835a-e3bc633b67cb\" (UID: \"22d6745b-199e-4c87-835a-e3bc633b67cb\") " Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.811579 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c3c2963-896c-4841-9148-16e573d4b678-combined-ca-bundle\") pod \"3c3c2963-896c-4841-9148-16e573d4b678\" (UID: \"3c3c2963-896c-4841-9148-16e573d4b678\") " Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.811616 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvtcj\" (UniqueName: \"kubernetes.io/projected/22d6745b-199e-4c87-835a-e3bc633b67cb-kube-api-access-lvtcj\") pod \"22d6745b-199e-4c87-835a-e3bc633b67cb\" (UID: \"22d6745b-199e-4c87-835a-e3bc633b67cb\") " Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.811642 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c3c2963-896c-4841-9148-16e573d4b678-config-data\") pod \"3c3c2963-896c-4841-9148-16e573d4b678\" (UID: \"3c3c2963-896c-4841-9148-16e573d4b678\") " Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.811684 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22d6745b-199e-4c87-835a-e3bc633b67cb-config-data\") pod \"22d6745b-199e-4c87-835a-e3bc633b67cb\" (UID: \"22d6745b-199e-4c87-835a-e3bc633b67cb\") " Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.811717 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vrzm\" (UniqueName: \"kubernetes.io/projected/3c3c2963-896c-4841-9148-16e573d4b678-kube-api-access-5vrzm\") pod \"3c3c2963-896c-4841-9148-16e573d4b678\" (UID: \"3c3c2963-896c-4841-9148-16e573d4b678\") " Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.812064 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c3c2963-896c-4841-9148-16e573d4b678-logs" (OuterVolumeSpecName: "logs") pod "3c3c2963-896c-4841-9148-16e573d4b678" (UID: "3c3c2963-896c-4841-9148-16e573d4b678"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.812943 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c3c2963-896c-4841-9148-16e573d4b678-logs\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.821399 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c3c2963-896c-4841-9148-16e573d4b678-kube-api-access-5vrzm" (OuterVolumeSpecName: "kube-api-access-5vrzm") pod "3c3c2963-896c-4841-9148-16e573d4b678" (UID: "3c3c2963-896c-4841-9148-16e573d4b678"). InnerVolumeSpecName "kube-api-access-5vrzm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.823004 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22d6745b-199e-4c87-835a-e3bc633b67cb-kube-api-access-lvtcj" (OuterVolumeSpecName: "kube-api-access-lvtcj") pod "22d6745b-199e-4c87-835a-e3bc633b67cb" (UID: "22d6745b-199e-4c87-835a-e3bc633b67cb"). InnerVolumeSpecName "kube-api-access-lvtcj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.864902 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c3c2963-896c-4841-9148-16e573d4b678-config-data" (OuterVolumeSpecName: "config-data") pod "3c3c2963-896c-4841-9148-16e573d4b678" (UID: "3c3c2963-896c-4841-9148-16e573d4b678"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.879255 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22d6745b-199e-4c87-835a-e3bc633b67cb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "22d6745b-199e-4c87-835a-e3bc633b67cb" (UID: "22d6745b-199e-4c87-835a-e3bc633b67cb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.879855 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22d6745b-199e-4c87-835a-e3bc633b67cb-config-data" (OuterVolumeSpecName: "config-data") pod "22d6745b-199e-4c87-835a-e3bc633b67cb" (UID: "22d6745b-199e-4c87-835a-e3bc633b67cb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.887384 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c3c2963-896c-4841-9148-16e573d4b678-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3c3c2963-896c-4841-9148-16e573d4b678" (UID: "3c3c2963-896c-4841-9148-16e573d4b678"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.914958 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22d6745b-199e-4c87-835a-e3bc633b67cb-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.914998 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vrzm\" (UniqueName: \"kubernetes.io/projected/3c3c2963-896c-4841-9148-16e573d4b678-kube-api-access-5vrzm\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.915008 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22d6745b-199e-4c87-835a-e3bc633b67cb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.915016 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c3c2963-896c-4841-9148-16e573d4b678-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.915025 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvtcj\" (UniqueName: \"kubernetes.io/projected/22d6745b-199e-4c87-835a-e3bc633b67cb-kube-api-access-lvtcj\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:39 crc kubenswrapper[4773]: I0122 12:18:39.915033 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c3c2963-896c-4841-9148-16e573d4b678-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.669639 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.671988 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"22d6745b-199e-4c87-835a-e3bc633b67cb","Type":"ContainerDied","Data":"7df95a5bf4b203f5d192f97d14c8c24918a7e9a036ef96cce7c94976d790b113"} Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.672050 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.672063 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3c3c2963-896c-4841-9148-16e573d4b678","Type":"ContainerDied","Data":"18a585ebf71b6a87b5fd46f5d62fe20ba12c63f90657e748293ee794d200cc53"} Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.672100 4773 scope.go:117] "RemoveContainer" containerID="75756eaa172956b31b3f1c131ef68ec0ea677b6a7e06b13fb7b69fdee88d5e0b" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.753144 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.759172 4773 scope.go:117] "RemoveContainer" containerID="b686d246d0a8cf9de82f312f514ede249020f303b6d960c4a02d1e1a09deb7c8" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.769378 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.784166 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.794268 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 22 12:18:40 crc kubenswrapper[4773]: E0122 12:18:40.794877 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c3c2963-896c-4841-9148-16e573d4b678" containerName="nova-metadata-metadata" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.794905 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c3c2963-896c-4841-9148-16e573d4b678" containerName="nova-metadata-metadata" Jan 22 12:18:40 crc kubenswrapper[4773]: E0122 12:18:40.794929 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22d6745b-199e-4c87-835a-e3bc633b67cb" containerName="nova-cell1-novncproxy-novncproxy" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.794938 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="22d6745b-199e-4c87-835a-e3bc633b67cb" containerName="nova-cell1-novncproxy-novncproxy" Jan 22 12:18:40 crc kubenswrapper[4773]: E0122 12:18:40.794961 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c3c2963-896c-4841-9148-16e573d4b678" containerName="nova-metadata-log" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.794970 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c3c2963-896c-4841-9148-16e573d4b678" containerName="nova-metadata-log" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.795213 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c3c2963-896c-4841-9148-16e573d4b678" containerName="nova-metadata-metadata" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.795238 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="22d6745b-199e-4c87-835a-e3bc633b67cb" containerName="nova-cell1-novncproxy-novncproxy" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.795248 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c3c2963-896c-4841-9148-16e573d4b678" containerName="nova-metadata-log" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.801248 4773 scope.go:117] "RemoveContainer" containerID="f0bd586169e1175bd72e0ef2b569aa17aa4b2ac6f2536ea8fb680e61c23f04a1" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.801573 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.803058 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.812022 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.812217 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.812358 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.821796 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.823470 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.826415 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.826652 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.826866 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.827006 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.835342 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f39ee9d9-6260-48e5-adab-63a7bb81f68b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " pod="openstack/nova-metadata-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.835426 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.835449 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f39ee9d9-6260-48e5-adab-63a7bb81f68b-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " pod="openstack/nova-metadata-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.835481 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbpnz\" (UniqueName: \"kubernetes.io/projected/f39ee9d9-6260-48e5-adab-63a7bb81f68b-kube-api-access-rbpnz\") pod \"nova-metadata-0\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " pod="openstack/nova-metadata-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.835511 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/f39ee9d9-6260-48e5-adab-63a7bb81f68b-logs\") pod \"nova-metadata-0\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " pod="openstack/nova-metadata-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.835558 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f39ee9d9-6260-48e5-adab-63a7bb81f68b-config-data\") pod \"nova-metadata-0\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " pod="openstack/nova-metadata-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.835589 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.835606 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9frq\" (UniqueName: \"kubernetes.io/projected/40f482a8-ca15-4e6f-a4af-4579c5f66101-kube-api-access-d9frq\") pod \"nova-cell1-novncproxy-0\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.835630 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.835653 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.937583 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f39ee9d9-6260-48e5-adab-63a7bb81f68b-config-data\") pod \"nova-metadata-0\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " pod="openstack/nova-metadata-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.937651 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.937680 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9frq\" (UniqueName: \"kubernetes.io/projected/40f482a8-ca15-4e6f-a4af-4579c5f66101-kube-api-access-d9frq\") pod \"nova-cell1-novncproxy-0\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.937710 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.937742 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.937796 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f39ee9d9-6260-48e5-adab-63a7bb81f68b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " pod="openstack/nova-metadata-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.937846 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.937863 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f39ee9d9-6260-48e5-adab-63a7bb81f68b-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " pod="openstack/nova-metadata-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.937895 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbpnz\" (UniqueName: \"kubernetes.io/projected/f39ee9d9-6260-48e5-adab-63a7bb81f68b-kube-api-access-rbpnz\") pod \"nova-metadata-0\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " pod="openstack/nova-metadata-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.937925 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f39ee9d9-6260-48e5-adab-63a7bb81f68b-logs\") pod \"nova-metadata-0\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " pod="openstack/nova-metadata-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.939010 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f39ee9d9-6260-48e5-adab-63a7bb81f68b-logs\") pod \"nova-metadata-0\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " pod="openstack/nova-metadata-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.944230 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.944822 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f39ee9d9-6260-48e5-adab-63a7bb81f68b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " pod="openstack/nova-metadata-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.946400 
4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f39ee9d9-6260-48e5-adab-63a7bb81f68b-config-data\") pod \"nova-metadata-0\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " pod="openstack/nova-metadata-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.947223 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f39ee9d9-6260-48e5-adab-63a7bb81f68b-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " pod="openstack/nova-metadata-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.949882 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.952048 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.954796 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.956062 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9frq\" (UniqueName: \"kubernetes.io/projected/40f482a8-ca15-4e6f-a4af-4579c5f66101-kube-api-access-d9frq\") pod \"nova-cell1-novncproxy-0\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:40 crc kubenswrapper[4773]: I0122 12:18:40.957485 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbpnz\" (UniqueName: \"kubernetes.io/projected/f39ee9d9-6260-48e5-adab-63a7bb81f68b-kube-api-access-rbpnz\") pod \"nova-metadata-0\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " pod="openstack/nova-metadata-0" Jan 22 12:18:41 crc kubenswrapper[4773]: I0122 12:18:41.126096 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 12:18:41 crc kubenswrapper[4773]: I0122 12:18:41.167912 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:41 crc kubenswrapper[4773]: I0122 12:18:41.640023 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 12:18:41 crc kubenswrapper[4773]: I0122 12:18:41.683605 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f39ee9d9-6260-48e5-adab-63a7bb81f68b","Type":"ContainerStarted","Data":"c8e01f4a1c090aa58ab9e0ed2640cc450b32d2a7a526d1a321ef0641e1e2b708"} Jan 22 12:18:41 crc kubenswrapper[4773]: I0122 12:18:41.722053 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 12:18:41 crc kubenswrapper[4773]: W0122 12:18:41.725421 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod40f482a8_ca15_4e6f_a4af_4579c5f66101.slice/crio-2ebfc9d45f8036c9f271dd569f6a9f4f8e507b90ff1b542f14e379eadb25e8f1 WatchSource:0}: Error finding container 2ebfc9d45f8036c9f271dd569f6a9f4f8e507b90ff1b542f14e379eadb25e8f1: Status 404 returned error can't find the container with id 2ebfc9d45f8036c9f271dd569f6a9f4f8e507b90ff1b542f14e379eadb25e8f1 Jan 22 12:18:42 crc kubenswrapper[4773]: I0122 12:18:42.672183 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22d6745b-199e-4c87-835a-e3bc633b67cb" path="/var/lib/kubelet/pods/22d6745b-199e-4c87-835a-e3bc633b67cb/volumes" Jan 22 12:18:42 crc kubenswrapper[4773]: I0122 12:18:42.673622 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c3c2963-896c-4841-9148-16e573d4b678" path="/var/lib/kubelet/pods/3c3c2963-896c-4841-9148-16e573d4b678/volumes" Jan 22 12:18:42 crc kubenswrapper[4773]: I0122 12:18:42.709849 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f39ee9d9-6260-48e5-adab-63a7bb81f68b","Type":"ContainerStarted","Data":"1495548aec62e561b8ae2887478a9ced4ee717b17615a035fafa9721acb744a2"} Jan 22 12:18:42 crc kubenswrapper[4773]: I0122 12:18:42.709892 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f39ee9d9-6260-48e5-adab-63a7bb81f68b","Type":"ContainerStarted","Data":"807bcb53efd78f1ee911c47ee8146803a684264debb69cda6620e4ce7aad87ff"} Jan 22 12:18:42 crc kubenswrapper[4773]: I0122 12:18:42.712044 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"40f482a8-ca15-4e6f-a4af-4579c5f66101","Type":"ContainerStarted","Data":"100e88af17884d8fb4d35904ad9c62fc7acae0e26e17f113782fb3fad537ae96"} Jan 22 12:18:42 crc kubenswrapper[4773]: I0122 12:18:42.712076 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"40f482a8-ca15-4e6f-a4af-4579c5f66101","Type":"ContainerStarted","Data":"2ebfc9d45f8036c9f271dd569f6a9f4f8e507b90ff1b542f14e379eadb25e8f1"} Jan 22 12:18:42 crc kubenswrapper[4773]: I0122 12:18:42.736331 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.736276384 podStartE2EDuration="2.736276384s" podCreationTimestamp="2026-01-22 12:18:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:18:42.728102694 +0000 UTC m=+1430.306218519" watchObservedRunningTime="2026-01-22 12:18:42.736276384 +0000 UTC m=+1430.314392219" Jan 22 12:18:42 crc kubenswrapper[4773]: I0122 
12:18:42.748620 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.74859531 podStartE2EDuration="2.74859531s" podCreationTimestamp="2026-01-22 12:18:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:18:42.744116684 +0000 UTC m=+1430.322232509" watchObservedRunningTime="2026-01-22 12:18:42.74859531 +0000 UTC m=+1430.326711135" Jan 22 12:18:42 crc kubenswrapper[4773]: I0122 12:18:42.872778 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 22 12:18:42 crc kubenswrapper[4773]: I0122 12:18:42.872972 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 22 12:18:42 crc kubenswrapper[4773]: I0122 12:18:42.873586 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 22 12:18:42 crc kubenswrapper[4773]: I0122 12:18:42.873651 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 22 12:18:42 crc kubenswrapper[4773]: I0122 12:18:42.878191 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 22 12:18:42 crc kubenswrapper[4773]: I0122 12:18:42.882788 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.111930 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-fcd6f8f8f-82zlq"] Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.113416 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.209111 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fcd6f8f8f-82zlq"] Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.295864 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-dns-svc\") pod \"dnsmasq-dns-fcd6f8f8f-82zlq\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.296092 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-ovsdbserver-sb\") pod \"dnsmasq-dns-fcd6f8f8f-82zlq\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.296188 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-config\") pod \"dnsmasq-dns-fcd6f8f8f-82zlq\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.296229 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-dns-swift-storage-0\") pod \"dnsmasq-dns-fcd6f8f8f-82zlq\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") " 
pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.296267 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-ovsdbserver-nb\") pod \"dnsmasq-dns-fcd6f8f8f-82zlq\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.296369 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5kfv\" (UniqueName: \"kubernetes.io/projected/e4868de7-6200-4cd9-948b-e0cdbbac5838-kube-api-access-d5kfv\") pod \"dnsmasq-dns-fcd6f8f8f-82zlq\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.399098 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-ovsdbserver-sb\") pod \"dnsmasq-dns-fcd6f8f8f-82zlq\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.399169 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-config\") pod \"dnsmasq-dns-fcd6f8f8f-82zlq\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.399198 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-dns-swift-storage-0\") pod \"dnsmasq-dns-fcd6f8f8f-82zlq\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.399220 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-ovsdbserver-nb\") pod \"dnsmasq-dns-fcd6f8f8f-82zlq\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.399296 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5kfv\" (UniqueName: \"kubernetes.io/projected/e4868de7-6200-4cd9-948b-e0cdbbac5838-kube-api-access-d5kfv\") pod \"dnsmasq-dns-fcd6f8f8f-82zlq\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.399355 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-dns-svc\") pod \"dnsmasq-dns-fcd6f8f8f-82zlq\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.400221 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-dns-svc\") pod \"dnsmasq-dns-fcd6f8f8f-82zlq\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 
22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.400221 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-ovsdbserver-sb\") pod \"dnsmasq-dns-fcd6f8f8f-82zlq\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.400522 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-ovsdbserver-nb\") pod \"dnsmasq-dns-fcd6f8f8f-82zlq\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.401130 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-dns-swift-storage-0\") pod \"dnsmasq-dns-fcd6f8f8f-82zlq\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.401562 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-config\") pod \"dnsmasq-dns-fcd6f8f8f-82zlq\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.418924 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5kfv\" (UniqueName: \"kubernetes.io/projected/e4868de7-6200-4cd9-948b-e0cdbbac5838-kube-api-access-d5kfv\") pod \"dnsmasq-dns-fcd6f8f8f-82zlq\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.438965 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:43 crc kubenswrapper[4773]: I0122 12:18:43.971449 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fcd6f8f8f-82zlq"] Jan 22 12:18:43 crc kubenswrapper[4773]: W0122 12:18:43.972346 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4868de7_6200_4cd9_948b_e0cdbbac5838.slice/crio-3d8bb2689e108796068027a1395a177ed1c49c74fe51a0ece872707a44d388aa WatchSource:0}: Error finding container 3d8bb2689e108796068027a1395a177ed1c49c74fe51a0ece872707a44d388aa: Status 404 returned error can't find the container with id 3d8bb2689e108796068027a1395a177ed1c49c74fe51a0ece872707a44d388aa Jan 22 12:18:44 crc kubenswrapper[4773]: I0122 12:18:44.732531 4773 generic.go:334] "Generic (PLEG): container finished" podID="e4868de7-6200-4cd9-948b-e0cdbbac5838" containerID="d36e3d210753b939d6e9d583e439f8568814359d8e8edd1bc3bde97ea773b8af" exitCode=0 Jan 22 12:18:44 crc kubenswrapper[4773]: I0122 12:18:44.732748 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" event={"ID":"e4868de7-6200-4cd9-948b-e0cdbbac5838","Type":"ContainerDied","Data":"d36e3d210753b939d6e9d583e439f8568814359d8e8edd1bc3bde97ea773b8af"} Jan 22 12:18:44 crc kubenswrapper[4773]: I0122 12:18:44.732939 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" event={"ID":"e4868de7-6200-4cd9-948b-e0cdbbac5838","Type":"ContainerStarted","Data":"3d8bb2689e108796068027a1395a177ed1c49c74fe51a0ece872707a44d388aa"} Jan 22 12:18:45 crc kubenswrapper[4773]: I0122 12:18:45.211633 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:18:45 crc kubenswrapper[4773]: I0122 12:18:45.212210 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerName="ceilometer-central-agent" containerID="cri-o://754e0b1d96c9549b2336853c30a790b2617f69aa9d79d86e7eeb32fbe4ae6117" gracePeriod=30 Jan 22 12:18:45 crc kubenswrapper[4773]: I0122 12:18:45.212706 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerName="sg-core" containerID="cri-o://b98da3d075719d522706593b4cd0faf390bf75e9fc1ef759a35c1e0e0784cd95" gracePeriod=30 Jan 22 12:18:45 crc kubenswrapper[4773]: I0122 12:18:45.212720 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerName="proxy-httpd" containerID="cri-o://e8d8fde545d219dc14a1f6374f5728798c2315e9a9921483b739f9ffb46c7e42" gracePeriod=30 Jan 22 12:18:45 crc kubenswrapper[4773]: I0122 12:18:45.212770 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerName="ceilometer-notification-agent" containerID="cri-o://31b886221b6af0e453facaea09203a62c99d45621deaf1934faec4158a444273" gracePeriod=30 Jan 22 12:18:45 crc kubenswrapper[4773]: I0122 12:18:45.222969 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.199:3000/\": EOF" Jan 22 12:18:45 crc kubenswrapper[4773]: I0122 
12:18:45.649525 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 12:18:45 crc kubenswrapper[4773]: I0122 12:18:45.746205 4773 generic.go:334] "Generic (PLEG): container finished" podID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerID="e8d8fde545d219dc14a1f6374f5728798c2315e9a9921483b739f9ffb46c7e42" exitCode=0 Jan 22 12:18:45 crc kubenswrapper[4773]: I0122 12:18:45.746552 4773 generic.go:334] "Generic (PLEG): container finished" podID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerID="b98da3d075719d522706593b4cd0faf390bf75e9fc1ef759a35c1e0e0784cd95" exitCode=2 Jan 22 12:18:45 crc kubenswrapper[4773]: I0122 12:18:45.746661 4773 generic.go:334] "Generic (PLEG): container finished" podID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerID="754e0b1d96c9549b2336853c30a790b2617f69aa9d79d86e7eeb32fbe4ae6117" exitCode=0 Jan 22 12:18:45 crc kubenswrapper[4773]: I0122 12:18:45.746265 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c8e311c-c20f-426d-8add-079480b5bc8d","Type":"ContainerDied","Data":"e8d8fde545d219dc14a1f6374f5728798c2315e9a9921483b739f9ffb46c7e42"} Jan 22 12:18:45 crc kubenswrapper[4773]: I0122 12:18:45.746880 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c8e311c-c20f-426d-8add-079480b5bc8d","Type":"ContainerDied","Data":"b98da3d075719d522706593b4cd0faf390bf75e9fc1ef759a35c1e0e0784cd95"} Jan 22 12:18:45 crc kubenswrapper[4773]: I0122 12:18:45.746898 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c8e311c-c20f-426d-8add-079480b5bc8d","Type":"ContainerDied","Data":"754e0b1d96c9549b2336853c30a790b2617f69aa9d79d86e7eeb32fbe4ae6117"} Jan 22 12:18:45 crc kubenswrapper[4773]: I0122 12:18:45.749059 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" event={"ID":"e4868de7-6200-4cd9-948b-e0cdbbac5838","Type":"ContainerStarted","Data":"9d37e09ac80e6c67a8c9a0e03394d9e19ff82acb6e4f4e52dde64dc3c023fdfe"} Jan 22 12:18:45 crc kubenswrapper[4773]: I0122 12:18:45.749217 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6fb7e130-2923-482e-8861-6c40e010a0fb" containerName="nova-api-log" containerID="cri-o://89c821cc95285388855eaa6ba9c7d09721141cc7a2707ff6a7762af3e3f66e34" gracePeriod=30 Jan 22 12:18:45 crc kubenswrapper[4773]: I0122 12:18:45.749325 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6fb7e130-2923-482e-8861-6c40e010a0fb" containerName="nova-api-api" containerID="cri-o://12a444f35fa6de0e33e245e405626c8d10a6c4f754a855d6bdbc0aed9fedc82e" gracePeriod=30 Jan 22 12:18:45 crc kubenswrapper[4773]: I0122 12:18:45.749603 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:18:45 crc kubenswrapper[4773]: I0122 12:18:45.774269 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" podStartSLOduration=2.774251073 podStartE2EDuration="2.774251073s" podCreationTimestamp="2026-01-22 12:18:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:18:45.772033581 +0000 UTC m=+1433.350149436" watchObservedRunningTime="2026-01-22 12:18:45.774251073 +0000 UTC m=+1433.352366908" Jan 22 12:18:46 crc kubenswrapper[4773]: I0122 
12:18:46.127340 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 12:18:46 crc kubenswrapper[4773]: I0122 12:18:46.127413 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 12:18:46 crc kubenswrapper[4773]: I0122 12:18:46.168004 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:46 crc kubenswrapper[4773]: I0122 12:18:46.760348 4773 generic.go:334] "Generic (PLEG): container finished" podID="6fb7e130-2923-482e-8861-6c40e010a0fb" containerID="89c821cc95285388855eaa6ba9c7d09721141cc7a2707ff6a7762af3e3f66e34" exitCode=143 Jan 22 12:18:46 crc kubenswrapper[4773]: I0122 12:18:46.763317 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6fb7e130-2923-482e-8861-6c40e010a0fb","Type":"ContainerDied","Data":"89c821cc95285388855eaa6ba9c7d09721141cc7a2707ff6a7762af3e3f66e34"} Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.603002 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.772306 4773 generic.go:334] "Generic (PLEG): container finished" podID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerID="31b886221b6af0e453facaea09203a62c99d45621deaf1934faec4158a444273" exitCode=0 Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.772351 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c8e311c-c20f-426d-8add-079480b5bc8d","Type":"ContainerDied","Data":"31b886221b6af0e453facaea09203a62c99d45621deaf1934faec4158a444273"} Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.772406 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c8e311c-c20f-426d-8add-079480b5bc8d","Type":"ContainerDied","Data":"32c583e3cec0e24af9f10b34261bb48d378f7896283234345e44e5e636715e76"} Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.772431 4773 scope.go:117] "RemoveContainer" containerID="e8d8fde545d219dc14a1f6374f5728798c2315e9a9921483b739f9ffb46c7e42" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.772481 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.790872 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-ceilometer-tls-certs\") pod \"1c8e311c-c20f-426d-8add-079480b5bc8d\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.791073 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c8e311c-c20f-426d-8add-079480b5bc8d-log-httpd\") pod \"1c8e311c-c20f-426d-8add-079480b5bc8d\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.791125 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-combined-ca-bundle\") pod \"1c8e311c-c20f-426d-8add-079480b5bc8d\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.791203 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-sg-core-conf-yaml\") pod \"1c8e311c-c20f-426d-8add-079480b5bc8d\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.791249 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c8e311c-c20f-426d-8add-079480b5bc8d-run-httpd\") pod \"1c8e311c-c20f-426d-8add-079480b5bc8d\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.791371 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-scripts\") pod \"1c8e311c-c20f-426d-8add-079480b5bc8d\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.791425 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26h2h\" (UniqueName: \"kubernetes.io/projected/1c8e311c-c20f-426d-8add-079480b5bc8d-kube-api-access-26h2h\") pod \"1c8e311c-c20f-426d-8add-079480b5bc8d\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.791487 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-config-data\") pod \"1c8e311c-c20f-426d-8add-079480b5bc8d\" (UID: \"1c8e311c-c20f-426d-8add-079480b5bc8d\") " Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.792395 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c8e311c-c20f-426d-8add-079480b5bc8d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1c8e311c-c20f-426d-8add-079480b5bc8d" (UID: "1c8e311c-c20f-426d-8add-079480b5bc8d"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.792750 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c8e311c-c20f-426d-8add-079480b5bc8d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1c8e311c-c20f-426d-8add-079480b5bc8d" (UID: "1c8e311c-c20f-426d-8add-079480b5bc8d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.798881 4773 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c8e311c-c20f-426d-8add-079480b5bc8d-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.799017 4773 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c8e311c-c20f-426d-8add-079480b5bc8d-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.803379 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c8e311c-c20f-426d-8add-079480b5bc8d-kube-api-access-26h2h" (OuterVolumeSpecName: "kube-api-access-26h2h") pod "1c8e311c-c20f-426d-8add-079480b5bc8d" (UID: "1c8e311c-c20f-426d-8add-079480b5bc8d"). InnerVolumeSpecName "kube-api-access-26h2h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.806033 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-scripts" (OuterVolumeSpecName: "scripts") pod "1c8e311c-c20f-426d-8add-079480b5bc8d" (UID: "1c8e311c-c20f-426d-8add-079480b5bc8d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.810462 4773 scope.go:117] "RemoveContainer" containerID="b98da3d075719d522706593b4cd0faf390bf75e9fc1ef759a35c1e0e0784cd95" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.839378 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1c8e311c-c20f-426d-8add-079480b5bc8d" (UID: "1c8e311c-c20f-426d-8add-079480b5bc8d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.865022 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "1c8e311c-c20f-426d-8add-079480b5bc8d" (UID: "1c8e311c-c20f-426d-8add-079480b5bc8d"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.892648 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c8e311c-c20f-426d-8add-079480b5bc8d" (UID: "1c8e311c-c20f-426d-8add-079480b5bc8d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.900724 4773 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.900755 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.900765 4773 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.900773 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.900782 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26h2h\" (UniqueName: \"kubernetes.io/projected/1c8e311c-c20f-426d-8add-079480b5bc8d-kube-api-access-26h2h\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.920364 4773 scope.go:117] "RemoveContainer" containerID="31b886221b6af0e453facaea09203a62c99d45621deaf1934faec4158a444273" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.937465 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-config-data" (OuterVolumeSpecName: "config-data") pod "1c8e311c-c20f-426d-8add-079480b5bc8d" (UID: "1c8e311c-c20f-426d-8add-079480b5bc8d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.945536 4773 scope.go:117] "RemoveContainer" containerID="754e0b1d96c9549b2336853c30a790b2617f69aa9d79d86e7eeb32fbe4ae6117" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.967718 4773 scope.go:117] "RemoveContainer" containerID="e8d8fde545d219dc14a1f6374f5728798c2315e9a9921483b739f9ffb46c7e42" Jan 22 12:18:47 crc kubenswrapper[4773]: E0122 12:18:47.968273 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8d8fde545d219dc14a1f6374f5728798c2315e9a9921483b739f9ffb46c7e42\": container with ID starting with e8d8fde545d219dc14a1f6374f5728798c2315e9a9921483b739f9ffb46c7e42 not found: ID does not exist" containerID="e8d8fde545d219dc14a1f6374f5728798c2315e9a9921483b739f9ffb46c7e42" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.968389 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8d8fde545d219dc14a1f6374f5728798c2315e9a9921483b739f9ffb46c7e42"} err="failed to get container status \"e8d8fde545d219dc14a1f6374f5728798c2315e9a9921483b739f9ffb46c7e42\": rpc error: code = NotFound desc = could not find container \"e8d8fde545d219dc14a1f6374f5728798c2315e9a9921483b739f9ffb46c7e42\": container with ID starting with e8d8fde545d219dc14a1f6374f5728798c2315e9a9921483b739f9ffb46c7e42 not found: ID does not exist" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.968434 4773 scope.go:117] "RemoveContainer" containerID="b98da3d075719d522706593b4cd0faf390bf75e9fc1ef759a35c1e0e0784cd95" Jan 22 12:18:47 crc kubenswrapper[4773]: E0122 12:18:47.968948 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b98da3d075719d522706593b4cd0faf390bf75e9fc1ef759a35c1e0e0784cd95\": container with ID starting with b98da3d075719d522706593b4cd0faf390bf75e9fc1ef759a35c1e0e0784cd95 not found: ID does not exist" containerID="b98da3d075719d522706593b4cd0faf390bf75e9fc1ef759a35c1e0e0784cd95" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.968980 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b98da3d075719d522706593b4cd0faf390bf75e9fc1ef759a35c1e0e0784cd95"} err="failed to get container status \"b98da3d075719d522706593b4cd0faf390bf75e9fc1ef759a35c1e0e0784cd95\": rpc error: code = NotFound desc = could not find container \"b98da3d075719d522706593b4cd0faf390bf75e9fc1ef759a35c1e0e0784cd95\": container with ID starting with b98da3d075719d522706593b4cd0faf390bf75e9fc1ef759a35c1e0e0784cd95 not found: ID does not exist" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.969004 4773 scope.go:117] "RemoveContainer" containerID="31b886221b6af0e453facaea09203a62c99d45621deaf1934faec4158a444273" Jan 22 12:18:47 crc kubenswrapper[4773]: E0122 12:18:47.969260 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31b886221b6af0e453facaea09203a62c99d45621deaf1934faec4158a444273\": container with ID starting with 31b886221b6af0e453facaea09203a62c99d45621deaf1934faec4158a444273 not found: ID does not exist" containerID="31b886221b6af0e453facaea09203a62c99d45621deaf1934faec4158a444273" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.969310 4773 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"31b886221b6af0e453facaea09203a62c99d45621deaf1934faec4158a444273"} err="failed to get container status \"31b886221b6af0e453facaea09203a62c99d45621deaf1934faec4158a444273\": rpc error: code = NotFound desc = could not find container \"31b886221b6af0e453facaea09203a62c99d45621deaf1934faec4158a444273\": container with ID starting with 31b886221b6af0e453facaea09203a62c99d45621deaf1934faec4158a444273 not found: ID does not exist" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.969334 4773 scope.go:117] "RemoveContainer" containerID="754e0b1d96c9549b2336853c30a790b2617f69aa9d79d86e7eeb32fbe4ae6117" Jan 22 12:18:47 crc kubenswrapper[4773]: E0122 12:18:47.969577 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"754e0b1d96c9549b2336853c30a790b2617f69aa9d79d86e7eeb32fbe4ae6117\": container with ID starting with 754e0b1d96c9549b2336853c30a790b2617f69aa9d79d86e7eeb32fbe4ae6117 not found: ID does not exist" containerID="754e0b1d96c9549b2336853c30a790b2617f69aa9d79d86e7eeb32fbe4ae6117" Jan 22 12:18:47 crc kubenswrapper[4773]: I0122 12:18:47.969597 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"754e0b1d96c9549b2336853c30a790b2617f69aa9d79d86e7eeb32fbe4ae6117"} err="failed to get container status \"754e0b1d96c9549b2336853c30a790b2617f69aa9d79d86e7eeb32fbe4ae6117\": rpc error: code = NotFound desc = could not find container \"754e0b1d96c9549b2336853c30a790b2617f69aa9d79d86e7eeb32fbe4ae6117\": container with ID starting with 754e0b1d96c9549b2336853c30a790b2617f69aa9d79d86e7eeb32fbe4ae6117 not found: ID does not exist" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.002241 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c8e311c-c20f-426d-8add-079480b5bc8d-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.115390 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.160619 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.184517 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:18:48 crc kubenswrapper[4773]: E0122 12:18:48.185006 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerName="ceilometer-central-agent" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.185027 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerName="ceilometer-central-agent" Jan 22 12:18:48 crc kubenswrapper[4773]: E0122 12:18:48.185049 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerName="ceilometer-notification-agent" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.185058 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerName="ceilometer-notification-agent" Jan 22 12:18:48 crc kubenswrapper[4773]: E0122 12:18:48.185081 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerName="sg-core" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.185090 4773 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerName="sg-core" Jan 22 12:18:48 crc kubenswrapper[4773]: E0122 12:18:48.185105 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerName="proxy-httpd" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.185112 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerName="proxy-httpd" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.185624 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerName="sg-core" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.185647 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerName="proxy-httpd" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.185657 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerName="ceilometer-central-agent" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.185682 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c8e311c-c20f-426d-8add-079480b5bc8d" containerName="ceilometer-notification-agent" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.187876 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.188159 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.199221 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.199221 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.199226 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.319873 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.319965 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.320040 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-config-data\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.320065 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pd742\" (UniqueName: \"kubernetes.io/projected/8d41353c-ea0e-4005-acec-dc25faae5840-kube-api-access-pd742\") pod \"ceilometer-0\" (UID: 
\"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.320089 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d41353c-ea0e-4005-acec-dc25faae5840-run-httpd\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.320328 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-scripts\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.320464 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d41353c-ea0e-4005-acec-dc25faae5840-log-httpd\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.320523 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.422376 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-config-data\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.422416 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pd742\" (UniqueName: \"kubernetes.io/projected/8d41353c-ea0e-4005-acec-dc25faae5840-kube-api-access-pd742\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.422446 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d41353c-ea0e-4005-acec-dc25faae5840-run-httpd\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.422519 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-scripts\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.422564 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d41353c-ea0e-4005-acec-dc25faae5840-log-httpd\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.422601 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.422660 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.422703 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.423387 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d41353c-ea0e-4005-acec-dc25faae5840-log-httpd\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.423934 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d41353c-ea0e-4005-acec-dc25faae5840-run-httpd\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.426146 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.426678 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-scripts\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.427315 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.429082 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.431083 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-config-data\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.444239 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pd742\" (UniqueName: 
\"kubernetes.io/projected/8d41353c-ea0e-4005-acec-dc25faae5840-kube-api-access-pd742\") pod \"ceilometer-0\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") " pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.520709 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 12:18:48 crc kubenswrapper[4773]: I0122 12:18:48.690016 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c8e311c-c20f-426d-8add-079480b5bc8d" path="/var/lib/kubelet/pods/1c8e311c-c20f-426d-8add-079480b5bc8d/volumes" Jan 22 12:18:49 crc kubenswrapper[4773]: I0122 12:18:49.028139 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:18:49 crc kubenswrapper[4773]: I0122 12:18:49.428969 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 12:18:49 crc kubenswrapper[4773]: I0122 12:18:49.545584 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fb7e130-2923-482e-8861-6c40e010a0fb-config-data\") pod \"6fb7e130-2923-482e-8861-6c40e010a0fb\" (UID: \"6fb7e130-2923-482e-8861-6c40e010a0fb\") " Jan 22 12:18:49 crc kubenswrapper[4773]: I0122 12:18:49.546140 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pthrl\" (UniqueName: \"kubernetes.io/projected/6fb7e130-2923-482e-8861-6c40e010a0fb-kube-api-access-pthrl\") pod \"6fb7e130-2923-482e-8861-6c40e010a0fb\" (UID: \"6fb7e130-2923-482e-8861-6c40e010a0fb\") " Jan 22 12:18:49 crc kubenswrapper[4773]: I0122 12:18:49.546406 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb7e130-2923-482e-8861-6c40e010a0fb-combined-ca-bundle\") pod \"6fb7e130-2923-482e-8861-6c40e010a0fb\" (UID: \"6fb7e130-2923-482e-8861-6c40e010a0fb\") " Jan 22 12:18:49 crc kubenswrapper[4773]: I0122 12:18:49.546551 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fb7e130-2923-482e-8861-6c40e010a0fb-logs\") pod \"6fb7e130-2923-482e-8861-6c40e010a0fb\" (UID: \"6fb7e130-2923-482e-8861-6c40e010a0fb\") " Jan 22 12:18:49 crc kubenswrapper[4773]: I0122 12:18:49.547530 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fb7e130-2923-482e-8861-6c40e010a0fb-logs" (OuterVolumeSpecName: "logs") pod "6fb7e130-2923-482e-8861-6c40e010a0fb" (UID: "6fb7e130-2923-482e-8861-6c40e010a0fb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:18:49 crc kubenswrapper[4773]: I0122 12:18:49.553655 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fb7e130-2923-482e-8861-6c40e010a0fb-kube-api-access-pthrl" (OuterVolumeSpecName: "kube-api-access-pthrl") pod "6fb7e130-2923-482e-8861-6c40e010a0fb" (UID: "6fb7e130-2923-482e-8861-6c40e010a0fb"). InnerVolumeSpecName "kube-api-access-pthrl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:18:49 crc kubenswrapper[4773]: I0122 12:18:49.597476 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fb7e130-2923-482e-8861-6c40e010a0fb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6fb7e130-2923-482e-8861-6c40e010a0fb" (UID: "6fb7e130-2923-482e-8861-6c40e010a0fb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:18:49 crc kubenswrapper[4773]: I0122 12:18:49.618973 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fb7e130-2923-482e-8861-6c40e010a0fb-config-data" (OuterVolumeSpecName: "config-data") pod "6fb7e130-2923-482e-8861-6c40e010a0fb" (UID: "6fb7e130-2923-482e-8861-6c40e010a0fb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:18:49 crc kubenswrapper[4773]: I0122 12:18:49.648868 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb7e130-2923-482e-8861-6c40e010a0fb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:49 crc kubenswrapper[4773]: I0122 12:18:49.648907 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fb7e130-2923-482e-8861-6c40e010a0fb-logs\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:49 crc kubenswrapper[4773]: I0122 12:18:49.648921 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fb7e130-2923-482e-8861-6c40e010a0fb-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:49 crc kubenswrapper[4773]: I0122 12:18:49.648934 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pthrl\" (UniqueName: \"kubernetes.io/projected/6fb7e130-2923-482e-8861-6c40e010a0fb-kube-api-access-pthrl\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.806671 4773 generic.go:334] "Generic (PLEG): container finished" podID="6fb7e130-2923-482e-8861-6c40e010a0fb" containerID="12a444f35fa6de0e33e245e405626c8d10a6c4f754a855d6bdbc0aed9fedc82e" exitCode=0 Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.806752 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6fb7e130-2923-482e-8861-6c40e010a0fb","Type":"ContainerDied","Data":"12a444f35fa6de0e33e245e405626c8d10a6c4f754a855d6bdbc0aed9fedc82e"} Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.806761 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.806807 4773 scope.go:117] "RemoveContainer" containerID="12a444f35fa6de0e33e245e405626c8d10a6c4f754a855d6bdbc0aed9fedc82e" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.806793 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6fb7e130-2923-482e-8861-6c40e010a0fb","Type":"ContainerDied","Data":"a68f167d6f99a60d2051bee02e2de05e33505b0f82324ed05aaaf1b3d22f8d20"} Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.814399 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d41353c-ea0e-4005-acec-dc25faae5840","Type":"ContainerStarted","Data":"a368e5d7422ab66afdc22e40b44df9f6f725ea927e78f7b574782ff86b765058"} Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.814476 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d41353c-ea0e-4005-acec-dc25faae5840","Type":"ContainerStarted","Data":"610958e0cb31060554b78ca6249322e42e819841e0e300bd0b2015ffe19b030a"} Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.830771 4773 scope.go:117] "RemoveContainer" containerID="89c821cc95285388855eaa6ba9c7d09721141cc7a2707ff6a7762af3e3f66e34" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.856396 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.867559 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.880354 4773 scope.go:117] "RemoveContainer" containerID="12a444f35fa6de0e33e245e405626c8d10a6c4f754a855d6bdbc0aed9fedc82e" Jan 22 12:18:50 crc kubenswrapper[4773]: E0122 12:18:49.881746 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12a444f35fa6de0e33e245e405626c8d10a6c4f754a855d6bdbc0aed9fedc82e\": container with ID starting with 12a444f35fa6de0e33e245e405626c8d10a6c4f754a855d6bdbc0aed9fedc82e not found: ID does not exist" containerID="12a444f35fa6de0e33e245e405626c8d10a6c4f754a855d6bdbc0aed9fedc82e" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.881790 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12a444f35fa6de0e33e245e405626c8d10a6c4f754a855d6bdbc0aed9fedc82e"} err="failed to get container status \"12a444f35fa6de0e33e245e405626c8d10a6c4f754a855d6bdbc0aed9fedc82e\": rpc error: code = NotFound desc = could not find container \"12a444f35fa6de0e33e245e405626c8d10a6c4f754a855d6bdbc0aed9fedc82e\": container with ID starting with 12a444f35fa6de0e33e245e405626c8d10a6c4f754a855d6bdbc0aed9fedc82e not found: ID does not exist" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.881818 4773 scope.go:117] "RemoveContainer" containerID="89c821cc95285388855eaa6ba9c7d09721141cc7a2707ff6a7762af3e3f66e34" Jan 22 12:18:50 crc kubenswrapper[4773]: E0122 12:18:49.882326 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89c821cc95285388855eaa6ba9c7d09721141cc7a2707ff6a7762af3e3f66e34\": container with ID starting with 89c821cc95285388855eaa6ba9c7d09721141cc7a2707ff6a7762af3e3f66e34 not found: ID does not exist" containerID="89c821cc95285388855eaa6ba9c7d09721141cc7a2707ff6a7762af3e3f66e34" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.882370 4773 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89c821cc95285388855eaa6ba9c7d09721141cc7a2707ff6a7762af3e3f66e34"} err="failed to get container status \"89c821cc95285388855eaa6ba9c7d09721141cc7a2707ff6a7762af3e3f66e34\": rpc error: code = NotFound desc = could not find container \"89c821cc95285388855eaa6ba9c7d09721141cc7a2707ff6a7762af3e3f66e34\": container with ID starting with 89c821cc95285388855eaa6ba9c7d09721141cc7a2707ff6a7762af3e3f66e34 not found: ID does not exist" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.892894 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 22 12:18:50 crc kubenswrapper[4773]: E0122 12:18:49.893578 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fb7e130-2923-482e-8861-6c40e010a0fb" containerName="nova-api-api" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.893600 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fb7e130-2923-482e-8861-6c40e010a0fb" containerName="nova-api-api" Jan 22 12:18:50 crc kubenswrapper[4773]: E0122 12:18:49.893620 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fb7e130-2923-482e-8861-6c40e010a0fb" containerName="nova-api-log" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.893631 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fb7e130-2923-482e-8861-6c40e010a0fb" containerName="nova-api-log" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.893848 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fb7e130-2923-482e-8861-6c40e010a0fb" containerName="nova-api-api" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.893886 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fb7e130-2923-482e-8861-6c40e010a0fb" containerName="nova-api-log" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.895188 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.899829 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.900211 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.900365 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:49.902143 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.056637 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-public-tls-certs\") pod \"nova-api-0\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.056678 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.056705 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-internal-tls-certs\") pod \"nova-api-0\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.056823 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-logs\") pod \"nova-api-0\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.057057 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwcgq\" (UniqueName: \"kubernetes.io/projected/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-kube-api-access-mwcgq\") pod \"nova-api-0\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.057332 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-config-data\") pod \"nova-api-0\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.158784 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-config-data\") pod \"nova-api-0\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.159116 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-public-tls-certs\") pod \"nova-api-0\" (UID: 
\"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.159144 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.159171 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-internal-tls-certs\") pod \"nova-api-0\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.159198 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-logs\") pod \"nova-api-0\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.159240 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwcgq\" (UniqueName: \"kubernetes.io/projected/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-kube-api-access-mwcgq\") pod \"nova-api-0\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.159750 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-logs\") pod \"nova-api-0\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.165015 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-internal-tls-certs\") pod \"nova-api-0\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.165467 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.166156 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-public-tls-certs\") pod \"nova-api-0\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.166871 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-config-data\") pod \"nova-api-0\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.176510 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwcgq\" (UniqueName: \"kubernetes.io/projected/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-kube-api-access-mwcgq\") pod \"nova-api-0\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " pod="openstack/nova-api-0" 
Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.227310 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.670393 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fb7e130-2923-482e-8861-6c40e010a0fb" path="/var/lib/kubelet/pods/6fb7e130-2923-482e-8861-6c40e010a0fb/volumes" Jan 22 12:18:50 crc kubenswrapper[4773]: W0122 12:18:50.766816 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5cb070bc_d1be_4ac3_95fa_98f36ba1d4bb.slice/crio-9cb4ca358587ffba7d8ea76f3c4ed117be0f160d4faa83a6243f85b1874798c4 WatchSource:0}: Error finding container 9cb4ca358587ffba7d8ea76f3c4ed117be0f160d4faa83a6243f85b1874798c4: Status 404 returned error can't find the container with id 9cb4ca358587ffba7d8ea76f3c4ed117be0f160d4faa83a6243f85b1874798c4 Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.771546 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.825217 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d41353c-ea0e-4005-acec-dc25faae5840","Type":"ContainerStarted","Data":"1210d2bb5e517e818502dcfc446a53647a2fe6e83297b49ee849c6965b74a547"} Jan 22 12:18:50 crc kubenswrapper[4773]: I0122 12:18:50.828178 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb","Type":"ContainerStarted","Data":"9cb4ca358587ffba7d8ea76f3c4ed117be0f160d4faa83a6243f85b1874798c4"} Jan 22 12:18:51 crc kubenswrapper[4773]: I0122 12:18:51.126999 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 22 12:18:51 crc kubenswrapper[4773]: I0122 12:18:51.127048 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 22 12:18:51 crc kubenswrapper[4773]: I0122 12:18:51.169315 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:51 crc kubenswrapper[4773]: I0122 12:18:51.194451 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:51 crc kubenswrapper[4773]: I0122 12:18:51.859928 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb","Type":"ContainerStarted","Data":"2d57f0c3cdc690c55ad26e2225a1616e07a51d4e65ec601010f5fab76b731226"} Jan 22 12:18:51 crc kubenswrapper[4773]: I0122 12:18:51.860501 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb","Type":"ContainerStarted","Data":"f6f39eb915dba364652c96e612e33694c2e38f11986ded44c743ba5e1eeca2bf"} Jan 22 12:18:51 crc kubenswrapper[4773]: I0122 12:18:51.895420 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.895400434 podStartE2EDuration="2.895400434s" podCreationTimestamp="2026-01-22 12:18:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:18:51.885867236 +0000 UTC m=+1439.463983061" watchObservedRunningTime="2026-01-22 12:18:51.895400434 +0000 UTC m=+1439.473516259" Jan 22 
12:18:51 crc kubenswrapper[4773]: I0122 12:18:51.898238 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d41353c-ea0e-4005-acec-dc25faae5840","Type":"ContainerStarted","Data":"45b87d7500c47bd01c2b3bc23189b837e6db471a85861c82d269d27b75481b8d"} Jan 22 12:18:51 crc kubenswrapper[4773]: I0122 12:18:51.944239 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.175752 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="f39ee9d9-6260-48e5-adab-63a7bb81f68b" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.175848 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="f39ee9d9-6260-48e5-adab-63a7bb81f68b" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.199634 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-thmt8"] Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.200764 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-thmt8" Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.203598 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.204791 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.206472 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pj7g6\" (UniqueName: \"kubernetes.io/projected/ab07d00f-1829-4512-ba14-b9ed26a00fed-kube-api-access-pj7g6\") pod \"nova-cell1-cell-mapping-thmt8\" (UID: \"ab07d00f-1829-4512-ba14-b9ed26a00fed\") " pod="openstack/nova-cell1-cell-mapping-thmt8" Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.206729 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab07d00f-1829-4512-ba14-b9ed26a00fed-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-thmt8\" (UID: \"ab07d00f-1829-4512-ba14-b9ed26a00fed\") " pod="openstack/nova-cell1-cell-mapping-thmt8" Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.206804 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab07d00f-1829-4512-ba14-b9ed26a00fed-scripts\") pod \"nova-cell1-cell-mapping-thmt8\" (UID: \"ab07d00f-1829-4512-ba14-b9ed26a00fed\") " pod="openstack/nova-cell1-cell-mapping-thmt8" Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.206982 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab07d00f-1829-4512-ba14-b9ed26a00fed-config-data\") pod \"nova-cell1-cell-mapping-thmt8\" (UID: \"ab07d00f-1829-4512-ba14-b9ed26a00fed\") " pod="openstack/nova-cell1-cell-mapping-thmt8" Jan 22 
Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.309384 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pj7g6\" (UniqueName: \"kubernetes.io/projected/ab07d00f-1829-4512-ba14-b9ed26a00fed-kube-api-access-pj7g6\") pod \"nova-cell1-cell-mapping-thmt8\" (UID: \"ab07d00f-1829-4512-ba14-b9ed26a00fed\") " pod="openstack/nova-cell1-cell-mapping-thmt8"
Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.309506 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab07d00f-1829-4512-ba14-b9ed26a00fed-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-thmt8\" (UID: \"ab07d00f-1829-4512-ba14-b9ed26a00fed\") " pod="openstack/nova-cell1-cell-mapping-thmt8"
Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.309540 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab07d00f-1829-4512-ba14-b9ed26a00fed-scripts\") pod \"nova-cell1-cell-mapping-thmt8\" (UID: \"ab07d00f-1829-4512-ba14-b9ed26a00fed\") " pod="openstack/nova-cell1-cell-mapping-thmt8"
Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.310402 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab07d00f-1829-4512-ba14-b9ed26a00fed-config-data\") pod \"nova-cell1-cell-mapping-thmt8\" (UID: \"ab07d00f-1829-4512-ba14-b9ed26a00fed\") " pod="openstack/nova-cell1-cell-mapping-thmt8"
Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.315052 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab07d00f-1829-4512-ba14-b9ed26a00fed-config-data\") pod \"nova-cell1-cell-mapping-thmt8\" (UID: \"ab07d00f-1829-4512-ba14-b9ed26a00fed\") " pod="openstack/nova-cell1-cell-mapping-thmt8"
Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.315161 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab07d00f-1829-4512-ba14-b9ed26a00fed-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-thmt8\" (UID: \"ab07d00f-1829-4512-ba14-b9ed26a00fed\") " pod="openstack/nova-cell1-cell-mapping-thmt8"
Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.329900 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab07d00f-1829-4512-ba14-b9ed26a00fed-scripts\") pod \"nova-cell1-cell-mapping-thmt8\" (UID: \"ab07d00f-1829-4512-ba14-b9ed26a00fed\") " pod="openstack/nova-cell1-cell-mapping-thmt8"
Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.335987 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pj7g6\" (UniqueName: \"kubernetes.io/projected/ab07d00f-1829-4512-ba14-b9ed26a00fed-kube-api-access-pj7g6\") pod \"nova-cell1-cell-mapping-thmt8\" (UID: \"ab07d00f-1829-4512-ba14-b9ed26a00fed\") " pod="openstack/nova-cell1-cell-mapping-thmt8"
Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.519456 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-thmt8"
Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.917719 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d41353c-ea0e-4005-acec-dc25faae5840","Type":"ContainerStarted","Data":"62b857be6f7515df70b1cb2751883e724fe75bec469449ea75803a2227f3791b"}
Jan 22 12:18:52 crc kubenswrapper[4773]: I0122 12:18:52.940010 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.546794945 podStartE2EDuration="4.939982266s" podCreationTimestamp="2026-01-22 12:18:48 +0000 UTC" firstStartedPulling="2026-01-22 12:18:49.032275382 +0000 UTC m=+1436.610391207" lastFinishedPulling="2026-01-22 12:18:52.425462703 +0000 UTC m=+1440.003578528" observedRunningTime="2026-01-22 12:18:52.937196117 +0000 UTC m=+1440.515311952" watchObservedRunningTime="2026-01-22 12:18:52.939982266 +0000 UTC m=+1440.518098111"
Jan 22 12:18:53 crc kubenswrapper[4773]: W0122 12:18:53.000462 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab07d00f_1829_4512_ba14_b9ed26a00fed.slice/crio-af3e17ea1afcc3903fa27aa4494be3921ba0fb11be5244eb9284bf25473d3580 WatchSource:0}: Error finding container af3e17ea1afcc3903fa27aa4494be3921ba0fb11be5244eb9284bf25473d3580: Status 404 returned error can't find the container with id af3e17ea1afcc3903fa27aa4494be3921ba0fb11be5244eb9284bf25473d3580
Jan 22 12:18:53 crc kubenswrapper[4773]: I0122 12:18:53.005342 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-thmt8"]
Jan 22 12:18:53 crc kubenswrapper[4773]: I0122 12:18:53.440417 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq"
Jan 22 12:18:53 crc kubenswrapper[4773]: I0122 12:18:53.525229 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-647df7b8c5-vsznr"]
Jan 22 12:18:53 crc kubenswrapper[4773]: I0122 12:18:53.530563 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" podUID="62748add-a2e1-49d7-bce6-3ce301867493" containerName="dnsmasq-dns" containerID="cri-o://9ad4648c049a10c681cba63fa794e244bec1c0c21ef84df78273c43f319ce9f6" gracePeriod=10
Jan 22 12:18:53 crc kubenswrapper[4773]: I0122 12:18:53.953012 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-thmt8" event={"ID":"ab07d00f-1829-4512-ba14-b9ed26a00fed","Type":"ContainerStarted","Data":"c74b80d49878948d531eb721e047f59142c7de6845ab3140c2d9f029db898d0a"}
Jan 22 12:18:53 crc kubenswrapper[4773]: I0122 12:18:53.953066 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-thmt8" event={"ID":"ab07d00f-1829-4512-ba14-b9ed26a00fed","Type":"ContainerStarted","Data":"af3e17ea1afcc3903fa27aa4494be3921ba0fb11be5244eb9284bf25473d3580"}
Jan 22 12:18:53 crc kubenswrapper[4773]: I0122 12:18:53.958054 4773 generic.go:334] "Generic (PLEG): container finished" podID="62748add-a2e1-49d7-bce6-3ce301867493" containerID="9ad4648c049a10c681cba63fa794e244bec1c0c21ef84df78273c43f319ce9f6" exitCode=0
Jan 22 12:18:53 crc kubenswrapper[4773]: I0122 12:18:53.958794 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" event={"ID":"62748add-a2e1-49d7-bce6-3ce301867493","Type":"ContainerDied","Data":"9ad4648c049a10c681cba63fa794e244bec1c0c21ef84df78273c43f319ce9f6"}
event={"ID":"62748add-a2e1-49d7-bce6-3ce301867493","Type":"ContainerDied","Data":"9ad4648c049a10c681cba63fa794e244bec1c0c21ef84df78273c43f319ce9f6"} Jan 22 12:18:53 crc kubenswrapper[4773]: I0122 12:18:53.962460 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 22 12:18:53 crc kubenswrapper[4773]: I0122 12:18:53.975459 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-thmt8" podStartSLOduration=1.97543844 podStartE2EDuration="1.97543844s" podCreationTimestamp="2026-01-22 12:18:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:18:53.974469233 +0000 UTC m=+1441.552585078" watchObservedRunningTime="2026-01-22 12:18:53.97543844 +0000 UTC m=+1441.553554275" Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.128938 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.259988 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-dns-svc\") pod \"62748add-a2e1-49d7-bce6-3ce301867493\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.260168 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-config\") pod \"62748add-a2e1-49d7-bce6-3ce301867493\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.260345 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-ovsdbserver-nb\") pod \"62748add-a2e1-49d7-bce6-3ce301867493\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.260401 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-ovsdbserver-sb\") pod \"62748add-a2e1-49d7-bce6-3ce301867493\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.260466 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-dns-swift-storage-0\") pod \"62748add-a2e1-49d7-bce6-3ce301867493\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.260543 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnjjb\" (UniqueName: \"kubernetes.io/projected/62748add-a2e1-49d7-bce6-3ce301867493-kube-api-access-pnjjb\") pod \"62748add-a2e1-49d7-bce6-3ce301867493\" (UID: \"62748add-a2e1-49d7-bce6-3ce301867493\") " Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.267042 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62748add-a2e1-49d7-bce6-3ce301867493-kube-api-access-pnjjb" (OuterVolumeSpecName: "kube-api-access-pnjjb") pod "62748add-a2e1-49d7-bce6-3ce301867493" (UID: "62748add-a2e1-49d7-bce6-3ce301867493"). 
InnerVolumeSpecName "kube-api-access-pnjjb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.314709 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "62748add-a2e1-49d7-bce6-3ce301867493" (UID: "62748add-a2e1-49d7-bce6-3ce301867493"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.329369 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "62748add-a2e1-49d7-bce6-3ce301867493" (UID: "62748add-a2e1-49d7-bce6-3ce301867493"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.333750 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "62748add-a2e1-49d7-bce6-3ce301867493" (UID: "62748add-a2e1-49d7-bce6-3ce301867493"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.339012 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "62748add-a2e1-49d7-bce6-3ce301867493" (UID: "62748add-a2e1-49d7-bce6-3ce301867493"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.342148 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-config" (OuterVolumeSpecName: "config") pod "62748add-a2e1-49d7-bce6-3ce301867493" (UID: "62748add-a2e1-49d7-bce6-3ce301867493"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.363064 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.363123 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.363138 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.363153 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.363165 4773 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62748add-a2e1-49d7-bce6-3ce301867493-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.363177 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnjjb\" (UniqueName: \"kubernetes.io/projected/62748add-a2e1-49d7-bce6-3ce301867493-kube-api-access-pnjjb\") on node \"crc\" DevicePath \"\"" Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.968603 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" event={"ID":"62748add-a2e1-49d7-bce6-3ce301867493","Type":"ContainerDied","Data":"6eb0742fb70addcd4c0e63a2b1709e4c9b8a2b580328e69ad9aeff6f61e940b2"} Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.968650 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-647df7b8c5-vsznr" Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.968670 4773 scope.go:117] "RemoveContainer" containerID="9ad4648c049a10c681cba63fa794e244bec1c0c21ef84df78273c43f319ce9f6" Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.992627 4773 scope.go:117] "RemoveContainer" containerID="f51b77c36373f75f69e2c76f7e1cbb23b3add72fc954a2dbb38ddedc16f4bf93" Jan 22 12:18:54 crc kubenswrapper[4773]: I0122 12:18:54.997927 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-647df7b8c5-vsznr"] Jan 22 12:18:55 crc kubenswrapper[4773]: I0122 12:18:55.009671 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-647df7b8c5-vsznr"] Jan 22 12:18:56 crc kubenswrapper[4773]: I0122 12:18:56.669530 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62748add-a2e1-49d7-bce6-3ce301867493" path="/var/lib/kubelet/pods/62748add-a2e1-49d7-bce6-3ce301867493/volumes" Jan 22 12:18:59 crc kubenswrapper[4773]: I0122 12:18:59.033853 4773 generic.go:334] "Generic (PLEG): container finished" podID="ab07d00f-1829-4512-ba14-b9ed26a00fed" containerID="c74b80d49878948d531eb721e047f59142c7de6845ab3140c2d9f029db898d0a" exitCode=0 Jan 22 12:18:59 crc kubenswrapper[4773]: I0122 12:18:59.033963 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-thmt8" event={"ID":"ab07d00f-1829-4512-ba14-b9ed26a00fed","Type":"ContainerDied","Data":"c74b80d49878948d531eb721e047f59142c7de6845ab3140c2d9f029db898d0a"} Jan 22 12:19:00 crc kubenswrapper[4773]: I0122 12:19:00.227486 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 12:19:00 crc kubenswrapper[4773]: I0122 12:19:00.227889 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 12:19:00 crc kubenswrapper[4773]: I0122 12:19:00.507811 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-thmt8" Jan 22 12:19:00 crc kubenswrapper[4773]: I0122 12:19:00.683859 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab07d00f-1829-4512-ba14-b9ed26a00fed-combined-ca-bundle\") pod \"ab07d00f-1829-4512-ba14-b9ed26a00fed\" (UID: \"ab07d00f-1829-4512-ba14-b9ed26a00fed\") " Jan 22 12:19:00 crc kubenswrapper[4773]: I0122 12:19:00.683940 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab07d00f-1829-4512-ba14-b9ed26a00fed-scripts\") pod \"ab07d00f-1829-4512-ba14-b9ed26a00fed\" (UID: \"ab07d00f-1829-4512-ba14-b9ed26a00fed\") " Jan 22 12:19:00 crc kubenswrapper[4773]: I0122 12:19:00.684008 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj7g6\" (UniqueName: \"kubernetes.io/projected/ab07d00f-1829-4512-ba14-b9ed26a00fed-kube-api-access-pj7g6\") pod \"ab07d00f-1829-4512-ba14-b9ed26a00fed\" (UID: \"ab07d00f-1829-4512-ba14-b9ed26a00fed\") " Jan 22 12:19:00 crc kubenswrapper[4773]: I0122 12:19:00.684074 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab07d00f-1829-4512-ba14-b9ed26a00fed-config-data\") pod \"ab07d00f-1829-4512-ba14-b9ed26a00fed\" (UID: \"ab07d00f-1829-4512-ba14-b9ed26a00fed\") " Jan 22 12:19:00 crc kubenswrapper[4773]: I0122 12:19:00.690329 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab07d00f-1829-4512-ba14-b9ed26a00fed-kube-api-access-pj7g6" (OuterVolumeSpecName: "kube-api-access-pj7g6") pod "ab07d00f-1829-4512-ba14-b9ed26a00fed" (UID: "ab07d00f-1829-4512-ba14-b9ed26a00fed"). InnerVolumeSpecName "kube-api-access-pj7g6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:00 crc kubenswrapper[4773]: I0122 12:19:00.697420 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab07d00f-1829-4512-ba14-b9ed26a00fed-scripts" (OuterVolumeSpecName: "scripts") pod "ab07d00f-1829-4512-ba14-b9ed26a00fed" (UID: "ab07d00f-1829-4512-ba14-b9ed26a00fed"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:00 crc kubenswrapper[4773]: I0122 12:19:00.727217 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab07d00f-1829-4512-ba14-b9ed26a00fed-config-data" (OuterVolumeSpecName: "config-data") pod "ab07d00f-1829-4512-ba14-b9ed26a00fed" (UID: "ab07d00f-1829-4512-ba14-b9ed26a00fed"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:00 crc kubenswrapper[4773]: I0122 12:19:00.729920 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab07d00f-1829-4512-ba14-b9ed26a00fed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ab07d00f-1829-4512-ba14-b9ed26a00fed" (UID: "ab07d00f-1829-4512-ba14-b9ed26a00fed"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:00 crc kubenswrapper[4773]: I0122 12:19:00.787616 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj7g6\" (UniqueName: \"kubernetes.io/projected/ab07d00f-1829-4512-ba14-b9ed26a00fed-kube-api-access-pj7g6\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:00 crc kubenswrapper[4773]: I0122 12:19:00.787658 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab07d00f-1829-4512-ba14-b9ed26a00fed-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:00 crc kubenswrapper[4773]: I0122 12:19:00.787672 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab07d00f-1829-4512-ba14-b9ed26a00fed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:00 crc kubenswrapper[4773]: I0122 12:19:00.787684 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab07d00f-1829-4512-ba14-b9ed26a00fed-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:01 crc kubenswrapper[4773]: I0122 12:19:01.055044 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-thmt8" event={"ID":"ab07d00f-1829-4512-ba14-b9ed26a00fed","Type":"ContainerDied","Data":"af3e17ea1afcc3903fa27aa4494be3921ba0fb11be5244eb9284bf25473d3580"} Jan 22 12:19:01 crc kubenswrapper[4773]: I0122 12:19:01.055350 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af3e17ea1afcc3903fa27aa4494be3921ba0fb11be5244eb9284bf25473d3580" Jan 22 12:19:01 crc kubenswrapper[4773]: I0122 12:19:01.055128 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-thmt8" Jan 22 12:19:01 crc kubenswrapper[4773]: I0122 12:19:01.141653 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 22 12:19:01 crc kubenswrapper[4773]: I0122 12:19:01.141836 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 22 12:19:01 crc kubenswrapper[4773]: I0122 12:19:01.150710 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 22 12:19:01 crc kubenswrapper[4773]: I0122 12:19:01.153631 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 22 12:19:01 crc kubenswrapper[4773]: I0122 12:19:01.260432 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 12:19:01 crc kubenswrapper[4773]: I0122 12:19:01.260792 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 12:19:01 crc kubenswrapper[4773]: I0122 12:19:01.269728 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 12:19:01 crc kubenswrapper[4773]: I0122 12:19:01.269973 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" 
podUID="5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb" containerName="nova-api-log" containerID="cri-o://f6f39eb915dba364652c96e612e33694c2e38f11986ded44c743ba5e1eeca2bf" gracePeriod=30 Jan 22 12:19:01 crc kubenswrapper[4773]: I0122 12:19:01.270429 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb" containerName="nova-api-api" containerID="cri-o://2d57f0c3cdc690c55ad26e2225a1616e07a51d4e65ec601010f5fab76b731226" gracePeriod=30 Jan 22 12:19:01 crc kubenswrapper[4773]: I0122 12:19:01.281463 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 12:19:01 crc kubenswrapper[4773]: I0122 12:19:01.281731 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="a8a347dd-daa2-4e18-abec-6c0134da885c" containerName="nova-scheduler-scheduler" containerID="cri-o://8d6f13cdc3374231179277db35da7509699182837d8a80a3601dd2bd4ba9a53a" gracePeriod=30 Jan 22 12:19:01 crc kubenswrapper[4773]: I0122 12:19:01.332568 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 12:19:01 crc kubenswrapper[4773]: E0122 12:19:01.905845 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d6f13cdc3374231179277db35da7509699182837d8a80a3601dd2bd4ba9a53a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 22 12:19:01 crc kubenswrapper[4773]: E0122 12:19:01.907471 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d6f13cdc3374231179277db35da7509699182837d8a80a3601dd2bd4ba9a53a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 22 12:19:01 crc kubenswrapper[4773]: E0122 12:19:01.909251 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d6f13cdc3374231179277db35da7509699182837d8a80a3601dd2bd4ba9a53a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 22 12:19:01 crc kubenswrapper[4773]: E0122 12:19:01.909315 4773 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="a8a347dd-daa2-4e18-abec-6c0134da885c" containerName="nova-scheduler-scheduler" Jan 22 12:19:02 crc kubenswrapper[4773]: I0122 12:19:02.065363 4773 generic.go:334] "Generic (PLEG): container finished" podID="5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb" containerID="f6f39eb915dba364652c96e612e33694c2e38f11986ded44c743ba5e1eeca2bf" exitCode=143 Jan 22 12:19:02 crc kubenswrapper[4773]: I0122 12:19:02.065449 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb","Type":"ContainerDied","Data":"f6f39eb915dba364652c96e612e33694c2e38f11986ded44c743ba5e1eeca2bf"} Jan 22 12:19:03 crc kubenswrapper[4773]: I0122 12:19:03.077799 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f39ee9d9-6260-48e5-adab-63a7bb81f68b" containerName="nova-metadata-log" 
containerID="cri-o://807bcb53efd78f1ee911c47ee8146803a684264debb69cda6620e4ce7aad87ff" gracePeriod=30 Jan 22 12:19:03 crc kubenswrapper[4773]: I0122 12:19:03.077888 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f39ee9d9-6260-48e5-adab-63a7bb81f68b" containerName="nova-metadata-metadata" containerID="cri-o://1495548aec62e561b8ae2887478a9ced4ee717b17615a035fafa9721acb744a2" gracePeriod=30 Jan 22 12:19:04 crc kubenswrapper[4773]: I0122 12:19:04.086844 4773 generic.go:334] "Generic (PLEG): container finished" podID="f39ee9d9-6260-48e5-adab-63a7bb81f68b" containerID="807bcb53efd78f1ee911c47ee8146803a684264debb69cda6620e4ce7aad87ff" exitCode=143 Jan 22 12:19:04 crc kubenswrapper[4773]: I0122 12:19:04.086934 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f39ee9d9-6260-48e5-adab-63a7bb81f68b","Type":"ContainerDied","Data":"807bcb53efd78f1ee911c47ee8146803a684264debb69cda6620e4ce7aad87ff"} Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.099630 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.109359 4773 generic.go:334] "Generic (PLEG): container finished" podID="a8a347dd-daa2-4e18-abec-6c0134da885c" containerID="8d6f13cdc3374231179277db35da7509699182837d8a80a3601dd2bd4ba9a53a" exitCode=0 Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.109403 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a8a347dd-daa2-4e18-abec-6c0134da885c","Type":"ContainerDied","Data":"8d6f13cdc3374231179277db35da7509699182837d8a80a3601dd2bd4ba9a53a"} Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.109434 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a8a347dd-daa2-4e18-abec-6c0134da885c","Type":"ContainerDied","Data":"56c05f7db53467182e1c2f2c4a13fa61df7442d9087ce4f6dfa52f2bd83bd417"} Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.109471 4773 scope.go:117] "RemoveContainer" containerID="8d6f13cdc3374231179277db35da7509699182837d8a80a3601dd2bd4ba9a53a" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.109488 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.135946 4773 scope.go:117] "RemoveContainer" containerID="8d6f13cdc3374231179277db35da7509699182837d8a80a3601dd2bd4ba9a53a" Jan 22 12:19:06 crc kubenswrapper[4773]: E0122 12:19:06.136553 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d6f13cdc3374231179277db35da7509699182837d8a80a3601dd2bd4ba9a53a\": container with ID starting with 8d6f13cdc3374231179277db35da7509699182837d8a80a3601dd2bd4ba9a53a not found: ID does not exist" containerID="8d6f13cdc3374231179277db35da7509699182837d8a80a3601dd2bd4ba9a53a" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.136607 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d6f13cdc3374231179277db35da7509699182837d8a80a3601dd2bd4ba9a53a"} err="failed to get container status \"8d6f13cdc3374231179277db35da7509699182837d8a80a3601dd2bd4ba9a53a\": rpc error: code = NotFound desc = could not find container \"8d6f13cdc3374231179277db35da7509699182837d8a80a3601dd2bd4ba9a53a\": container with ID starting with 8d6f13cdc3374231179277db35da7509699182837d8a80a3601dd2bd4ba9a53a not found: ID does not exist" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.197627 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8a347dd-daa2-4e18-abec-6c0134da885c-config-data\") pod \"a8a347dd-daa2-4e18-abec-6c0134da885c\" (UID: \"a8a347dd-daa2-4e18-abec-6c0134da885c\") " Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.197745 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8a347dd-daa2-4e18-abec-6c0134da885c-combined-ca-bundle\") pod \"a8a347dd-daa2-4e18-abec-6c0134da885c\" (UID: \"a8a347dd-daa2-4e18-abec-6c0134da885c\") " Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.197950 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hblqn\" (UniqueName: \"kubernetes.io/projected/a8a347dd-daa2-4e18-abec-6c0134da885c-kube-api-access-hblqn\") pod \"a8a347dd-daa2-4e18-abec-6c0134da885c\" (UID: \"a8a347dd-daa2-4e18-abec-6c0134da885c\") " Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.204245 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8a347dd-daa2-4e18-abec-6c0134da885c-kube-api-access-hblqn" (OuterVolumeSpecName: "kube-api-access-hblqn") pod "a8a347dd-daa2-4e18-abec-6c0134da885c" (UID: "a8a347dd-daa2-4e18-abec-6c0134da885c"). InnerVolumeSpecName "kube-api-access-hblqn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.229750 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="f39ee9d9-6260-48e5-adab-63a7bb81f68b" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": read tcp 10.217.0.2:49216->10.217.0.200:8775: read: connection reset by peer" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.229759 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="f39ee9d9-6260-48e5-adab-63a7bb81f68b" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": read tcp 10.217.0.2:49214->10.217.0.200:8775: read: connection reset by peer" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.238579 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8a347dd-daa2-4e18-abec-6c0134da885c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a8a347dd-daa2-4e18-abec-6c0134da885c" (UID: "a8a347dd-daa2-4e18-abec-6c0134da885c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.240585 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8a347dd-daa2-4e18-abec-6c0134da885c-config-data" (OuterVolumeSpecName: "config-data") pod "a8a347dd-daa2-4e18-abec-6c0134da885c" (UID: "a8a347dd-daa2-4e18-abec-6c0134da885c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.300407 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hblqn\" (UniqueName: \"kubernetes.io/projected/a8a347dd-daa2-4e18-abec-6c0134da885c-kube-api-access-hblqn\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.300442 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8a347dd-daa2-4e18-abec-6c0134da885c-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.300455 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8a347dd-daa2-4e18-abec-6c0134da885c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.473715 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.511469 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.574340 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 12:19:06 crc kubenswrapper[4773]: E0122 12:19:06.574859 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab07d00f-1829-4512-ba14-b9ed26a00fed" containerName="nova-manage" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.574883 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab07d00f-1829-4512-ba14-b9ed26a00fed" containerName="nova-manage" Jan 22 12:19:06 crc kubenswrapper[4773]: E0122 12:19:06.574893 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8a347dd-daa2-4e18-abec-6c0134da885c" 
containerName="nova-scheduler-scheduler" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.574902 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8a347dd-daa2-4e18-abec-6c0134da885c" containerName="nova-scheduler-scheduler" Jan 22 12:19:06 crc kubenswrapper[4773]: E0122 12:19:06.574918 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62748add-a2e1-49d7-bce6-3ce301867493" containerName="init" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.574926 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="62748add-a2e1-49d7-bce6-3ce301867493" containerName="init" Jan 22 12:19:06 crc kubenswrapper[4773]: E0122 12:19:06.574934 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62748add-a2e1-49d7-bce6-3ce301867493" containerName="dnsmasq-dns" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.574941 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="62748add-a2e1-49d7-bce6-3ce301867493" containerName="dnsmasq-dns" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.575136 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab07d00f-1829-4512-ba14-b9ed26a00fed" containerName="nova-manage" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.575155 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8a347dd-daa2-4e18-abec-6c0134da885c" containerName="nova-scheduler-scheduler" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.575172 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="62748add-a2e1-49d7-bce6-3ce301867493" containerName="dnsmasq-dns" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.575964 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.597458 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.603818 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.669081 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8a347dd-daa2-4e18-abec-6c0134da885c" path="/var/lib/kubelet/pods/a8a347dd-daa2-4e18-abec-6c0134da885c/volumes" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.706764 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8dfee1b-6f2f-40c0-b676-64e94df5f64d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d8dfee1b-6f2f-40c0-b676-64e94df5f64d\") " pod="openstack/nova-scheduler-0" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.706990 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9rs5\" (UniqueName: \"kubernetes.io/projected/d8dfee1b-6f2f-40c0-b676-64e94df5f64d-kube-api-access-z9rs5\") pod \"nova-scheduler-0\" (UID: \"d8dfee1b-6f2f-40c0-b676-64e94df5f64d\") " pod="openstack/nova-scheduler-0" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.707411 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8dfee1b-6f2f-40c0-b676-64e94df5f64d-config-data\") pod \"nova-scheduler-0\" (UID: \"d8dfee1b-6f2f-40c0-b676-64e94df5f64d\") " pod="openstack/nova-scheduler-0" Jan 22 12:19:06 crc 
kubenswrapper[4773]: I0122 12:19:06.809525 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8dfee1b-6f2f-40c0-b676-64e94df5f64d-config-data\") pod \"nova-scheduler-0\" (UID: \"d8dfee1b-6f2f-40c0-b676-64e94df5f64d\") " pod="openstack/nova-scheduler-0" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.809643 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8dfee1b-6f2f-40c0-b676-64e94df5f64d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d8dfee1b-6f2f-40c0-b676-64e94df5f64d\") " pod="openstack/nova-scheduler-0" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.809682 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9rs5\" (UniqueName: \"kubernetes.io/projected/d8dfee1b-6f2f-40c0-b676-64e94df5f64d-kube-api-access-z9rs5\") pod \"nova-scheduler-0\" (UID: \"d8dfee1b-6f2f-40c0-b676-64e94df5f64d\") " pod="openstack/nova-scheduler-0" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.815175 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8dfee1b-6f2f-40c0-b676-64e94df5f64d-config-data\") pod \"nova-scheduler-0\" (UID: \"d8dfee1b-6f2f-40c0-b676-64e94df5f64d\") " pod="openstack/nova-scheduler-0" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.829248 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8dfee1b-6f2f-40c0-b676-64e94df5f64d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d8dfee1b-6f2f-40c0-b676-64e94df5f64d\") " pod="openstack/nova-scheduler-0" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.832041 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9rs5\" (UniqueName: \"kubernetes.io/projected/d8dfee1b-6f2f-40c0-b676-64e94df5f64d-kube-api-access-z9rs5\") pod \"nova-scheduler-0\" (UID: \"d8dfee1b-6f2f-40c0-b676-64e94df5f64d\") " pod="openstack/nova-scheduler-0" Jan 22 12:19:06 crc kubenswrapper[4773]: E0122 12:19:06.886825 4773 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5cb070bc_d1be_4ac3_95fa_98f36ba1d4bb.slice/crio-conmon-2d57f0c3cdc690c55ad26e2225a1616e07a51d4e65ec601010f5fab76b731226.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5cb070bc_d1be_4ac3_95fa_98f36ba1d4bb.slice/crio-2d57f0c3cdc690c55ad26e2225a1616e07a51d4e65ec601010f5fab76b731226.scope\": RecentStats: unable to find data in memory cache]" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.915361 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 12:19:06 crc kubenswrapper[4773]: I0122 12:19:06.923080 4773 util.go:30] "No sandbox for pod can be found. 
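[editor's note] The Unmount/Mount runs above and below are the kubelet volume manager reconciling actual state against desired state as old pods are deleted and their replacements created. A heavily reduced Go sketch of that diff-and-act loop follows; the types and map-based state are hypothetical simplifications, not the real reconciler in kubelet's volumemanager package.

package main

import "fmt"

// reconcile diffs desired state against actual state: volumes only in
// actual are unmounted, volumes only in desired are mounted.
func reconcile(desired, actual map[string]bool) {
	for vol := range actual {
		if !desired[vol] {
			fmt.Println("operationExecutor.UnmountVolume started for", vol)
			delete(actual, vol)
		}
	}
	for vol := range desired {
		if !actual[vol] {
			fmt.Println("operationExecutor.MountVolume started for", vol)
			actual[vol] = true
		}
	}
}

func main() {
	// Old nova-scheduler-0 pod gone, new one added: its volumes swap over.
	actual := map[string]bool{"a8a347dd/config-data": true}
	desired := map[string]bool{
		"d8dfee1b/config-data":        true,
		"d8dfee1b/combined-ca-bundle": true,
	}
	reconcile(desired, actual)
}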
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.012477 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f39ee9d9-6260-48e5-adab-63a7bb81f68b-logs\") pod \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.012768 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f39ee9d9-6260-48e5-adab-63a7bb81f68b-nova-metadata-tls-certs\") pod \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.012794 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbpnz\" (UniqueName: \"kubernetes.io/projected/f39ee9d9-6260-48e5-adab-63a7bb81f68b-kube-api-access-rbpnz\") pod \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.012867 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f39ee9d9-6260-48e5-adab-63a7bb81f68b-combined-ca-bundle\") pod \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.012943 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f39ee9d9-6260-48e5-adab-63a7bb81f68b-config-data\") pod \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\" (UID: \"f39ee9d9-6260-48e5-adab-63a7bb81f68b\") " Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.015068 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f39ee9d9-6260-48e5-adab-63a7bb81f68b-logs" (OuterVolumeSpecName: "logs") pod "f39ee9d9-6260-48e5-adab-63a7bb81f68b" (UID: "f39ee9d9-6260-48e5-adab-63a7bb81f68b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.021221 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f39ee9d9-6260-48e5-adab-63a7bb81f68b-kube-api-access-rbpnz" (OuterVolumeSpecName: "kube-api-access-rbpnz") pod "f39ee9d9-6260-48e5-adab-63a7bb81f68b" (UID: "f39ee9d9-6260-48e5-adab-63a7bb81f68b"). InnerVolumeSpecName "kube-api-access-rbpnz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.051244 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f39ee9d9-6260-48e5-adab-63a7bb81f68b-config-data" (OuterVolumeSpecName: "config-data") pod "f39ee9d9-6260-48e5-adab-63a7bb81f68b" (UID: "f39ee9d9-6260-48e5-adab-63a7bb81f68b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.057687 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f39ee9d9-6260-48e5-adab-63a7bb81f68b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f39ee9d9-6260-48e5-adab-63a7bb81f68b" (UID: "f39ee9d9-6260-48e5-adab-63a7bb81f68b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.113396 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f39ee9d9-6260-48e5-adab-63a7bb81f68b-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "f39ee9d9-6260-48e5-adab-63a7bb81f68b" (UID: "f39ee9d9-6260-48e5-adab-63a7bb81f68b"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.117961 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f39ee9d9-6260-48e5-adab-63a7bb81f68b-logs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.118027 4773 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f39ee9d9-6260-48e5-adab-63a7bb81f68b-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.118040 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbpnz\" (UniqueName: \"kubernetes.io/projected/f39ee9d9-6260-48e5-adab-63a7bb81f68b-kube-api-access-rbpnz\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.118080 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f39ee9d9-6260-48e5-adab-63a7bb81f68b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.118094 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f39ee9d9-6260-48e5-adab-63a7bb81f68b-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.125539 4773 generic.go:334] "Generic (PLEG): container finished" podID="f39ee9d9-6260-48e5-adab-63a7bb81f68b" containerID="1495548aec62e561b8ae2887478a9ced4ee717b17615a035fafa9721acb744a2" exitCode=0 Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.125604 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f39ee9d9-6260-48e5-adab-63a7bb81f68b","Type":"ContainerDied","Data":"1495548aec62e561b8ae2887478a9ced4ee717b17615a035fafa9721acb744a2"} Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.125632 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f39ee9d9-6260-48e5-adab-63a7bb81f68b","Type":"ContainerDied","Data":"c8e01f4a1c090aa58ab9e0ed2640cc450b32d2a7a526d1a321ef0641e1e2b708"} Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.125651 4773 scope.go:117] "RemoveContainer" containerID="1495548aec62e561b8ae2887478a9ced4ee717b17615a035fafa9721acb744a2" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.125768 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.134525 4773 generic.go:334] "Generic (PLEG): container finished" podID="5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb" containerID="2d57f0c3cdc690c55ad26e2225a1616e07a51d4e65ec601010f5fab76b731226" exitCode=0 Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.134624 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb","Type":"ContainerDied","Data":"2d57f0c3cdc690c55ad26e2225a1616e07a51d4e65ec601010f5fab76b731226"} Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.196374 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.196988 4773 scope.go:117] "RemoveContainer" containerID="807bcb53efd78f1ee911c47ee8146803a684264debb69cda6620e4ce7aad87ff" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.215124 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.228865 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 22 12:19:07 crc kubenswrapper[4773]: E0122 12:19:07.229325 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f39ee9d9-6260-48e5-adab-63a7bb81f68b" containerName="nova-metadata-log" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.229344 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f39ee9d9-6260-48e5-adab-63a7bb81f68b" containerName="nova-metadata-log" Jan 22 12:19:07 crc kubenswrapper[4773]: E0122 12:19:07.229374 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f39ee9d9-6260-48e5-adab-63a7bb81f68b" containerName="nova-metadata-metadata" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.229385 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f39ee9d9-6260-48e5-adab-63a7bb81f68b" containerName="nova-metadata-metadata" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.229551 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f39ee9d9-6260-48e5-adab-63a7bb81f68b" containerName="nova-metadata-metadata" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.229579 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f39ee9d9-6260-48e5-adab-63a7bb81f68b" containerName="nova-metadata-log" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.230525 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.231320 4773 scope.go:117] "RemoveContainer" containerID="1495548aec62e561b8ae2887478a9ced4ee717b17615a035fafa9721acb744a2" Jan 22 12:19:07 crc kubenswrapper[4773]: E0122 12:19:07.231813 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1495548aec62e561b8ae2887478a9ced4ee717b17615a035fafa9721acb744a2\": container with ID starting with 1495548aec62e561b8ae2887478a9ced4ee717b17615a035fafa9721acb744a2 not found: ID does not exist" containerID="1495548aec62e561b8ae2887478a9ced4ee717b17615a035fafa9721acb744a2" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.231860 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1495548aec62e561b8ae2887478a9ced4ee717b17615a035fafa9721acb744a2"} err="failed to get container status \"1495548aec62e561b8ae2887478a9ced4ee717b17615a035fafa9721acb744a2\": rpc error: code = NotFound desc = could not find container \"1495548aec62e561b8ae2887478a9ced4ee717b17615a035fafa9721acb744a2\": container with ID starting with 1495548aec62e561b8ae2887478a9ced4ee717b17615a035fafa9721acb744a2 not found: ID does not exist" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.231888 4773 scope.go:117] "RemoveContainer" containerID="807bcb53efd78f1ee911c47ee8146803a684264debb69cda6620e4ce7aad87ff" Jan 22 12:19:07 crc kubenswrapper[4773]: E0122 12:19:07.232544 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"807bcb53efd78f1ee911c47ee8146803a684264debb69cda6620e4ce7aad87ff\": container with ID starting with 807bcb53efd78f1ee911c47ee8146803a684264debb69cda6620e4ce7aad87ff not found: ID does not exist" containerID="807bcb53efd78f1ee911c47ee8146803a684264debb69cda6620e4ce7aad87ff" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.232585 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"807bcb53efd78f1ee911c47ee8146803a684264debb69cda6620e4ce7aad87ff"} err="failed to get container status \"807bcb53efd78f1ee911c47ee8146803a684264debb69cda6620e4ce7aad87ff\": rpc error: code = NotFound desc = could not find container \"807bcb53efd78f1ee911c47ee8146803a684264debb69cda6620e4ce7aad87ff\": container with ID starting with 807bcb53efd78f1ee911c47ee8146803a684264debb69cda6620e4ce7aad87ff not found: ID does not exist" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.232641 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.235344 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.235466 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.242466 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.323162 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-combined-ca-bundle\") pod \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.323340 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-config-data\") pod \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.323386 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-internal-tls-certs\") pod \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.323404 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-public-tls-certs\") pod \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.323450 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwcgq\" (UniqueName: \"kubernetes.io/projected/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-kube-api-access-mwcgq\") pod \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.323500 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-logs\") pod \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\" (UID: \"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb\") " Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.323817 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/311884d0-65e1-4737-a7ba-efc94510b90b-logs\") pod \"nova-metadata-0\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " pod="openstack/nova-metadata-0" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.323920 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/311884d0-65e1-4737-a7ba-efc94510b90b-config-data\") pod \"nova-metadata-0\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " pod="openstack/nova-metadata-0" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.323955 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/311884d0-65e1-4737-a7ba-efc94510b90b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " pod="openstack/nova-metadata-0" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.323976 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/311884d0-65e1-4737-a7ba-efc94510b90b-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " pod="openstack/nova-metadata-0" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.324125 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-logs" (OuterVolumeSpecName: "logs") pod "5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb" (UID: "5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.324127 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jf84v\" (UniqueName: \"kubernetes.io/projected/311884d0-65e1-4737-a7ba-efc94510b90b-kube-api-access-jf84v\") pod \"nova-metadata-0\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " pod="openstack/nova-metadata-0" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.324385 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-logs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.328206 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-kube-api-access-mwcgq" (OuterVolumeSpecName: "kube-api-access-mwcgq") pod "5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb" (UID: "5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb"). InnerVolumeSpecName "kube-api-access-mwcgq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.348501 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb" (UID: "5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.355323 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-config-data" (OuterVolumeSpecName: "config-data") pod "5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb" (UID: "5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.376366 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb" (UID: "5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.384094 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb" (UID: "5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.426527 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/311884d0-65e1-4737-a7ba-efc94510b90b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " pod="openstack/nova-metadata-0" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.426575 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/311884d0-65e1-4737-a7ba-efc94510b90b-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " pod="openstack/nova-metadata-0" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.426627 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jf84v\" (UniqueName: \"kubernetes.io/projected/311884d0-65e1-4737-a7ba-efc94510b90b-kube-api-access-jf84v\") pod \"nova-metadata-0\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " pod="openstack/nova-metadata-0" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.426678 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/311884d0-65e1-4737-a7ba-efc94510b90b-logs\") pod \"nova-metadata-0\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " pod="openstack/nova-metadata-0" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.426759 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/311884d0-65e1-4737-a7ba-efc94510b90b-config-data\") pod \"nova-metadata-0\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " pod="openstack/nova-metadata-0" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.426816 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.426831 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.426839 4773 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.426848 4773 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.426856 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwcgq\" (UniqueName: \"kubernetes.io/projected/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb-kube-api-access-mwcgq\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.427915 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/311884d0-65e1-4737-a7ba-efc94510b90b-logs\") pod \"nova-metadata-0\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " 
pod="openstack/nova-metadata-0" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.430205 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/311884d0-65e1-4737-a7ba-efc94510b90b-config-data\") pod \"nova-metadata-0\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " pod="openstack/nova-metadata-0" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.430689 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/311884d0-65e1-4737-a7ba-efc94510b90b-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " pod="openstack/nova-metadata-0" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.430926 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/311884d0-65e1-4737-a7ba-efc94510b90b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " pod="openstack/nova-metadata-0" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.433989 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.450472 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jf84v\" (UniqueName: \"kubernetes.io/projected/311884d0-65e1-4737-a7ba-efc94510b90b-kube-api-access-jf84v\") pod \"nova-metadata-0\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " pod="openstack/nova-metadata-0" Jan 22 12:19:07 crc kubenswrapper[4773]: I0122 12:19:07.547379 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 12:19:08 crc kubenswrapper[4773]: W0122 12:19:08.091062 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod311884d0_65e1_4737_a7ba_efc94510b90b.slice/crio-e659b9e9c635c64463ea7fcb4a22cb02a6f069b613f8ddce465894381961fdaa WatchSource:0}: Error finding container e659b9e9c635c64463ea7fcb4a22cb02a6f069b613f8ddce465894381961fdaa: Status 404 returned error can't find the container with id e659b9e9c635c64463ea7fcb4a22cb02a6f069b613f8ddce465894381961fdaa Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.091305 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.149187 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d8dfee1b-6f2f-40c0-b676-64e94df5f64d","Type":"ContainerStarted","Data":"51afb15c9b5099562ab106b026285ab7ba955147395bf085d62a4e072f75e4de"} Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.149260 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d8dfee1b-6f2f-40c0-b676-64e94df5f64d","Type":"ContainerStarted","Data":"19088b3d43e1967e69bac645fe9dc78a46c2dd8bce7f72a51c2b7409eec1a16b"} Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.155388 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb","Type":"ContainerDied","Data":"9cb4ca358587ffba7d8ea76f3c4ed117be0f160d4faa83a6243f85b1874798c4"} Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.155463 4773 scope.go:117] "RemoveContainer" 
containerID="2d57f0c3cdc690c55ad26e2225a1616e07a51d4e65ec601010f5fab76b731226" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.155510 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.164954 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"311884d0-65e1-4737-a7ba-efc94510b90b","Type":"ContainerStarted","Data":"e659b9e9c635c64463ea7fcb4a22cb02a6f069b613f8ddce465894381961fdaa"} Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.173216 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.173196954 podStartE2EDuration="2.173196954s" podCreationTimestamp="2026-01-22 12:19:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:19:08.169679825 +0000 UTC m=+1455.747795650" watchObservedRunningTime="2026-01-22 12:19:08.173196954 +0000 UTC m=+1455.751312809" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.261199 4773 scope.go:117] "RemoveContainer" containerID="f6f39eb915dba364652c96e612e33694c2e38f11986ded44c743ba5e1eeca2bf" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.277802 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.289643 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.302849 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 22 12:19:08 crc kubenswrapper[4773]: E0122 12:19:08.303235 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb" containerName="nova-api-api" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.303249 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb" containerName="nova-api-api" Jan 22 12:19:08 crc kubenswrapper[4773]: E0122 12:19:08.303278 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb" containerName="nova-api-log" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.303331 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb" containerName="nova-api-log" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.303509 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb" containerName="nova-api-log" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.303537 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb" containerName="nova-api-api" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.304998 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.307542 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.308190 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.308434 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.332082 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.446771 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgjj9\" (UniqueName: \"kubernetes.io/projected/637a651c-e338-45ba-8bd3-a8f838500893-kube-api-access-mgjj9\") pod \"nova-api-0\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") " pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.446880 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-public-tls-certs\") pod \"nova-api-0\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") " pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.446926 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/637a651c-e338-45ba-8bd3-a8f838500893-logs\") pod \"nova-api-0\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") " pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.446951 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") " pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.447355 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-config-data\") pod \"nova-api-0\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") " pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.447421 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-internal-tls-certs\") pod \"nova-api-0\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") " pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.549763 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-config-data\") pod \"nova-api-0\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") " pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.549842 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-internal-tls-certs\") pod \"nova-api-0\" (UID: 
\"637a651c-e338-45ba-8bd3-a8f838500893\") " pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.549992 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgjj9\" (UniqueName: \"kubernetes.io/projected/637a651c-e338-45ba-8bd3-a8f838500893-kube-api-access-mgjj9\") pod \"nova-api-0\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") " pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.550101 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-public-tls-certs\") pod \"nova-api-0\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") " pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.550166 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/637a651c-e338-45ba-8bd3-a8f838500893-logs\") pod \"nova-api-0\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") " pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.550210 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") " pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.552550 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/637a651c-e338-45ba-8bd3-a8f838500893-logs\") pod \"nova-api-0\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") " pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.563769 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-public-tls-certs\") pod \"nova-api-0\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") " pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.563906 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-internal-tls-certs\") pod \"nova-api-0\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") " pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.564175 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-config-data\") pod \"nova-api-0\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") " pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.570862 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") " pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.580325 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgjj9\" (UniqueName: \"kubernetes.io/projected/637a651c-e338-45ba-8bd3-a8f838500893-kube-api-access-mgjj9\") pod \"nova-api-0\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") " pod="openstack/nova-api-0" Jan 
22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.637209 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.677126 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb" path="/var/lib/kubelet/pods/5cb070bc-d1be-4ac3-95fa-98f36ba1d4bb/volumes" Jan 22 12:19:08 crc kubenswrapper[4773]: I0122 12:19:08.677920 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f39ee9d9-6260-48e5-adab-63a7bb81f68b" path="/var/lib/kubelet/pods/f39ee9d9-6260-48e5-adab-63a7bb81f68b/volumes" Jan 22 12:19:09 crc kubenswrapper[4773]: I0122 12:19:09.184327 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"311884d0-65e1-4737-a7ba-efc94510b90b","Type":"ContainerStarted","Data":"84743181e9bcf205d60cacb07eb6ab1bf0f5fac0214bec2c405bd822f753600e"} Jan 22 12:19:09 crc kubenswrapper[4773]: I0122 12:19:09.185188 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"311884d0-65e1-4737-a7ba-efc94510b90b","Type":"ContainerStarted","Data":"2aaa460bcd03f0a9d75d04fec79f711d27405af19bd05d0be33e14861ac29dee"} Jan 22 12:19:09 crc kubenswrapper[4773]: I0122 12:19:09.195856 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 12:19:09 crc kubenswrapper[4773]: I0122 12:19:09.237663 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.237636303 podStartE2EDuration="2.237636303s" podCreationTimestamp="2026-01-22 12:19:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:19:09.214045939 +0000 UTC m=+1456.792161804" watchObservedRunningTime="2026-01-22 12:19:09.237636303 +0000 UTC m=+1456.815752148" Jan 22 12:19:10 crc kubenswrapper[4773]: I0122 12:19:10.200037 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"637a651c-e338-45ba-8bd3-a8f838500893","Type":"ContainerStarted","Data":"430c12aa11f46c2666087aed88a66b65f91c8e79648c0421d3f83f1922bda4ef"} Jan 22 12:19:10 crc kubenswrapper[4773]: I0122 12:19:10.200404 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"637a651c-e338-45ba-8bd3-a8f838500893","Type":"ContainerStarted","Data":"f208616daee705d1c4d908c528f31148f8ced61b33147903c586028760cfb3c8"} Jan 22 12:19:10 crc kubenswrapper[4773]: I0122 12:19:10.200425 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"637a651c-e338-45ba-8bd3-a8f838500893","Type":"ContainerStarted","Data":"9987a51e74a3e9a9f8bab6d618cb7ae86bff074a415eda858a994cf587cf2051"} Jan 22 12:19:10 crc kubenswrapper[4773]: I0122 12:19:10.270421 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.270395962 podStartE2EDuration="2.270395962s" podCreationTimestamp="2026-01-22 12:19:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:19:10.259417903 +0000 UTC m=+1457.837533748" watchObservedRunningTime="2026-01-22 12:19:10.270395962 +0000 UTC m=+1457.848511797" Jan 22 12:19:11 crc kubenswrapper[4773]: I0122 12:19:11.923663 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/nova-scheduler-0" Jan 22 12:19:12 crc kubenswrapper[4773]: I0122 12:19:12.548951 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 12:19:12 crc kubenswrapper[4773]: I0122 12:19:12.549254 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 12:19:16 crc kubenswrapper[4773]: I0122 12:19:16.924246 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 22 12:19:16 crc kubenswrapper[4773]: I0122 12:19:16.976114 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 22 12:19:17 crc kubenswrapper[4773]: I0122 12:19:17.320777 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 22 12:19:17 crc kubenswrapper[4773]: I0122 12:19:17.548185 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 22 12:19:17 crc kubenswrapper[4773]: I0122 12:19:17.548576 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 22 12:19:18 crc kubenswrapper[4773]: I0122 12:19:18.559478 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="311884d0-65e1-4737-a7ba-efc94510b90b" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.207:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 12:19:18 crc kubenswrapper[4773]: I0122 12:19:18.561056 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="311884d0-65e1-4737-a7ba-efc94510b90b" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.207:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 12:19:18 crc kubenswrapper[4773]: I0122 12:19:18.561564 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 22 12:19:18 crc kubenswrapper[4773]: I0122 12:19:18.638530 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 12:19:18 crc kubenswrapper[4773]: I0122 12:19:18.639046 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 12:19:19 crc kubenswrapper[4773]: I0122 12:19:19.648602 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="637a651c-e338-45ba-8bd3-a8f838500893" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.208:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 12:19:19 crc kubenswrapper[4773]: I0122 12:19:19.648967 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="637a651c-e338-45ba-8bd3-a8f838500893" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.208:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 12:19:27 crc kubenswrapper[4773]: I0122 12:19:27.553815 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 22 12:19:27 crc kubenswrapper[4773]: I0122 12:19:27.555794 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 22 12:19:27 crc 
kubenswrapper[4773]: I0122 12:19:27.561767 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 22 12:19:28 crc kubenswrapper[4773]: I0122 12:19:28.426843 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 22 12:19:28 crc kubenswrapper[4773]: I0122 12:19:28.652439 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 22 12:19:28 crc kubenswrapper[4773]: I0122 12:19:28.652952 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 22 12:19:28 crc kubenswrapper[4773]: I0122 12:19:28.653742 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 22 12:19:28 crc kubenswrapper[4773]: I0122 12:19:28.671576 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 22 12:19:29 crc kubenswrapper[4773]: I0122 12:19:29.431059 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 22 12:19:29 crc kubenswrapper[4773]: I0122 12:19:29.439805 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.653640 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-n4t5s"] Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.655654 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-n4t5s" Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.666544 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.694788 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-n4t5s"] Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.783694 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vg7m6\" (UniqueName: \"kubernetes.io/projected/fbe5483b-39b1-4c4c-add8-d4ec00c26108-kube-api-access-vg7m6\") pod \"root-account-create-update-n4t5s\" (UID: \"fbe5483b-39b1-4c4c-add8-d4ec00c26108\") " pod="openstack/root-account-create-update-n4t5s" Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.783793 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbe5483b-39b1-4c4c-add8-d4ec00c26108-operator-scripts\") pod \"root-account-create-update-n4t5s\" (UID: \"fbe5483b-39b1-4c4c-add8-d4ec00c26108\") " pod="openstack/root-account-create-update-n4t5s" Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.789958 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-5b5f5dd584-sql6t"] Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.794312 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.850501 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-fw8rf"] Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.883092 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5b5f5dd584-sql6t"] Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.887202 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjtxp\" (UniqueName: \"kubernetes.io/projected/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-kube-api-access-zjtxp\") pod \"barbican-keystone-listener-5b5f5dd584-sql6t\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.887254 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-logs\") pod \"barbican-keystone-listener-5b5f5dd584-sql6t\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.887348 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-config-data\") pod \"barbican-keystone-listener-5b5f5dd584-sql6t\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.887385 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vg7m6\" (UniqueName: \"kubernetes.io/projected/fbe5483b-39b1-4c4c-add8-d4ec00c26108-kube-api-access-vg7m6\") pod \"root-account-create-update-n4t5s\" (UID: \"fbe5483b-39b1-4c4c-add8-d4ec00c26108\") " pod="openstack/root-account-create-update-n4t5s" Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.887436 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbe5483b-39b1-4c4c-add8-d4ec00c26108-operator-scripts\") pod \"root-account-create-update-n4t5s\" (UID: \"fbe5483b-39b1-4c4c-add8-d4ec00c26108\") " pod="openstack/root-account-create-update-n4t5s" Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.887452 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-combined-ca-bundle\") pod \"barbican-keystone-listener-5b5f5dd584-sql6t\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.887493 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-config-data-custom\") pod \"barbican-keystone-listener-5b5f5dd584-sql6t\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.888573 4773 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbe5483b-39b1-4c4c-add8-d4ec00c26108-operator-scripts\") pod \"root-account-create-update-n4t5s\" (UID: \"fbe5483b-39b1-4c4c-add8-d4ec00c26108\") " pod="openstack/root-account-create-update-n4t5s" Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.927721 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-fw8rf"] Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.963653 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vg7m6\" (UniqueName: \"kubernetes.io/projected/fbe5483b-39b1-4c4c-add8-d4ec00c26108-kube-api-access-vg7m6\") pod \"root-account-create-update-n4t5s\" (UID: \"fbe5483b-39b1-4c4c-add8-d4ec00c26108\") " pod="openstack/root-account-create-update-n4t5s" Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.994826 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjtxp\" (UniqueName: \"kubernetes.io/projected/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-kube-api-access-zjtxp\") pod \"barbican-keystone-listener-5b5f5dd584-sql6t\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" Jan 22 12:19:47 crc kubenswrapper[4773]: I0122 12:19:47.995108 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-logs\") pod \"barbican-keystone-listener-5b5f5dd584-sql6t\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.000440 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-config-data\") pod \"barbican-keystone-listener-5b5f5dd584-sql6t\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.003412 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-n4t5s" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.009909 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-logs\") pod \"barbican-keystone-listener-5b5f5dd584-sql6t\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.015832 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-combined-ca-bundle\") pod \"barbican-keystone-listener-5b5f5dd584-sql6t\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.015908 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-config-data-custom\") pod \"barbican-keystone-listener-5b5f5dd584-sql6t\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.017157 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-a998-account-create-update-qmwlx"] Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.032061 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-a998-account-create-update-qmwlx" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.047277 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.048019 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-config-data\") pod \"barbican-keystone-listener-5b5f5dd584-sql6t\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.058246 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjtxp\" (UniqueName: \"kubernetes.io/projected/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-kube-api-access-zjtxp\") pod \"barbican-keystone-listener-5b5f5dd584-sql6t\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.069522 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-config-data-custom\") pod \"barbican-keystone-listener-5b5f5dd584-sql6t\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.069679 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-combined-ca-bundle\") pod \"barbican-keystone-listener-5b5f5dd584-sql6t\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" Jan 22 12:19:48 crc kubenswrapper[4773]: 
I0122 12:19:48.170278 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5f675cf6c9-7wz9g"] Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.171887 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5f675cf6c9-7wz9g" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.180626 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.197648 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5686dfc-618a-4996-a4b0-b1bd31365084-operator-scripts\") pod \"placement-a998-account-create-update-qmwlx\" (UID: \"d5686dfc-618a-4996-a4b0-b1bd31365084\") " pod="openstack/placement-a998-account-create-update-qmwlx" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.197862 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gkpw\" (UniqueName: \"kubernetes.io/projected/d5686dfc-618a-4996-a4b0-b1bd31365084-kube-api-access-2gkpw\") pod \"placement-a998-account-create-update-qmwlx\" (UID: \"d5686dfc-618a-4996-a4b0-b1bd31365084\") " pod="openstack/placement-a998-account-create-update-qmwlx" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.304630 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-a998-account-create-update-qmwlx"] Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.305103 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81a2f40b-f1de-449b-9e0f-45171ffa318c-combined-ca-bundle\") pod \"barbican-worker-5f675cf6c9-7wz9g\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " pod="openstack/barbican-worker-5f675cf6c9-7wz9g" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.305173 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxvc7\" (UniqueName: \"kubernetes.io/projected/81a2f40b-f1de-449b-9e0f-45171ffa318c-kube-api-access-qxvc7\") pod \"barbican-worker-5f675cf6c9-7wz9g\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " pod="openstack/barbican-worker-5f675cf6c9-7wz9g" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.305205 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gkpw\" (UniqueName: \"kubernetes.io/projected/d5686dfc-618a-4996-a4b0-b1bd31365084-kube-api-access-2gkpw\") pod \"placement-a998-account-create-update-qmwlx\" (UID: \"d5686dfc-618a-4996-a4b0-b1bd31365084\") " pod="openstack/placement-a998-account-create-update-qmwlx" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.305309 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/81a2f40b-f1de-449b-9e0f-45171ffa318c-config-data-custom\") pod \"barbican-worker-5f675cf6c9-7wz9g\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " pod="openstack/barbican-worker-5f675cf6c9-7wz9g" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.305335 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81a2f40b-f1de-449b-9e0f-45171ffa318c-config-data\") pod 
\"barbican-worker-5f675cf6c9-7wz9g\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " pod="openstack/barbican-worker-5f675cf6c9-7wz9g" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.305371 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5686dfc-618a-4996-a4b0-b1bd31365084-operator-scripts\") pod \"placement-a998-account-create-update-qmwlx\" (UID: \"d5686dfc-618a-4996-a4b0-b1bd31365084\") " pod="openstack/placement-a998-account-create-update-qmwlx" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.305406 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81a2f40b-f1de-449b-9e0f-45171ffa318c-logs\") pod \"barbican-worker-5f675cf6c9-7wz9g\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " pod="openstack/barbican-worker-5f675cf6c9-7wz9g" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.306348 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5686dfc-618a-4996-a4b0-b1bd31365084-operator-scripts\") pod \"placement-a998-account-create-update-qmwlx\" (UID: \"d5686dfc-618a-4996-a4b0-b1bd31365084\") " pod="openstack/placement-a998-account-create-update-qmwlx" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.366615 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.366837 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="6c38a538-da2d-4097-9851-de6f8f2106c1" containerName="openstackclient" containerID="cri-o://c508023fb186e6df6d8bd07919aae57bb3fd29d1d4aa3d274d27c206409ca260" gracePeriod=2 Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.381617 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gkpw\" (UniqueName: \"kubernetes.io/projected/d5686dfc-618a-4996-a4b0-b1bd31365084-kube-api-access-2gkpw\") pod \"placement-a998-account-create-update-qmwlx\" (UID: \"d5686dfc-618a-4996-a4b0-b1bd31365084\") " pod="openstack/placement-a998-account-create-update-qmwlx" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.405931 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5f675cf6c9-7wz9g"] Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.407051 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81a2f40b-f1de-449b-9e0f-45171ffa318c-combined-ca-bundle\") pod \"barbican-worker-5f675cf6c9-7wz9g\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " pod="openstack/barbican-worker-5f675cf6c9-7wz9g" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.407103 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxvc7\" (UniqueName: \"kubernetes.io/projected/81a2f40b-f1de-449b-9e0f-45171ffa318c-kube-api-access-qxvc7\") pod \"barbican-worker-5f675cf6c9-7wz9g\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " pod="openstack/barbican-worker-5f675cf6c9-7wz9g" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.407205 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/81a2f40b-f1de-449b-9e0f-45171ffa318c-config-data-custom\") pod 
\"barbican-worker-5f675cf6c9-7wz9g\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " pod="openstack/barbican-worker-5f675cf6c9-7wz9g" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.407233 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81a2f40b-f1de-449b-9e0f-45171ffa318c-config-data\") pod \"barbican-worker-5f675cf6c9-7wz9g\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " pod="openstack/barbican-worker-5f675cf6c9-7wz9g" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.407308 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81a2f40b-f1de-449b-9e0f-45171ffa318c-logs\") pod \"barbican-worker-5f675cf6c9-7wz9g\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " pod="openstack/barbican-worker-5f675cf6c9-7wz9g" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.407784 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81a2f40b-f1de-449b-9e0f-45171ffa318c-logs\") pod \"barbican-worker-5f675cf6c9-7wz9g\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " pod="openstack/barbican-worker-5f675cf6c9-7wz9g" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.429266 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.465380 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-a998-account-create-update-zzgr7"] Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.468070 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81a2f40b-f1de-449b-9e0f-45171ffa318c-config-data\") pod \"barbican-worker-5f675cf6c9-7wz9g\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " pod="openstack/barbican-worker-5f675cf6c9-7wz9g" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.479660 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxvc7\" (UniqueName: \"kubernetes.io/projected/81a2f40b-f1de-449b-9e0f-45171ffa318c-kube-api-access-qxvc7\") pod \"barbican-worker-5f675cf6c9-7wz9g\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " pod="openstack/barbican-worker-5f675cf6c9-7wz9g" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.479730 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-a998-account-create-update-zzgr7"] Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.489024 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81a2f40b-f1de-449b-9e0f-45171ffa318c-combined-ca-bundle\") pod \"barbican-worker-5f675cf6c9-7wz9g\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " pod="openstack/barbican-worker-5f675cf6c9-7wz9g" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.490913 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/81a2f40b-f1de-449b-9e0f-45171ffa318c-config-data-custom\") pod \"barbican-worker-5f675cf6c9-7wz9g\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " pod="openstack/barbican-worker-5f675cf6c9-7wz9g" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.509635 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 
12:19:48.510435 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="37262aee-18bc-423e-9dac-272af09de237" containerName="openstack-network-exporter" containerID="cri-o://67316730ec155c07e7e601cb8c3c3be87325a29333647bce87c3036b77e66dbf" gracePeriod=300 Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.556651 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.576171 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-ef71-account-create-update-p6lhw"] Jan 22 12:19:48 crc kubenswrapper[4773]: E0122 12:19:48.576718 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c38a538-da2d-4097-9851-de6f8f2106c1" containerName="openstackclient" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.576736 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c38a538-da2d-4097-9851-de6f8f2106c1" containerName="openstackclient" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.576921 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c38a538-da2d-4097-9851-de6f8f2106c1" containerName="openstackclient" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.577649 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ef71-account-create-update-p6lhw" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.591789 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-a998-account-create-update-qmwlx" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.594004 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.606105 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-57dd79856-kc66z"] Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.607729 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-57dd79856-kc66z" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.615397 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-5f675cf6c9-7wz9g"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.652358 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-ef71-account-create-update-p6lhw"]
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.721367 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-logs\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.721439 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data-custom\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.721514 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nglsh\" (UniqueName: \"kubernetes.io/projected/e46f52ea-4d7a-48b9-ac29-fbb7e326a2db-kube-api-access-nglsh\") pod \"neutron-ef71-account-create-update-p6lhw\" (UID: \"e46f52ea-4d7a-48b9-ac29-fbb7e326a2db\") " pod="openstack/neutron-ef71-account-create-update-p6lhw"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.721541 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-public-tls-certs\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.721605 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-combined-ca-bundle\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.721636 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e46f52ea-4d7a-48b9-ac29-fbb7e326a2db-operator-scripts\") pod \"neutron-ef71-account-create-update-p6lhw\" (UID: \"e46f52ea-4d7a-48b9-ac29-fbb7e326a2db\") " pod="openstack/neutron-ef71-account-create-update-p6lhw"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.721694 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvcs6\" (UniqueName: \"kubernetes.io/projected/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-kube-api-access-fvcs6\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.721729 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.721774 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-internal-tls-certs\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:48 crc kubenswrapper[4773]: E0122 12:19:48.721890 4773 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Jan 22 12:19:48 crc kubenswrapper[4773]: E0122 12:19:48.721950 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-config-data podName:a4c14d2f-5507-4d08-be37-55d77b5491a3 nodeName:}" failed. No retries permitted until 2026-01-22 12:19:49.22193162 +0000 UTC m=+1496.800047445 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-config-data") pod "rabbitmq-cell1-server-0" (UID: "a4c14d2f-5507-4d08-be37-55d77b5491a3") : configmap "rabbitmq-cell1-config-data" not found
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.823922 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nglsh\" (UniqueName: \"kubernetes.io/projected/e46f52ea-4d7a-48b9-ac29-fbb7e326a2db-kube-api-access-nglsh\") pod \"neutron-ef71-account-create-update-p6lhw\" (UID: \"e46f52ea-4d7a-48b9-ac29-fbb7e326a2db\") " pod="openstack/neutron-ef71-account-create-update-p6lhw"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.823979 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-public-tls-certs\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.824029 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-combined-ca-bundle\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.824069 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e46f52ea-4d7a-48b9-ac29-fbb7e326a2db-operator-scripts\") pod \"neutron-ef71-account-create-update-p6lhw\" (UID: \"e46f52ea-4d7a-48b9-ac29-fbb7e326a2db\") " pod="openstack/neutron-ef71-account-create-update-p6lhw"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.824133 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvcs6\" (UniqueName: \"kubernetes.io/projected/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-kube-api-access-fvcs6\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.824157 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.824218 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-internal-tls-certs\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.824275 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-logs\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.824333 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data-custom\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:48 crc kubenswrapper[4773]: E0122 12:19:48.826381 4773 configmap.go:193] Couldn't get configMap openstack/ovncontroller-scripts: configmap "ovncontroller-scripts" not found
Jan 22 12:19:48 crc kubenswrapper[4773]: E0122 12:19:48.826423 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/757f37a0-3cc7-4d57-a956-83d236d6cebc-scripts podName:757f37a0-3cc7-4d57-a956-83d236d6cebc nodeName:}" failed. No retries permitted until 2026-01-22 12:19:49.326411125 +0000 UTC m=+1496.904526950 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/757f37a0-3cc7-4d57-a956-83d236d6cebc-scripts") pod "ovn-controller-rp6dh" (UID: "757f37a0-3cc7-4d57-a956-83d236d6cebc") : configmap "ovncontroller-scripts" not found
Jan 22 12:19:48 crc kubenswrapper[4773]: E0122 12:19:48.827659 4773 secret.go:188] Couldn't get secret openstack/barbican-config-data: secret "barbican-config-data" not found
Jan 22 12:19:48 crc kubenswrapper[4773]: E0122 12:19:48.827694 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data podName:8510a6ae-d5e7-4da2-9351-1f0f848d6fef nodeName:}" failed. No retries permitted until 2026-01-22 12:19:49.327685941 +0000 UTC m=+1496.905801756 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data") pod "barbican-api-57dd79856-kc66z" (UID: "8510a6ae-d5e7-4da2-9351-1f0f848d6fef") : secret "barbican-config-data" not found
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.828328 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-logs\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.828340 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e46f52ea-4d7a-48b9-ac29-fbb7e326a2db-operator-scripts\") pod \"neutron-ef71-account-create-update-p6lhw\" (UID: \"e46f52ea-4d7a-48b9-ac29-fbb7e326a2db\") " pod="openstack/neutron-ef71-account-create-update-p6lhw"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.838766 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-combined-ca-bundle\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:48 crc kubenswrapper[4773]: E0122 12:19:48.852905 4773 projected.go:194] Error preparing data for projected volume kube-api-access-fvcs6 for pod openstack/barbican-api-57dd79856-kc66z: failed to fetch token: serviceaccounts "barbican-barbican" not found
Jan 22 12:19:48 crc kubenswrapper[4773]: E0122 12:19:48.852979 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-kube-api-access-fvcs6 podName:8510a6ae-d5e7-4da2-9351-1f0f848d6fef nodeName:}" failed. No retries permitted until 2026-01-22 12:19:49.352959478 +0000 UTC m=+1496.931075303 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-fvcs6" (UniqueName: "kubernetes.io/projected/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-kube-api-access-fvcs6") pod "barbican-api-57dd79856-kc66z" (UID: "8510a6ae-d5e7-4da2-9351-1f0f848d6fef") : failed to fetch token: serviceaccounts "barbican-barbican" not found
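Note: each nestedpendingoperations.go error above schedules the failed MountVolume.SetUp for a retry, and the durationBeforeRetry doubles on consecutive failures of the same operation (500ms in these entries, then 1s and 2s further down this log). A minimal Go sketch of that doubling policy follows; the names and the cap value are illustrative assumptions, not kubelet's actual implementation.

    // Sketch of the retry schedule visible in these log lines: each failed
    // volume operation is retried after a delay that doubles per failure
    // (500ms -> 1s -> 2s -> ...), up to a cap.
    package main

    import (
    	"fmt"
    	"time"
    )

    const (
    	initialDelay = 500 * time.Millisecond          // first durationBeforeRetry seen in the log
    	maxDelay     = 2*time.Minute + 2*time.Second   // assumed cap, for illustration only
    )

    // nextDelay returns the backoff delay for the given failure count (1-based).
    func nextDelay(failures int) time.Duration {
    	d := initialDelay
    	for i := 1; i < failures; i++ {
    		d *= 2
    		if d > maxDelay {
    			return maxDelay
    		}
    	}
    	return d
    }

    func main() {
    	for f := 1; f <= 4; f++ {
    		fmt.Printf("failure %d -> retry in %v\n", f, nextDelay(f))
    	}
    }

Run against the log, failures 1 through 3 reproduce the 500ms, 1s, and 2s values recorded for the rabbitmq-cell1-server-0 config-data volume.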
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.866200 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-public-tls-certs\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.878198 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data-custom\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.885772 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-internal-tls-certs\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.889001 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nglsh\" (UniqueName: \"kubernetes.io/projected/e46f52ea-4d7a-48b9-ac29-fbb7e326a2db-kube-api-access-nglsh\") pod \"neutron-ef71-account-create-update-p6lhw\" (UID: \"e46f52ea-4d7a-48b9-ac29-fbb7e326a2db\") " pod="openstack/neutron-ef71-account-create-update-p6lhw"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.903435 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="37262aee-18bc-423e-9dac-272af09de237" containerName="ovsdbserver-sb" containerID="cri-o://5fbb45accf2025632e101b486f97d89eaf10a6d83beaef12851e44a769abaae8" gracePeriod=300
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.905006 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5" path="/var/lib/kubelet/pods/1a7e6140-b9e5-454d-8cdd-accf9ee8f5d5/volumes"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.917476 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45aa265d-efe0-4e53-8b9b-593f0da53c3f" path="/var/lib/kubelet/pods/45aa265d-efe0-4e53-8b9b-593f0da53c3f/volumes"
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.918897 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-0af4-account-create-update-gctq5"]
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.921987 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-ef71-account-create-update-2zrtj"]
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.922015 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-57dd79856-kc66z"]
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.922032 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-ef71-account-create-update-2zrtj"]
Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.922045 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-0af4-account-create-update-gctq5"]
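Note: the "Killing container with a grace period" entries above carry the grace period (300s for the ovsdbserver pods here; 30s and 10s elsewhere in this log) that originates from the pod's terminationGracePeriodSeconds or the delete request itself. A minimal client-go sketch of issuing such a delete; the clientset construction is elided and the 300s value is taken from the log line, so this is an illustration, not the operator's actual code.

    package example

    import (
    	"context"

    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    )

    // deleteWithGrace mirrors the kind of delete that produced the log lines
    // above: the API server records deletionGracePeriodSeconds on the pod, and
    // kubelet then kills each of its containers with that grace period.
    func deleteWithGrace(ctx context.Context, cs kubernetes.Interface) error {
    	grace := int64(300) // value observed in the log entry
    	return cs.CoreV1().Pods("openstack").Delete(ctx, "ovsdbserver-sb-0",
    		metav1.DeleteOptions{GracePeriodSeconds: &grace})
    }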
pods=["openstack/barbican-0af4-account-create-update-gctq5"] Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.922054 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-0af4-account-create-update-j924j"] Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.922064 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-ld8bk"] Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.922075 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-5b0e-account-create-update-9j2lk"] Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.922083 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.922098 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-0af4-account-create-update-j924j"] Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.922393 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="acfbd9c7-d136-4478-a181-7e9fb3033557" containerName="openstack-network-exporter" containerID="cri-o://8d2a6050e2267a0f976543f79d40ed2283eece122c1f12726a2431e1a337b88a" gracePeriod=300 Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.929226 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0af4-account-create-update-gctq5" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.948783 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-5b0e-account-create-update-9j2lk"] Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.952942 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Jan 22 12:19:48 crc kubenswrapper[4773]: I0122 12:19:48.953630 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-ef71-account-create-update-p6lhw" Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.035933 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbzzj\" (UniqueName: \"kubernetes.io/projected/bfff6d4b-eee9-4ceb-a420-512d1ba28760-kube-api-access-kbzzj\") pod \"barbican-0af4-account-create-update-gctq5\" (UID: \"bfff6d4b-eee9-4ceb-a420-512d1ba28760\") " pod="openstack/barbican-0af4-account-create-update-gctq5" Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.036545 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfff6d4b-eee9-4ceb-a420-512d1ba28760-operator-scripts\") pod \"barbican-0af4-account-create-update-gctq5\" (UID: \"bfff6d4b-eee9-4ceb-a420-512d1ba28760\") " pod="openstack/barbican-0af4-account-create-update-gctq5" Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.054122 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-ld8bk"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.135239 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-fg4x7"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.139834 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbzzj\" (UniqueName: \"kubernetes.io/projected/bfff6d4b-eee9-4ceb-a420-512d1ba28760-kube-api-access-kbzzj\") pod \"barbican-0af4-account-create-update-gctq5\" (UID: \"bfff6d4b-eee9-4ceb-a420-512d1ba28760\") " pod="openstack/barbican-0af4-account-create-update-gctq5" Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.139959 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfff6d4b-eee9-4ceb-a420-512d1ba28760-operator-scripts\") pod \"barbican-0af4-account-create-update-gctq5\" (UID: \"bfff6d4b-eee9-4ceb-a420-512d1ba28760\") " pod="openstack/barbican-0af4-account-create-update-gctq5" Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.144626 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfff6d4b-eee9-4ceb-a420-512d1ba28760-operator-scripts\") pod \"barbican-0af4-account-create-update-gctq5\" (UID: \"bfff6d4b-eee9-4ceb-a420-512d1ba28760\") " pod="openstack/barbican-0af4-account-create-update-gctq5" Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.212881 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbzzj\" (UniqueName: \"kubernetes.io/projected/bfff6d4b-eee9-4ceb-a420-512d1ba28760-kube-api-access-kbzzj\") pod \"barbican-0af4-account-create-update-gctq5\" (UID: \"bfff6d4b-eee9-4ceb-a420-512d1ba28760\") " pod="openstack/barbican-0af4-account-create-update-gctq5" Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.214166 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="acfbd9c7-d136-4478-a181-7e9fb3033557" containerName="ovsdbserver-nb" containerID="cri-o://0c89a12963a143e2b4ba0fe70de80385a721e9ebda036a4115b6dc5e805ca36d" gracePeriod=300 Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.230504 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-fg4x7"] Jan 22 12:19:49 crc kubenswrapper[4773]: E0122 12:19:49.314780 4773 
configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 22 12:19:49 crc kubenswrapper[4773]: E0122 12:19:49.314850 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-config-data podName:a4c14d2f-5507-4d08-be37-55d77b5491a3 nodeName:}" failed. No retries permitted until 2026-01-22 12:19:50.314834208 +0000 UTC m=+1497.892950033 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-config-data") pod "rabbitmq-cell1-server-0" (UID: "a4c14d2f-5507-4d08-be37-55d77b5491a3") : configmap "rabbitmq-cell1-config-data" not found Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.336486 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.336745 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="21901911-8523-4adc-9851-336360f4c11e" containerName="ovn-northd" containerID="cri-o://a5a2900d8f3ec8ba753c4eaa60a956c6d8edc660289208aed85a83444906c24b" gracePeriod=30 Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.336859 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="21901911-8523-4adc-9851-336360f4c11e" containerName="openstack-network-exporter" containerID="cri-o://c527c488066f9ecb13a4d3372005670c6e133edf7d8f14ab25f338473ff9cd61" gracePeriod=30 Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.385588 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-8cf3-account-create-update-qqn6s"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.410365 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-8cf3-account-create-update-qqn6s"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.418940 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvcs6\" (UniqueName: \"kubernetes.io/projected/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-kube-api-access-fvcs6\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z" Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.418994 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z" Jan 22 12:19:49 crc kubenswrapper[4773]: E0122 12:19:49.419143 4773 configmap.go:193] Couldn't get configMap openstack/ovncontroller-scripts: configmap "ovncontroller-scripts" not found Jan 22 12:19:49 crc kubenswrapper[4773]: E0122 12:19:49.419189 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/757f37a0-3cc7-4d57-a956-83d236d6cebc-scripts podName:757f37a0-3cc7-4d57-a956-83d236d6cebc nodeName:}" failed. No retries permitted until 2026-01-22 12:19:50.419174609 +0000 UTC m=+1497.997290434 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/757f37a0-3cc7-4d57-a956-83d236d6cebc-scripts") pod "ovn-controller-rp6dh" (UID: "757f37a0-3cc7-4d57-a956-83d236d6cebc") : configmap "ovncontroller-scripts" not found Jan 22 12:19:49 crc kubenswrapper[4773]: E0122 12:19:49.419638 4773 secret.go:188] Couldn't get secret openstack/barbican-config-data: secret "barbican-config-data" not found Jan 22 12:19:49 crc kubenswrapper[4773]: E0122 12:19:49.419663 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data podName:8510a6ae-d5e7-4da2-9351-1f0f848d6fef nodeName:}" failed. No retries permitted until 2026-01-22 12:19:50.419655602 +0000 UTC m=+1497.997771427 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data") pod "barbican-api-57dd79856-kc66z" (UID: "8510a6ae-d5e7-4da2-9351-1f0f848d6fef") : secret "barbican-config-data" not found Jan 22 12:19:49 crc kubenswrapper[4773]: E0122 12:19:49.444032 4773 projected.go:194] Error preparing data for projected volume kube-api-access-fvcs6 for pod openstack/barbican-api-57dd79856-kc66z: failed to fetch token: serviceaccounts "barbican-barbican" not found Jan 22 12:19:49 crc kubenswrapper[4773]: E0122 12:19:49.444093 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-kube-api-access-fvcs6 podName:8510a6ae-d5e7-4da2-9351-1f0f848d6fef nodeName:}" failed. No retries permitted until 2026-01-22 12:19:50.444077596 +0000 UTC m=+1498.022193411 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-fvcs6" (UniqueName: "kubernetes.io/projected/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-kube-api-access-fvcs6") pod "barbican-api-57dd79856-kc66z" (UID: "8510a6ae-d5e7-4da2-9351-1f0f848d6fef") : failed to fetch token: serviceaccounts "barbican-barbican" not found Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.455865 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-rp6dh"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.478498 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-tjdqh"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.478828 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-tjdqh" podUID="efd60909-f66c-4dc2-948f-5f63c735ab6e" containerName="openstack-network-exporter" containerID="cri-o://5115d35ee6bb43c24c3d875f3e5f3d449a9b5e2ac3662b5da588b01bf65d687a" gracePeriod=30 Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.539383 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fcd6f8f8f-82zlq"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.539752 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" podUID="e4868de7-6200-4cd9-948b-e0cdbbac5838" containerName="dnsmasq-dns" containerID="cri-o://9d37e09ac80e6c67a8c9a0e03394d9e19ff82acb6e4f4e52dde64dc3c023fdfe" gracePeriod=10 Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.548403 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-8744-account-create-update-m2dw9"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.572816 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
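Note: the repeated "failed to fetch token: serviceaccounts ... not found" errors above come from the projected volume plugin preparing a kube-api-access-* volume before the operator has created the pod's service account. A client-go sketch approximating that token request follows; the clientset construction is elided and the TTL is illustrative, so this shows the TokenRequest call involved rather than kubelet's exact code path.

    package example

    import (
    	"context"

    	authenticationv1 "k8s.io/api/authentication/v1"
    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    )

    // requestToken asks the TokenRequest subresource of a service account for a
    // token, which is what backs a kube-api-access-* projected volume. While the
    // "barbican-barbican" service account does not exist yet, this call fails
    // with the "serviceaccounts ... not found" error seen in the log.
    func requestToken(ctx context.Context, cs kubernetes.Interface) (string, error) {
    	ttl := int64(3600) // illustrative expiration, not the value kubelet uses
    	tr, err := cs.CoreV1().ServiceAccounts("openstack").CreateToken(ctx,
    		"barbican-barbican",
    		&authenticationv1.TokenRequest{
    			Spec: authenticationv1.TokenRequestSpec{ExpirationSeconds: &ttl},
    		},
    		metav1.CreateOptions{})
    	if err != nil {
    		return "", err
    	}
    	return tr.Status.Token, nil
    }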
pods=["openstack/nova-cell1-8744-account-create-update-m2dw9"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.595111 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-8cf3-account-create-update-sg972"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.601073 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-8cf3-account-create-update-sg972" Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.604633 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.637373 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-8cf3-account-create-update-sg972"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.666575 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-8744-account-create-update-4ljts"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.668186 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-8744-account-create-update-4ljts" Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.678608 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.692012 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-8744-account-create-update-4ljts"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.707149 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-eb71-account-create-update-l8944"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.708779 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-eb71-account-create-update-l8944" Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.724173 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-eb71-account-create-update-l8944"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.727509 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hskjt\" (UniqueName: \"kubernetes.io/projected/d367a46c-d551-46d3-b85f-a4499a1c4c0f-kube-api-access-hskjt\") pod \"nova-api-8cf3-account-create-update-sg972\" (UID: \"d367a46c-d551-46d3-b85f-a4499a1c4c0f\") " pod="openstack/nova-api-8cf3-account-create-update-sg972" Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.727926 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d367a46c-d551-46d3-b85f-a4499a1c4c0f-operator-scripts\") pod \"nova-api-8cf3-account-create-update-sg972\" (UID: \"d367a46c-d551-46d3-b85f-a4499a1c4c0f\") " pod="openstack/nova-api-8cf3-account-create-update-sg972" Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.730908 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.734587 4773 generic.go:334] "Generic (PLEG): container finished" podID="e4868de7-6200-4cd9-948b-e0cdbbac5838" containerID="9d37e09ac80e6c67a8c9a0e03394d9e19ff82acb6e4f4e52dde64dc3c023fdfe" exitCode=0 Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.734721 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" event={"ID":"e4868de7-6200-4cd9-948b-e0cdbbac5838","Type":"ContainerDied","Data":"9d37e09ac80e6c67a8c9a0e03394d9e19ff82acb6e4f4e52dde64dc3c023fdfe"} Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.737583 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-tjdqh_efd60909-f66c-4dc2-948f-5f63c735ab6e/openstack-network-exporter/0.log" Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.737608 4773 generic.go:334] "Generic (PLEG): container finished" podID="efd60909-f66c-4dc2-948f-5f63c735ab6e" containerID="5115d35ee6bb43c24c3d875f3e5f3d449a9b5e2ac3662b5da588b01bf65d687a" exitCode=2 Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.737653 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-tjdqh" event={"ID":"efd60909-f66c-4dc2-948f-5f63c735ab6e","Type":"ContainerDied","Data":"5115d35ee6bb43c24c3d875f3e5f3d449a9b5e2ac3662b5da588b01bf65d687a"} Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.747023 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_37262aee-18bc-423e-9dac-272af09de237/ovsdbserver-sb/0.log" Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.747057 4773 generic.go:334] "Generic (PLEG): container finished" podID="37262aee-18bc-423e-9dac-272af09de237" containerID="67316730ec155c07e7e601cb8c3c3be87325a29333647bce87c3036b77e66dbf" exitCode=2 Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.747071 4773 generic.go:334] "Generic (PLEG): container finished" podID="37262aee-18bc-423e-9dac-272af09de237" containerID="5fbb45accf2025632e101b486f97d89eaf10a6d83beaef12851e44a769abaae8" exitCode=143 Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.747114 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovsdbserver-sb-0" event={"ID":"37262aee-18bc-423e-9dac-272af09de237","Type":"ContainerDied","Data":"67316730ec155c07e7e601cb8c3c3be87325a29333647bce87c3036b77e66dbf"} Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.747137 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"37262aee-18bc-423e-9dac-272af09de237","Type":"ContainerDied","Data":"5fbb45accf2025632e101b486f97d89eaf10a6d83beaef12851e44a769abaae8"} Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.750305 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-fj6wj"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.758788 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_acfbd9c7-d136-4478-a181-7e9fb3033557/ovsdbserver-nb/0.log" Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.761670 4773 generic.go:334] "Generic (PLEG): container finished" podID="acfbd9c7-d136-4478-a181-7e9fb3033557" containerID="0c89a12963a143e2b4ba0fe70de80385a721e9ebda036a4115b6dc5e805ca36d" exitCode=143 Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.761707 4773 generic.go:334] "Generic (PLEG): container finished" podID="acfbd9c7-d136-4478-a181-7e9fb3033557" containerID="8d2a6050e2267a0f976543f79d40ed2283eece122c1f12726a2431e1a337b88a" exitCode=2 Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.761790 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"acfbd9c7-d136-4478-a181-7e9fb3033557","Type":"ContainerDied","Data":"0c89a12963a143e2b4ba0fe70de80385a721e9ebda036a4115b6dc5e805ca36d"} Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.761824 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"acfbd9c7-d136-4478-a181-7e9fb3033557","Type":"ContainerDied","Data":"8d2a6050e2267a0f976543f79d40ed2283eece122c1f12726a2431e1a337b88a"} Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.762686 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-vqtpx"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.771383 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-eb71-account-create-update-ddfzr"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.771826 4773 generic.go:334] "Generic (PLEG): container finished" podID="21901911-8523-4adc-9851-336360f4c11e" containerID="c527c488066f9ecb13a4d3372005670c6e133edf7d8f14ab25f338473ff9cd61" exitCode=2 Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.772074 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"21901911-8523-4adc-9851-336360f4c11e","Type":"ContainerDied","Data":"c527c488066f9ecb13a4d3372005670c6e133edf7d8f14ab25f338473ff9cd61"} Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.783237 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-bbfh5"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.792569 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-vqtpx"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.810172 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-eb71-account-create-update-ddfzr"] Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.817347 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-bbfh5"] Jan 22 12:19:49 crc 
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.825678 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-rqktn"]
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.831715 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzkhs\" (UniqueName: \"kubernetes.io/projected/5398d068-4617-4d25-bd5e-18fa8ae900cc-kube-api-access-xzkhs\") pod \"nova-cell1-8744-account-create-update-4ljts\" (UID: \"5398d068-4617-4d25-bd5e-18fa8ae900cc\") " pod="openstack/nova-cell1-8744-account-create-update-4ljts"
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.831821 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5398d068-4617-4d25-bd5e-18fa8ae900cc-operator-scripts\") pod \"nova-cell1-8744-account-create-update-4ljts\" (UID: \"5398d068-4617-4d25-bd5e-18fa8ae900cc\") " pod="openstack/nova-cell1-8744-account-create-update-4ljts"
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.831865 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hskjt\" (UniqueName: \"kubernetes.io/projected/d367a46c-d551-46d3-b85f-a4499a1c4c0f-kube-api-access-hskjt\") pod \"nova-api-8cf3-account-create-update-sg972\" (UID: \"d367a46c-d551-46d3-b85f-a4499a1c4c0f\") " pod="openstack/nova-api-8cf3-account-create-update-sg972"
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.831910 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d367a46c-d551-46d3-b85f-a4499a1c4c0f-operator-scripts\") pod \"nova-api-8cf3-account-create-update-sg972\" (UID: \"d367a46c-d551-46d3-b85f-a4499a1c4c0f\") " pod="openstack/nova-api-8cf3-account-create-update-sg972"
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.831977 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbc373ba-8a4b-44be-a687-63de038d5fa3-operator-scripts\") pod \"nova-cell0-eb71-account-create-update-l8944\" (UID: \"fbc373ba-8a4b-44be-a687-63de038d5fa3\") " pod="openstack/nova-cell0-eb71-account-create-update-l8944"
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.832028 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smchj\" (UniqueName: \"kubernetes.io/projected/fbc373ba-8a4b-44be-a687-63de038d5fa3-kube-api-access-smchj\") pod \"nova-cell0-eb71-account-create-update-l8944\" (UID: \"fbc373ba-8a4b-44be-a687-63de038d5fa3\") " pod="openstack/nova-cell0-eb71-account-create-update-l8944"
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.833635 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d367a46c-d551-46d3-b85f-a4499a1c4c0f-operator-scripts\") pod \"nova-api-8cf3-account-create-update-sg972\" (UID: \"d367a46c-d551-46d3-b85f-a4499a1c4c0f\") " pod="openstack/nova-api-8cf3-account-create-update-sg972"
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.833836 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-rqktn"]
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.858769 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.859220 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="aed7f398-6b73-4830-aa88-db87be2b99a0" containerName="cinder-scheduler" containerID="cri-o://1479304cf57df7fc13f187bc0a84e2fe6c7845f988d1d96f82b74355bc8045dd" gracePeriod=30
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.859494 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="aed7f398-6b73-4830-aa88-db87be2b99a0" containerName="probe" containerID="cri-o://231d892f6fe508e5896980a2b5938e377e1f57b840196329664f9ef1b5b26d99" gracePeriod=30
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.866144 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hskjt\" (UniqueName: \"kubernetes.io/projected/d367a46c-d551-46d3-b85f-a4499a1c4c0f-kube-api-access-hskjt\") pod \"nova-api-8cf3-account-create-update-sg972\" (UID: \"d367a46c-d551-46d3-b85f-a4499a1c4c0f\") " pod="openstack/nova-api-8cf3-account-create-update-sg972"
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.870257 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.893059 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-77688c4666-rzbr7"]
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.893314 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-77688c4666-rzbr7" podUID="878a6ce0-f293-4690-9049-c90155c56ff3" containerName="placement-log" containerID="cri-o://ed05a9e5edc313b45c9c66ec916cf3d4f450ad7afe2d3bf8f73e940333547c9c" gracePeriod=30
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.893523 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-77688c4666-rzbr7" podUID="878a6ce0-f293-4690-9049-c90155c56ff3" containerName="placement-api" containerID="cri-o://bca2d75d3d19f7bde38ac5639321578fd5fe1d01e28a264520eae5ab44fb18dd" gracePeriod=30
Jan 22 12:19:49 crc kubenswrapper[4773]: E0122 12:19:49.900497 4773 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Jan 22 12:19:49 crc kubenswrapper[4773]: 	container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash
Jan 22 12:19:49 crc kubenswrapper[4773]: 
Jan 22 12:19:49 crc kubenswrapper[4773]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh
Jan 22 12:19:49 crc kubenswrapper[4773]: 
Jan 22 12:19:49 crc kubenswrapper[4773]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."}
Jan 22 12:19:49 crc kubenswrapper[4773]: 
Jan 22 12:19:49 crc kubenswrapper[4773]: MYSQL_CMD="mysql -h -u root -P 3306"
Jan 22 12:19:49 crc kubenswrapper[4773]: 
Jan 22 12:19:49 crc kubenswrapper[4773]: if [ -n "" ]; then
Jan 22 12:19:49 crc kubenswrapper[4773]: GRANT_DATABASE=""
Jan 22 12:19:49 crc kubenswrapper[4773]: else
Jan 22 12:19:49 crc kubenswrapper[4773]: GRANT_DATABASE="*"
Jan 22 12:19:49 crc kubenswrapper[4773]: fi
Jan 22 12:19:49 crc kubenswrapper[4773]: 
Jan 22 12:19:49 crc kubenswrapper[4773]: # going for maximum compatibility here:
Jan 22 12:19:49 crc kubenswrapper[4773]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used
Jan 22 12:19:49 crc kubenswrapper[4773]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not
Jan 22 12:19:49 crc kubenswrapper[4773]: # 3. create user with CREATE but then do all password and TLS with ALTER to
Jan 22 12:19:49 crc kubenswrapper[4773]: # support updates
Jan 22 12:19:49 crc kubenswrapper[4773]: 
Jan 22 12:19:49 crc kubenswrapper[4773]: $MYSQL_CMD < logger="UnhandledError"
Jan 22 12:19:49 crc kubenswrapper[4773]: E0122 12:19:49.901663 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"openstack-cell1-mariadb-root-db-secret\\\" not found\"" pod="openstack/root-account-create-update-n4t5s" podUID="fbe5483b-39b1-4c4c-add8-d4ec00c26108"
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.911941 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-4p9mz"]
Jan 22 12:19:49 crc kubenswrapper[4773]: E0122 12:19:49.916752 4773 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod37262aee_18bc_423e_9dac_272af09de237.slice/crio-67316730ec155c07e7e601cb8c3c3be87325a29333647bce87c3036b77e66dbf.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podacfbd9c7_d136_4478_a181_7e9fb3033557.slice/crio-conmon-0c89a12963a143e2b4ba0fe70de80385a721e9ebda036a4115b6dc5e805ca36d.scope\": RecentStats: unable to find data in memory cache]"
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.939412 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzkhs\" (UniqueName: \"kubernetes.io/projected/5398d068-4617-4d25-bd5e-18fa8ae900cc-kube-api-access-xzkhs\") pod \"nova-cell1-8744-account-create-update-4ljts\" (UID: \"5398d068-4617-4d25-bd5e-18fa8ae900cc\") " pod="openstack/nova-cell1-8744-account-create-update-4ljts"
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.939967 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5398d068-4617-4d25-bd5e-18fa8ae900cc-operator-scripts\") pod \"nova-cell1-8744-account-create-update-4ljts\" (UID: \"5398d068-4617-4d25-bd5e-18fa8ae900cc\") " pod="openstack/nova-cell1-8744-account-create-update-4ljts"
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.940215 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbc373ba-8a4b-44be-a687-63de038d5fa3-operator-scripts\") pod \"nova-cell0-eb71-account-create-update-l8944\" (UID: \"fbc373ba-8a4b-44be-a687-63de038d5fa3\") " pod="openstack/nova-cell0-eb71-account-create-update-l8944"
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.940350 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smchj\" (UniqueName: \"kubernetes.io/projected/fbc373ba-8a4b-44be-a687-63de038d5fa3-kube-api-access-smchj\") pod \"nova-cell0-eb71-account-create-update-l8944\" (UID: \"fbc373ba-8a4b-44be-a687-63de038d5fa3\") " pod="openstack/nova-cell0-eb71-account-create-update-l8944"
Jan 22 12:19:49 crc kubenswrapper[4773]: E0122 12:19:49.940910 4773 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found
Jan 22 12:19:49 crc kubenswrapper[4773]: E0122 12:19:49.942066 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5398d068-4617-4d25-bd5e-18fa8ae900cc-operator-scripts podName:5398d068-4617-4d25-bd5e-18fa8ae900cc nodeName:}" failed. No retries permitted until 2026-01-22 12:19:50.442050036 +0000 UTC m=+1498.020165861 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/5398d068-4617-4d25-bd5e-18fa8ae900cc-operator-scripts") pod "nova-cell1-8744-account-create-update-4ljts" (UID: "5398d068-4617-4d25-bd5e-18fa8ae900cc") : configmap "openstack-cell1-scripts" not found
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.942966 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbc373ba-8a4b-44be-a687-63de038d5fa3-operator-scripts\") pod \"nova-cell0-eb71-account-create-update-l8944\" (UID: \"fbc373ba-8a4b-44be-a687-63de038d5fa3\") " pod="openstack/nova-cell0-eb71-account-create-update-l8944"
Jan 22 12:19:49 crc kubenswrapper[4773]: E0122 12:19:49.946685 4773 projected.go:194] Error preparing data for projected volume kube-api-access-xzkhs for pod openstack/nova-cell1-8744-account-create-update-4ljts: failed to fetch token: serviceaccounts "galera-openstack-cell1" not found
Jan 22 12:19:49 crc kubenswrapper[4773]: E0122 12:19:49.946755 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/5398d068-4617-4d25-bd5e-18fa8ae900cc-kube-api-access-xzkhs podName:5398d068-4617-4d25-bd5e-18fa8ae900cc nodeName:}" failed. No retries permitted until 2026-01-22 12:19:50.446737907 +0000 UTC m=+1498.024853732 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-xzkhs" (UniqueName: "kubernetes.io/projected/5398d068-4617-4d25-bd5e-18fa8ae900cc-kube-api-access-xzkhs") pod "nova-cell1-8744-account-create-update-4ljts" (UID: "5398d068-4617-4d25-bd5e-18fa8ae900cc") : failed to fetch token: serviceaccounts "galera-openstack-cell1" not found
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.975675 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-4p9mz"]
Jan 22 12:19:49 crc kubenswrapper[4773]: I0122 12:19:49.981396 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smchj\" (UniqueName: \"kubernetes.io/projected/fbc373ba-8a4b-44be-a687-63de038d5fa3-kube-api-access-smchj\") pod \"nova-cell0-eb71-account-create-update-l8944\" (UID: \"fbc373ba-8a4b-44be-a687-63de038d5fa3\") " pod="openstack/nova-cell0-eb71-account-create-update-l8944"
Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.012076 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.012943 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="04725b89-f1ec-45f8-a69a-5427230da499" containerName="cinder-api-log" containerID="cri-o://a2c743ccc8058456082f15d2a6c949e38df6e8c5dbd52d76888c3f1f8d965d57" gracePeriod=30
Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.014257 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="04725b89-f1ec-45f8-a69a-5427230da499" containerName="cinder-api" containerID="cri-o://f0274aaa2adaf6a4656046810ff93c7b2ab941e940754cbad018d84fab5c25eb" gracePeriod=30
Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.049843 4773 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.049916 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-config-data podName:a688a76d-2498-4542-8285-709caf211e8a nodeName:}" failed. No retries permitted until 2026-01-22 12:19:50.549897615 +0000 UTC m=+1498.128013440 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-config-data") pod "rabbitmq-server-0" (UID: "a688a76d-2498-4542-8285-709caf211e8a") : configmap "rabbitmq-config-data" not found
Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.116741 4773 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Jan 22 12:19:50 crc kubenswrapper[4773]: 	container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash
Jan 22 12:19:50 crc kubenswrapper[4773]: 
Jan 22 12:19:50 crc kubenswrapper[4773]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh
Jan 22 12:19:50 crc kubenswrapper[4773]: 
Jan 22 12:19:50 crc kubenswrapper[4773]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."}
Jan 22 12:19:50 crc kubenswrapper[4773]: 
Jan 22 12:19:50 crc kubenswrapper[4773]: MYSQL_CMD="mysql -h -u root -P 3306"
Jan 22 12:19:50 crc kubenswrapper[4773]: 
Jan 22 12:19:50 crc kubenswrapper[4773]: if [ -n "placement" ]; then
Jan 22 12:19:50 crc kubenswrapper[4773]: GRANT_DATABASE="placement"
Jan 22 12:19:50 crc kubenswrapper[4773]: else
Jan 22 12:19:50 crc kubenswrapper[4773]: GRANT_DATABASE="*"
Jan 22 12:19:50 crc kubenswrapper[4773]: fi
Jan 22 12:19:50 crc kubenswrapper[4773]: 
Jan 22 12:19:50 crc kubenswrapper[4773]: # going for maximum compatibility here:
Jan 22 12:19:50 crc kubenswrapper[4773]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used
Jan 22 12:19:50 crc kubenswrapper[4773]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not
Jan 22 12:19:50 crc kubenswrapper[4773]: # 3. create user with CREATE but then do all password and TLS with ALTER to
Jan 22 12:19:50 crc kubenswrapper[4773]: # support updates
Jan 22 12:19:50 crc kubenswrapper[4773]: 
Jan 22 12:19:50 crc kubenswrapper[4773]: $MYSQL_CMD < logger="UnhandledError"
Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.118492 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"placement-db-secret\\\" not found\"" pod="openstack/placement-a998-account-create-update-qmwlx" podUID="d5686dfc-618a-4996-a4b0-b1bd31365084"
Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.235042 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-b7znb"]
Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.256920 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-b7znb"]
Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.290962 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5c54fcb95c-24djg"]
Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.291241 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5c54fcb95c-24djg" podUID="a393de80-9ad0-413e-a2a1-6ee14de22049" containerName="neutron-api" containerID="cri-o://e4758759a8e6e7a9d38053a3b2d48dd8294e18fc3ff82b5caecad2b27736586b" gracePeriod=30
Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.291465 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5c54fcb95c-24djg" podUID="a393de80-9ad0-413e-a2a1-6ee14de22049" containerName="neutron-httpd" containerID="cri-o://b02f864bc31ff6b515389a47f6d522f1429809938c2225e47c25f5a54c54ef37" gracePeriod=30
Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.319231 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-thmt8"]
Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.322398 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-thmt8"]
Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.338948 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"]
Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.339573 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="account-server" containerID="cri-o://e9546b98e820ca3d0bc3c7f7f6af1821b37a056264052a38280687379bf170a4" gracePeriod=30
Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.339612 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="object-replicator" containerID="cri-o://c18e352494406d6cec346bf8c21ed393d59ff5e3ef7616cda1e8dcff47f2665d" gracePeriod=30
Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.339689 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="object-server" containerID="cri-o://049e38f5a87408bec16aa9630dee0dfcbba21d9d01cf8238d9222592a966b9a8" gracePeriod=30
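Note: the CreateContainerConfigError entries above ("secret \"placement-db-secret\" not found", and "openstack-cell1-mariadb-root-db-secret" earlier) are the kubelet failing container creation because a referenced Secret does not exist yet; it keeps resyncing until the operator creates it. A minimal client-go sketch of the existence check involved; the function name is hypothetical and clientset construction is elided.

    package example

    import (
    	"context"

    	apierrors "k8s.io/apimachinery/pkg/api/errors"
    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    )

    // secretReady reports whether a secret a pod depends on exists yet. The
    // "secret ... not found" errors in the log correspond to the IsNotFound
    // branch being taken while the operator is still (re)creating the object.
    func secretReady(ctx context.Context, cs kubernetes.Interface, ns, name string) (bool, error) {
    	_, err := cs.CoreV1().Secrets(ns).Get(ctx, name, metav1.GetOptions{})
    	if apierrors.IsNotFound(err) {
    		return false, nil
    	}
    	if err != nil {
    		return false, err
    	}
    	return true, nil
    }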
podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="container-updater" containerID="cri-o://d468ff37ff2469b69cb119a74db751875822b460315803fe8a44e5bcdf6c0ecf" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.339760 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="container-auditor" containerID="cri-o://dd9af132571b4bed486d397dfe0215f073e7f36c6b34aa87e560ea6a3f50a32e" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.339793 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="container-replicator" containerID="cri-o://72835dd244d879ebb03d43571334445f863b46f2f1f1ee2655d94a3e3907e8b5" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.339825 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="container-server" containerID="cri-o://fce77a5acf624e2bfa4fbd706de0a7bc45675dcc2a670de3f60e284d1156e388" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.339853 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="account-reaper" containerID="cri-o://186e2a68f59e8f83347891cfa964ee1ba154ccbd86503a5b5bcfcac2d36382b6" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.339882 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="account-auditor" containerID="cri-o://8614587119ff68cbab75cc2f49c17c865dbaaace22e54b8c99cb0c2c1183280b" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.339911 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="account-replicator" containerID="cri-o://4550c34e059dc4785794eb4b0ed847aef46b118c21bcc68b6e25d13c02b29550" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.340031 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="object-expirer" containerID="cri-o://cf1b9f6bfafc8703b57d6396414ff00ca8d8f849a42e8a643dd6012f8b0ce046" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.340065 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="swift-recon-cron" containerID="cri-o://703a45160239152c8b7d740aeea8cc73cd6c09b0a35fa470daa71bdf7f10e577" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.340095 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="rsync" containerID="cri-o://dd185ed78ea8d0ba5be7c159d3c9510adff0ac36b806be4c8fe7fd52820b4ab1" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.340212 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" 
containerName="object-updater" containerID="cri-o://08cda7f85172a8a3837740fbc97bd5edfa4527dcc2c0715be2ebaf2bac22e6b7" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.340304 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="object-auditor" containerID="cri-o://88011f552abb27872acf5ebae2c74881b96ee28ef4202e35cd34c74e6bc5e417" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.349510 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-zp46d"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.362366 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-4475-account-create-update-crt4r"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.376333 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-4475-account-create-update-crt4r"] Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.384066 4773 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.384119 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-config-data podName:a4c14d2f-5507-4d08-be37-55d77b5491a3 nodeName:}" failed. No retries permitted until 2026-01-22 12:19:52.384105691 +0000 UTC m=+1499.962221516 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-config-data") pod "rabbitmq-cell1-server-0" (UID: "a4c14d2f-5507-4d08-be37-55d77b5491a3") : configmap "rabbitmq-cell1-config-data" not found Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.387772 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-zp46d"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.402009 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-n4t5s"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.430657 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.430936 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="f681589a-ad29-4485-9313-7e63da547635" containerName="glance-log" containerID="cri-o://ec104a46c303372809d61350bbc1b44c16590837f54cef8d940afde155b334e5" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.431407 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="f681589a-ad29-4485-9313-7e63da547635" containerName="glance-httpd" containerID="cri-o://b23e36a154e8bd280a582d1bff462b1409937c638edaa9eeeed72ca40585f7bc" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.465894 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-bqvfx"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.485229 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5398d068-4617-4d25-bd5e-18fa8ae900cc-operator-scripts\") pod \"nova-cell1-8744-account-create-update-4ljts\" (UID: 
\"5398d068-4617-4d25-bd5e-18fa8ae900cc\") " pod="openstack/nova-cell1-8744-account-create-update-4ljts" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.485339 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvcs6\" (UniqueName: \"kubernetes.io/projected/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-kube-api-access-fvcs6\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.485371 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.485455 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzkhs\" (UniqueName: \"kubernetes.io/projected/5398d068-4617-4d25-bd5e-18fa8ae900cc-kube-api-access-xzkhs\") pod \"nova-cell1-8744-account-create-update-4ljts\" (UID: \"5398d068-4617-4d25-bd5e-18fa8ae900cc\") " pod="openstack/nova-cell1-8744-account-create-update-4ljts" Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.485790 4773 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.485837 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5398d068-4617-4d25-bd5e-18fa8ae900cc-operator-scripts podName:5398d068-4617-4d25-bd5e-18fa8ae900cc nodeName:}" failed. No retries permitted until 2026-01-22 12:19:51.485824178 +0000 UTC m=+1499.063940003 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/5398d068-4617-4d25-bd5e-18fa8ae900cc-operator-scripts") pod "nova-cell1-8744-account-create-update-4ljts" (UID: "5398d068-4617-4d25-bd5e-18fa8ae900cc") : configmap "openstack-cell1-scripts" not found Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.485996 4773 secret.go:188] Couldn't get secret openstack/barbican-config-data: secret "barbican-config-data" not found Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.486022 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data podName:8510a6ae-d5e7-4da2-9351-1f0f848d6fef nodeName:}" failed. No retries permitted until 2026-01-22 12:19:52.486011104 +0000 UTC m=+1500.064126929 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data") pod "barbican-api-57dd79856-kc66z" (UID: "8510a6ae-d5e7-4da2-9351-1f0f848d6fef") : secret "barbican-config-data" not found Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.486049 4773 configmap.go:193] Couldn't get configMap openstack/ovncontroller-scripts: configmap "ovncontroller-scripts" not found Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.486069 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/757f37a0-3cc7-4d57-a956-83d236d6cebc-scripts podName:757f37a0-3cc7-4d57-a956-83d236d6cebc nodeName:}" failed. 
No retries permitted until 2026-01-22 12:19:52.486061025 +0000 UTC m=+1500.064176850 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/757f37a0-3cc7-4d57-a956-83d236d6cebc-scripts") pod "ovn-controller-rp6dh" (UID: "757f37a0-3cc7-4d57-a956-83d236d6cebc") : configmap "ovncontroller-scripts" not found Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.486273 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-bqvfx"] Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.504062 4773 projected.go:194] Error preparing data for projected volume kube-api-access-fvcs6 for pod openstack/barbican-api-57dd79856-kc66z: failed to fetch token: serviceaccounts "barbican-barbican" not found Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.504128 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-kube-api-access-fvcs6 podName:8510a6ae-d5e7-4da2-9351-1f0f848d6fef nodeName:}" failed. No retries permitted until 2026-01-22 12:19:52.50410988 +0000 UTC m=+1500.082225695 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-fvcs6" (UniqueName: "kubernetes.io/projected/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-kube-api-access-fvcs6") pod "barbican-api-57dd79856-kc66z" (UID: "8510a6ae-d5e7-4da2-9351-1f0f848d6fef") : failed to fetch token: serviceaccounts "barbican-barbican" not found Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.504171 4773 projected.go:194] Error preparing data for projected volume kube-api-access-xzkhs for pod openstack/nova-cell1-8744-account-create-update-4ljts: failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.504194 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/5398d068-4617-4d25-bd5e-18fa8ae900cc-kube-api-access-xzkhs podName:5398d068-4617-4d25-bd5e-18fa8ae900cc nodeName:}" failed. No retries permitted until 2026-01-22 12:19:51.504188572 +0000 UTC m=+1499.082304397 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-xzkhs" (UniqueName: "kubernetes.io/projected/5398d068-4617-4d25-bd5e-18fa8ae900cc-kube-api-access-xzkhs") pod "nova-cell1-8744-account-create-update-4ljts" (UID: "5398d068-4617-4d25-bd5e-18fa8ae900cc") : failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.506436 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-jf4bv"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.537943 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-jf4bv"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.557493 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.557742 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="b9c73637-566a-47b5-bba6-97948a973a47" containerName="glance-log" containerID="cri-o://3dfcb4bd6224b403fd1ea1d5e784918a520d4eaddf99c8f2c147ba29dd4f733e" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.558351 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="b9c73637-566a-47b5-bba6-97948a973a47" containerName="glance-httpd" containerID="cri-o://6e5058f7647d7d88bf8a21cccbc0b8b01c2c40459ed5ee5f2691f3f5b1856157" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.574251 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-a998-account-create-update-qmwlx"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.583113 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-5ztvh"] Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.587937 4773 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.588012 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-config-data podName:a688a76d-2498-4542-8285-709caf211e8a nodeName:}" failed. No retries permitted until 2026-01-22 12:19:51.587993048 +0000 UTC m=+1499.166108873 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-config-data") pod "rabbitmq-server-0" (UID: "a688a76d-2498-4542-8285-709caf211e8a") : configmap "rabbitmq-config-data" not found Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.591813 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-fj6wj" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovs-vswitchd" containerID="cri-o://42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.601908 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-sk55j"] Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.604351 4773 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Jan 22 12:19:50 crc kubenswrapper[4773]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 22 12:19:50 crc kubenswrapper[4773]: + source /usr/local/bin/container-scripts/functions Jan 22 12:19:50 crc kubenswrapper[4773]: ++ OVNBridge=br-int Jan 22 12:19:50 crc kubenswrapper[4773]: ++ OVNRemote=tcp:localhost:6642 Jan 22 12:19:50 crc kubenswrapper[4773]: ++ OVNEncapType=geneve Jan 22 12:19:50 crc kubenswrapper[4773]: ++ OVNAvailabilityZones= Jan 22 12:19:50 crc kubenswrapper[4773]: ++ EnableChassisAsGateway=true Jan 22 12:19:50 crc kubenswrapper[4773]: ++ PhysicalNetworks= Jan 22 12:19:50 crc kubenswrapper[4773]: ++ OVNHostName= Jan 22 12:19:50 crc kubenswrapper[4773]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 22 12:19:50 crc kubenswrapper[4773]: ++ ovs_dir=/var/lib/openvswitch Jan 22 12:19:50 crc kubenswrapper[4773]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 22 12:19:50 crc kubenswrapper[4773]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 22 12:19:50 crc kubenswrapper[4773]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 22 12:19:50 crc kubenswrapper[4773]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 22 12:19:50 crc kubenswrapper[4773]: + sleep 0.5 Jan 22 12:19:50 crc kubenswrapper[4773]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 22 12:19:50 crc kubenswrapper[4773]: + cleanup_ovsdb_server_semaphore Jan 22 12:19:50 crc kubenswrapper[4773]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 22 12:19:50 crc kubenswrapper[4773]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 22 12:19:50 crc kubenswrapper[4773]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-fj6wj" message=< Jan 22 12:19:50 crc kubenswrapper[4773]: Exiting ovsdb-server (5) [ OK ] Jan 22 12:19:50 crc kubenswrapper[4773]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 22 12:19:50 crc kubenswrapper[4773]: + source /usr/local/bin/container-scripts/functions Jan 22 12:19:50 crc kubenswrapper[4773]: ++ OVNBridge=br-int Jan 22 12:19:50 crc kubenswrapper[4773]: ++ OVNRemote=tcp:localhost:6642 Jan 22 12:19:50 crc kubenswrapper[4773]: ++ OVNEncapType=geneve Jan 22 12:19:50 crc kubenswrapper[4773]: ++ OVNAvailabilityZones= Jan 22 12:19:50 crc kubenswrapper[4773]: ++ EnableChassisAsGateway=true Jan 22 12:19:50 crc kubenswrapper[4773]: ++ PhysicalNetworks= Jan 22 12:19:50 crc kubenswrapper[4773]: ++ OVNHostName= Jan 22 12:19:50 crc kubenswrapper[4773]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 22 12:19:50 crc kubenswrapper[4773]: ++ ovs_dir=/var/lib/openvswitch Jan 22 12:19:50 crc kubenswrapper[4773]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 22 12:19:50 crc kubenswrapper[4773]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 22 12:19:50 crc kubenswrapper[4773]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 22 12:19:50 crc kubenswrapper[4773]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 22 12:19:50 crc kubenswrapper[4773]: + sleep 0.5 Jan 22 12:19:50 crc kubenswrapper[4773]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 22 12:19:50 crc kubenswrapper[4773]: + cleanup_ovsdb_server_semaphore Jan 22 12:19:50 crc kubenswrapper[4773]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 22 12:19:50 crc kubenswrapper[4773]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 22 12:19:50 crc kubenswrapper[4773]: > Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.604399 4773 kuberuntime_container.go:691] "PreStop hook failed" err=< Jan 22 12:19:50 crc kubenswrapper[4773]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 22 12:19:50 crc kubenswrapper[4773]: + source /usr/local/bin/container-scripts/functions Jan 22 12:19:50 crc kubenswrapper[4773]: ++ OVNBridge=br-int Jan 22 12:19:50 crc kubenswrapper[4773]: ++ OVNRemote=tcp:localhost:6642 Jan 22 12:19:50 crc kubenswrapper[4773]: ++ OVNEncapType=geneve Jan 22 12:19:50 crc kubenswrapper[4773]: ++ OVNAvailabilityZones= Jan 22 12:19:50 crc kubenswrapper[4773]: ++ EnableChassisAsGateway=true Jan 22 12:19:50 crc kubenswrapper[4773]: ++ PhysicalNetworks= Jan 22 12:19:50 crc kubenswrapper[4773]: ++ OVNHostName= Jan 22 12:19:50 crc kubenswrapper[4773]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 22 12:19:50 crc kubenswrapper[4773]: ++ ovs_dir=/var/lib/openvswitch Jan 22 12:19:50 crc kubenswrapper[4773]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 22 12:19:50 crc kubenswrapper[4773]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 22 12:19:50 crc kubenswrapper[4773]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 22 12:19:50 crc kubenswrapper[4773]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 22 12:19:50 crc kubenswrapper[4773]: + sleep 0.5 Jan 22 12:19:50 crc kubenswrapper[4773]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 22 12:19:50 crc kubenswrapper[4773]: + cleanup_ovsdb_server_semaphore Jan 22 12:19:50 crc kubenswrapper[4773]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 22 12:19:50 crc kubenswrapper[4773]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 22 12:19:50 crc kubenswrapper[4773]: > pod="openstack/ovn-controller-ovs-fj6wj" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovsdb-server" containerID="cri-o://705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.604440 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-fj6wj" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovsdb-server" containerID="cri-o://705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.622356 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-5ztvh"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.624625 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-sk55j"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.657600 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-ef71-account-create-update-p6lhw"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.686371 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0419433e-4ad6-4e7e-acb0-e769c5ba611c" path="/var/lib/kubelet/pods/0419433e-4ad6-4e7e-acb0-e769c5ba611c/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.686972 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05299183-b5cc-4655-b5d2-6a89d1f5c138" path="/var/lib/kubelet/pods/05299183-b5cc-4655-b5d2-6a89d1f5c138/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.687610 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5" path="/var/lib/kubelet/pods/1963c3fd-a0b2-4a0d-8f86-c4aacfa0bff5/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.690682 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ee43913-badd-4397-a999-70b306ca56c3" path="/var/lib/kubelet/pods/1ee43913-badd-4397-a999-70b306ca56c3/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.691423 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4958bcb5-02cc-4c59-8b85-c386263dd3b8" path="/var/lib/kubelet/pods/4958bcb5-02cc-4c59-8b85-c386263dd3b8/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.692160 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e1843f0-45c3-4e84-ab98-b7db909e37bd" path="/var/lib/kubelet/pods/4e1843f0-45c3-4e84-ab98-b7db909e37bd/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.696330 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54460d78-28b8-49c7-85ec-d4333db4b86c" path="/var/lib/kubelet/pods/54460d78-28b8-49c7-85ec-d4333db4b86c/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.697714 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62ef8b2b-1672-4050-9d1a-0dbdef69fcae" path="/var/lib/kubelet/pods/62ef8b2b-1672-4050-9d1a-0dbdef69fcae/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.698580 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="700990cd-025d-4ee8-9fc1-8c82488ed341" path="/var/lib/kubelet/pods/700990cd-025d-4ee8-9fc1-8c82488ed341/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.709585 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7054d5a6-f005-40f2-bcfe-95835a0aa45d" path="/var/lib/kubelet/pods/7054d5a6-f005-40f2-bcfe-95835a0aa45d/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.710265 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fcf03f3-751b-4dd9-a44b-531164a3b4d4" path="/var/lib/kubelet/pods/7fcf03f3-751b-4dd9-a44b-531164a3b4d4/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.710821 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93c45ab9-7e10-4217-93fa-5d801fb55c6d" path="/var/lib/kubelet/pods/93c45ab9-7e10-4217-93fa-5d801fb55c6d/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.712784 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a436a8ee-6b7a-4ea5-9056-c289241686e6" path="/var/lib/kubelet/pods/a436a8ee-6b7a-4ea5-9056-c289241686e6/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.713345 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab07d00f-1829-4512-ba14-b9ed26a00fed" path="/var/lib/kubelet/pods/ab07d00f-1829-4512-ba14-b9ed26a00fed/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.713897 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8d8e91e-77e9-430e-b7c3-da6898073d0a" path="/var/lib/kubelet/pods/b8d8e91e-77e9-430e-b7c3-da6898073d0a/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.714474 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8845496-a1b9-4d8e-b60c-b657ceb619f5" path="/var/lib/kubelet/pods/c8845496-a1b9-4d8e-b60c-b657ceb619f5/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.715530 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb8cab67-3335-428b-bda5-a17d7bbed7df" path="/var/lib/kubelet/pods/cb8cab67-3335-428b-bda5-a17d7bbed7df/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.716143 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e83eccf7-20cf-4226-b994-5d00f3cef915" path="/var/lib/kubelet/pods/e83eccf7-20cf-4226-b994-5d00f3cef915/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.721337 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eadfca17-cc73-4d86-a5b4-6682fbe8d49d" path="/var/lib/kubelet/pods/eadfca17-cc73-4d86-a5b4-6682fbe8d49d/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.722781 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9fe524c-8ecb-4e39-a1f6-2544aeb74b41" path="/var/lib/kubelet/pods/f9fe524c-8ecb-4e39-a1f6-2544aeb74b41/volumes" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.723707 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.725219 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-0af4-account-create-update-gctq5"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.725303 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-5xcwn"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.740037 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-5xcwn"] Jan 22 
12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.764452 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-8744-account-create-update-4ljts"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.774650 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5b5f5dd584-sql6t"] Jan 22 12:19:50 crc kubenswrapper[4773]: W0122 12:19:50.782670 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode46f52ea_4d7a_48b9_ac29_fbb7e326a2db.slice/crio-2e152729794ff239a596980d4e052d21e9b06a8b0242d5b92823bd203943d8f7 WatchSource:0}: Error finding container 2e152729794ff239a596980d4e052d21e9b06a8b0242d5b92823bd203943d8f7: Status 404 returned error can't find the container with id 2e152729794ff239a596980d4e052d21e9b06a8b0242d5b92823bd203943d8f7 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.786881 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-pdmmj"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.788422 4773 generic.go:334] "Generic (PLEG): container finished" podID="f681589a-ad29-4485-9313-7e63da547635" containerID="ec104a46c303372809d61350bbc1b44c16590837f54cef8d940afde155b334e5" exitCode=143 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.788539 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f681589a-ad29-4485-9313-7e63da547635","Type":"ContainerDied","Data":"ec104a46c303372809d61350bbc1b44c16590837f54cef8d940afde155b334e5"} Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.791976 4773 secret.go:188] Couldn't get secret openstack/barbican-api-config-data: secret "barbican-api-config-data" not found Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.792035 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data-custom podName:8510a6ae-d5e7-4da2-9351-1f0f848d6fef nodeName:}" failed. No retries permitted until 2026-01-22 12:19:51.29201987 +0000 UTC m=+1498.870135695 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data-custom") pod "barbican-api-57dd79856-kc66z" (UID: "8510a6ae-d5e7-4da2-9351-1f0f848d6fef") : secret "barbican-api-config-data" not found Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.797220 4773 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 22 12:19:50 crc kubenswrapper[4773]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 22 12:19:50 crc kubenswrapper[4773]: Jan 22 12:19:50 crc kubenswrapper[4773]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 22 12:19:50 crc kubenswrapper[4773]: Jan 22 12:19:50 crc kubenswrapper[4773]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 22 12:19:50 crc kubenswrapper[4773]: Jan 22 12:19:50 crc kubenswrapper[4773]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 22 12:19:50 crc kubenswrapper[4773]: Jan 22 12:19:50 crc kubenswrapper[4773]: if [ -n "neutron" ]; then Jan 22 12:19:50 crc kubenswrapper[4773]: GRANT_DATABASE="neutron" Jan 22 12:19:50 crc kubenswrapper[4773]: else Jan 22 12:19:50 crc kubenswrapper[4773]: GRANT_DATABASE="*" Jan 22 12:19:50 crc kubenswrapper[4773]: fi Jan 22 12:19:50 crc kubenswrapper[4773]: Jan 22 12:19:50 crc kubenswrapper[4773]: # going for maximum compatibility here: Jan 22 12:19:50 crc kubenswrapper[4773]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 22 12:19:50 crc kubenswrapper[4773]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 22 12:19:50 crc kubenswrapper[4773]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 22 12:19:50 crc kubenswrapper[4773]: # support updates Jan 22 12:19:50 crc kubenswrapper[4773]: Jan 22 12:19:50 crc kubenswrapper[4773]: $MYSQL_CMD < logger="UnhandledError" Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.799553 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack/neutron-ef71-account-create-update-p6lhw" podUID="e46f52ea-4d7a-48b9-ac29-fbb7e326a2db" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.801097 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" event={"ID":"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15","Type":"ContainerStarted","Data":"a56c5a91dc332b73b9b74b7e61f3cbcf16ecab195bd85059e7802e603ab81f56"} Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.803496 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.813264 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-pdmmj"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.813338 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" event={"ID":"e4868de7-6200-4cd9-948b-e0cdbbac5838","Type":"ContainerDied","Data":"3d8bb2689e108796068027a1395a177ed1c49c74fe51a0ece872707a44d388aa"} Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.813382 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d8bb2689e108796068027a1395a177ed1c49c74fe51a0ece872707a44d388aa" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.822226 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.822500 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="637a651c-e338-45ba-8bd3-a8f838500893" containerName="nova-api-log" containerID="cri-o://f208616daee705d1c4d908c528f31148f8ced61b33147903c586028760cfb3c8" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.822643 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="637a651c-e338-45ba-8bd3-a8f838500893" containerName="nova-api-api" containerID="cri-o://430c12aa11f46c2666087aed88a66b65f91c8e79648c0421d3f83f1922bda4ef" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.827907 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0af4-account-create-update-gctq5" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.831336 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-a998-account-create-update-qmwlx"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.838953 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-n4t5s" event={"ID":"fbe5483b-39b1-4c4c-add8-d4ec00c26108","Type":"ContainerStarted","Data":"d8f05aadaf974c3a93b2a103e7c43adb2b1649ecbfb482d64bc1bf60fd9dc763"} Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.842705 4773 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/root-account-create-update-n4t5s" secret="" err="secret \"galera-openstack-cell1-dockercfg-wlmdx\" not found" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.848996 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-8cf3-account-create-update-sg972" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.882029 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5f675cf6c9-7wz9g"] Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.909827 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-xzkhs operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/nova-cell1-8744-account-create-update-4ljts" podUID="5398d068-4617-4d25-bd5e-18fa8ae900cc" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.911114 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-85878465f6-ss54r"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.915231 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_37262aee-18bc-423e-9dac-272af09de237/ovsdbserver-sb/0.log" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.915354 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"37262aee-18bc-423e-9dac-272af09de237","Type":"ContainerDied","Data":"c923c11d90d22498df1d6fddb79a76237239776b9fed2ad945a5da95d599b530"} Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.915382 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c923c11d90d22498df1d6fddb79a76237239776b9fed2ad945a5da95d599b530" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.935017 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-85878465f6-ss54r" podUID="885b1538-0800-47c6-bedd-284b67cd08ca" containerName="barbican-keystone-listener-log" containerID="cri-o://28e3f404590e3687f8e87e4072e952d98473da9c97f81c395fa0f7741ae8ab58" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.935670 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-85878465f6-ss54r" podUID="885b1538-0800-47c6-bedd-284b67cd08ca" containerName="barbican-keystone-listener" containerID="cri-o://1ac97d3fbc05cf7bafee02365ba6e8072b0190e37bc6dc6a85511de15808e82e" gracePeriod=30 Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.939303 4773 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.939359 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/fbe5483b-39b1-4c4c-add8-d4ec00c26108-operator-scripts podName:fbe5483b-39b1-4c4c-add8-d4ec00c26108 nodeName:}" failed. No retries permitted until 2026-01-22 12:19:51.439340504 +0000 UTC m=+1499.017456329 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/fbe5483b-39b1-4c4c-add8-d4ec00c26108-operator-scripts") pod "root-account-create-update-n4t5s" (UID: "fbe5483b-39b1-4c4c-add8-d4ec00c26108") : configmap "openstack-cell1-scripts" not found Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.942924 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-eb71-account-create-update-l8944" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.953418 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-5b5f5dd584-sql6t"] Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.957154 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="a4c14d2f-5507-4d08-be37-55d77b5491a3" containerName="rabbitmq" containerID="cri-o://571196d2d3c006d03c5548409716302b0c8d1e601a474e5f6e89f54c68e781d2" gracePeriod=604800 Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.970841 4773 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 22 12:19:50 crc kubenswrapper[4773]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 22 12:19:50 crc kubenswrapper[4773]: Jan 22 12:19:50 crc kubenswrapper[4773]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 22 12:19:50 crc kubenswrapper[4773]: Jan 22 12:19:50 crc kubenswrapper[4773]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 22 12:19:50 crc kubenswrapper[4773]: Jan 22 12:19:50 crc kubenswrapper[4773]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 22 12:19:50 crc kubenswrapper[4773]: Jan 22 12:19:50 crc kubenswrapper[4773]: if [ -n "" ]; then Jan 22 12:19:50 crc kubenswrapper[4773]: GRANT_DATABASE="" Jan 22 12:19:50 crc kubenswrapper[4773]: else Jan 22 12:19:50 crc kubenswrapper[4773]: GRANT_DATABASE="*" Jan 22 12:19:50 crc kubenswrapper[4773]: fi Jan 22 12:19:50 crc kubenswrapper[4773]: Jan 22 12:19:50 crc kubenswrapper[4773]: # going for maximum compatibility here: Jan 22 12:19:50 crc kubenswrapper[4773]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 22 12:19:50 crc kubenswrapper[4773]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 22 12:19:50 crc kubenswrapper[4773]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 22 12:19:50 crc kubenswrapper[4773]: # support updates Jan 22 12:19:50 crc kubenswrapper[4773]: Jan 22 12:19:50 crc kubenswrapper[4773]: $MYSQL_CMD < logger="UnhandledError" Jan 22 12:19:50 crc kubenswrapper[4773]: I0122 12:19:50.973615 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-8cf3-account-create-update-sg972"] Jan 22 12:19:50 crc kubenswrapper[4773]: E0122 12:19:50.973682 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"openstack-cell1-mariadb-root-db-secret\\\" not found\"" pod="openstack/root-account-create-update-n4t5s" podUID="fbe5483b-39b1-4c4c-add8-d4ec00c26108" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.014976 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_37262aee-18bc-423e-9dac-272af09de237/ovsdbserver-sb/0.log" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.015056 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.019907 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.020220 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="311884d0-65e1-4737-a7ba-efc94510b90b" containerName="nova-metadata-log" containerID="cri-o://2aaa460bcd03f0a9d75d04fec79f711d27405af19bd05d0be33e14861ac29dee" gracePeriod=30 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.020476 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="311884d0-65e1-4737-a7ba-efc94510b90b" containerName="nova-metadata-metadata" containerID="cri-o://84743181e9bcf205d60cacb07eb6ab1bf0f5fac0214bec2c405bd822f753600e" gracePeriod=30 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.050465 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-eb71-account-create-update-l8944"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.072525 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-sqv54"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081394 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerID="cf1b9f6bfafc8703b57d6396414ff00ca8d8f849a42e8a643dd6012f8b0ce046" exitCode=0 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081428 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerID="08cda7f85172a8a3837740fbc97bd5edfa4527dcc2c0715be2ebaf2bac22e6b7" exitCode=0 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081439 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerID="88011f552abb27872acf5ebae2c74881b96ee28ef4202e35cd34c74e6bc5e417" exitCode=0 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081445 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerID="c18e352494406d6cec346bf8c21ed393d59ff5e3ef7616cda1e8dcff47f2665d" exitCode=0 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081451 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerID="049e38f5a87408bec16aa9630dee0dfcbba21d9d01cf8238d9222592a966b9a8" exitCode=0 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081457 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerID="d468ff37ff2469b69cb119a74db751875822b460315803fe8a44e5bcdf6c0ecf" exitCode=0 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081463 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerID="dd9af132571b4bed486d397dfe0215f073e7f36c6b34aa87e560ea6a3f50a32e" exitCode=0 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081469 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerID="72835dd244d879ebb03d43571334445f863b46f2f1f1ee2655d94a3e3907e8b5" exitCode=0 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081475 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerID="fce77a5acf624e2bfa4fbd706de0a7bc45675dcc2a670de3f60e284d1156e388" 
exitCode=0 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081481 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerID="186e2a68f59e8f83347891cfa964ee1ba154ccbd86503a5b5bcfcac2d36382b6" exitCode=0 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081487 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerID="8614587119ff68cbab75cc2f49c17c865dbaaace22e54b8c99cb0c2c1183280b" exitCode=0 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081497 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerID="4550c34e059dc4785794eb4b0ed847aef46b118c21bcc68b6e25d13c02b29550" exitCode=0 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081565 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerDied","Data":"cf1b9f6bfafc8703b57d6396414ff00ca8d8f849a42e8a643dd6012f8b0ce046"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081591 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerDied","Data":"08cda7f85172a8a3837740fbc97bd5edfa4527dcc2c0715be2ebaf2bac22e6b7"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081612 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerDied","Data":"88011f552abb27872acf5ebae2c74881b96ee28ef4202e35cd34c74e6bc5e417"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081621 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerDied","Data":"c18e352494406d6cec346bf8c21ed393d59ff5e3ef7616cda1e8dcff47f2665d"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081629 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerDied","Data":"049e38f5a87408bec16aa9630dee0dfcbba21d9d01cf8238d9222592a966b9a8"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081637 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerDied","Data":"d468ff37ff2469b69cb119a74db751875822b460315803fe8a44e5bcdf6c0ecf"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081646 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerDied","Data":"dd9af132571b4bed486d397dfe0215f073e7f36c6b34aa87e560ea6a3f50a32e"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081654 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerDied","Data":"72835dd244d879ebb03d43571334445f863b46f2f1f1ee2655d94a3e3907e8b5"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081666 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerDied","Data":"fce77a5acf624e2bfa4fbd706de0a7bc45675dcc2a670de3f60e284d1156e388"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081678 4773 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerDied","Data":"186e2a68f59e8f83347891cfa964ee1ba154ccbd86503a5b5bcfcac2d36382b6"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081688 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerDied","Data":"8614587119ff68cbab75cc2f49c17c865dbaaace22e54b8c99cb0c2c1183280b"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.081700 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerDied","Data":"4550c34e059dc4785794eb4b0ed847aef46b118c21bcc68b6e25d13c02b29550"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.084456 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-sqv54"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.087032 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5f675cf6c9-7wz9g" event={"ID":"81a2f40b-f1de-449b-9e0f-45171ffa318c","Type":"ContainerStarted","Data":"fa13ffc11bb0cb1376f9ff0ebf9a898643326cce516ce038d1c5c490dca06b27"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.094055 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-7d56c9f5dc-qd9zs"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.094491 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" podUID="a5dc7991-ffde-4ef2-9668-e07d7c4aa614" containerName="barbican-worker-log" containerID="cri-o://7e617c6963bb509b84049d20e6f222d4d651113da5d06d4b6d6a4307baa6b527" gracePeriod=30 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.095378 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" podUID="a5dc7991-ffde-4ef2-9668-e07d7c4aa614" containerName="barbican-worker" containerID="cri-o://4624405b99d73b299a8828478704896ad836a6a31e3b351117d416ec3a4599ec" gracePeriod=30 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.102381 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-5f675cf6c9-7wz9g"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.117999 4773 generic.go:334] "Generic (PLEG): container finished" podID="b9c73637-566a-47b5-bba6-97948a973a47" containerID="3dfcb4bd6224b403fd1ea1d5e784918a520d4eaddf99c8f2c147ba29dd4f733e" exitCode=143 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.118087 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b9c73637-566a-47b5-bba6-97948a973a47","Type":"ContainerDied","Data":"3dfcb4bd6224b403fd1ea1d5e784918a520d4eaddf99c8f2c147ba29dd4f733e"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.121295 4773 generic.go:334] "Generic (PLEG): container finished" podID="04725b89-f1ec-45f8-a69a-5427230da499" containerID="a2c743ccc8058456082f15d2a6c949e38df6e8c5dbd52d76888c3f1f8d965d57" exitCode=143 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.121334 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"04725b89-f1ec-45f8-a69a-5427230da499","Type":"ContainerDied","Data":"a2c743ccc8058456082f15d2a6c949e38df6e8c5dbd52d76888c3f1f8d965d57"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.123974 
4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-57dd79856-kc66z"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.128426 4773 generic.go:334] "Generic (PLEG): container finished" podID="6c38a538-da2d-4097-9851-de6f8f2106c1" containerID="c508023fb186e6df6d8bd07919aae57bb3fd29d1d4aa3d274d27c206409ca260" exitCode=137 Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.129829 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config-data kube-api-access-fvcs6], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/barbican-api-57dd79856-kc66z" podUID="8510a6ae-d5e7-4da2-9351-1f0f848d6fef" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.133870 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-67f94f9664-cd544"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.134400 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-67f94f9664-cd544" podUID="62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" containerName="barbican-api-log" containerID="cri-o://2c623358850ff2ff54ddcf8841a4f6e0e66ef622633842e8bc1dabec1720076d" gracePeriod=30 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.134822 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-67f94f9664-cd544" podUID="62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" containerName="barbican-api" containerID="cri-o://de6c3ff5cc24752d11a5ecf283955ba10ea0171066338d4427fd50840f65e7fb" gracePeriod=30 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.143427 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.145550 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/37262aee-18bc-423e-9dac-272af09de237-ovsdbserver-sb-tls-certs\") pod \"37262aee-18bc-423e-9dac-272af09de237\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.145748 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/37262aee-18bc-423e-9dac-272af09de237-metrics-certs-tls-certs\") pod \"37262aee-18bc-423e-9dac-272af09de237\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.145765 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37262aee-18bc-423e-9dac-272af09de237-combined-ca-bundle\") pod \"37262aee-18bc-423e-9dac-272af09de237\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.145802 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78ngh\" (UniqueName: \"kubernetes.io/projected/37262aee-18bc-423e-9dac-272af09de237-kube-api-access-78ngh\") pod \"37262aee-18bc-423e-9dac-272af09de237\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.145821 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/37262aee-18bc-423e-9dac-272af09de237-ovsdb-rundir\") pod \"37262aee-18bc-423e-9dac-272af09de237\" (UID: 
\"37262aee-18bc-423e-9dac-272af09de237\") " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.145845 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37262aee-18bc-423e-9dac-272af09de237-config\") pod \"37262aee-18bc-423e-9dac-272af09de237\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.145869 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37262aee-18bc-423e-9dac-272af09de237-scripts\") pod \"37262aee-18bc-423e-9dac-272af09de237\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.145895 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"37262aee-18bc-423e-9dac-272af09de237\" (UID: \"37262aee-18bc-423e-9dac-272af09de237\") " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.147149 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="40f482a8-ca15-4e6f-a4af-4579c5f66101" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://100e88af17884d8fb4d35904ad9c62fc7acae0e26e17f113782fb3fad537ae96" gracePeriod=30 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.147450 4773 generic.go:334] "Generic (PLEG): container finished" podID="878a6ce0-f293-4690-9049-c90155c56ff3" containerID="ed05a9e5edc313b45c9c66ec916cf3d4f450ad7afe2d3bf8f73e940333547c9c" exitCode=143 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.147647 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-77688c4666-rzbr7" event={"ID":"878a6ce0-f293-4690-9049-c90155c56ff3","Type":"ContainerDied","Data":"ed05a9e5edc313b45c9c66ec916cf3d4f450ad7afe2d3bf8f73e940333547c9c"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.148429 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37262aee-18bc-423e-9dac-272af09de237-config" (OuterVolumeSpecName: "config") pod "37262aee-18bc-423e-9dac-272af09de237" (UID: "37262aee-18bc-423e-9dac-272af09de237"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.148834 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37262aee-18bc-423e-9dac-272af09de237-scripts" (OuterVolumeSpecName: "scripts") pod "37262aee-18bc-423e-9dac-272af09de237" (UID: "37262aee-18bc-423e-9dac-272af09de237"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.156691 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "37262aee-18bc-423e-9dac-272af09de237" (UID: "37262aee-18bc-423e-9dac-272af09de237"). InnerVolumeSpecName "local-storage08-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.157348 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37262aee-18bc-423e-9dac-272af09de237-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "37262aee-18bc-423e-9dac-272af09de237" (UID: "37262aee-18bc-423e-9dac-272af09de237"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.175242 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-n4t5s"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.177659 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-cell1-novncproxy-0" podUID="40f482a8-ca15-4e6f-a4af-4579c5f66101" containerName="nova-cell1-novncproxy-novncproxy" probeResult="failure" output="Get \"https://10.217.0.201:6080/vnc_lite.html\": dial tcp 10.217.0.201:6080: connect: connection refused" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.177922 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-a998-account-create-update-qmwlx" event={"ID":"d5686dfc-618a-4996-a4b0-b1bd31365084","Type":"ContainerStarted","Data":"0802e2b1ad6241477fbb5b6a49043e2f9c520a05560405570572bb4997004361"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.183423 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.190450 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-ef71-account-create-update-p6lhw"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.212820 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-tjdqh_efd60909-f66c-4dc2-948f-5f63c735ab6e/openstack-network-exporter/0.log" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.212912 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-tjdqh" event={"ID":"efd60909-f66c-4dc2-948f-5f63c735ab6e","Type":"ContainerDied","Data":"4a771f88881f4b144c25b6c8db1b1748e494fc57b83b33e7fe93c75afa43c430"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.212936 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a771f88881f4b144c25b6c8db1b1748e494fc57b83b33e7fe93c75afa43c430" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.224822 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37262aee-18bc-423e-9dac-272af09de237-kube-api-access-78ngh" (OuterVolumeSpecName: "kube-api-access-78ngh") pod "37262aee-18bc-423e-9dac-272af09de237" (UID: "37262aee-18bc-423e-9dac-272af09de237"). InnerVolumeSpecName "kube-api-access-78ngh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.225878 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jh5jz"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.227326 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37262aee-18bc-423e-9dac-272af09de237-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "37262aee-18bc-423e-9dac-272af09de237" (UID: "37262aee-18bc-423e-9dac-272af09de237"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.242518 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.242703 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="b9cded2e-ee73-4606-8df8-f52bb4bb139d" containerName="nova-cell1-conductor-conductor" containerID="cri-o://43101b119afcdef21c580975d02a69a753cc73834041f83f630e3b6a0cf4ac01" gracePeriod=30 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.249010 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37262aee-18bc-423e-9dac-272af09de237-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.249038 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78ngh\" (UniqueName: \"kubernetes.io/projected/37262aee-18bc-423e-9dac-272af09de237-kube-api-access-78ngh\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.249050 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/37262aee-18bc-423e-9dac-272af09de237-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.249059 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37262aee-18bc-423e-9dac-272af09de237-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.249067 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37262aee-18bc-423e-9dac-272af09de237-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.249224 4773 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.252582 4773 generic.go:334] "Generic (PLEG): container finished" podID="a393de80-9ad0-413e-a2a1-6ee14de22049" containerID="b02f864bc31ff6b515389a47f6d522f1429809938c2225e47c25f5a54c54ef37" exitCode=0 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.253182 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c54fcb95c-24djg" event={"ID":"a393de80-9ad0-413e-a2a1-6ee14de22049","Type":"ContainerDied","Data":"b02f864bc31ff6b515389a47f6d522f1429809938c2225e47c25f5a54c54ef37"} Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.253675 4773 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 22 12:19:51 crc kubenswrapper[4773]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 22 12:19:51 crc kubenswrapper[4773]: Jan 22 12:19:51 crc kubenswrapper[4773]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 22 12:19:51 crc kubenswrapper[4773]: Jan 22 12:19:51 crc kubenswrapper[4773]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 22 12:19:51 crc kubenswrapper[4773]: Jan 22 12:19:51 crc kubenswrapper[4773]: MYSQL_CMD="mysql -h -u root -P 3306" 
Jan 22 12:19:51 crc kubenswrapper[4773]: Jan 22 12:19:51 crc kubenswrapper[4773]: if [ -n "placement" ]; then Jan 22 12:19:51 crc kubenswrapper[4773]: GRANT_DATABASE="placement" Jan 22 12:19:51 crc kubenswrapper[4773]: else Jan 22 12:19:51 crc kubenswrapper[4773]: GRANT_DATABASE="*" Jan 22 12:19:51 crc kubenswrapper[4773]: fi Jan 22 12:19:51 crc kubenswrapper[4773]: Jan 22 12:19:51 crc kubenswrapper[4773]: # going for maximum compatibility here: Jan 22 12:19:51 crc kubenswrapper[4773]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 22 12:19:51 crc kubenswrapper[4773]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 22 12:19:51 crc kubenswrapper[4773]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 22 12:19:51 crc kubenswrapper[4773]: # support updates Jan 22 12:19:51 crc kubenswrapper[4773]: Jan 22 12:19:51 crc kubenswrapper[4773]: $MYSQL_CMD < logger="UnhandledError" Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.255274 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"placement-db-secret\\\" not found\"" pod="openstack/placement-a998-account-create-update-qmwlx" podUID="d5686dfc-618a-4996-a4b0-b1bd31365084" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.260409 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_acfbd9c7-d136-4478-a181-7e9fb3033557/ovsdbserver-nb/0.log" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.265824 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"acfbd9c7-d136-4478-a181-7e9fb3033557","Type":"ContainerDied","Data":"fbfaf9c64b19faa0bea647961cb3d7945a89634b2e9731dd67e3bce7fea639f9"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.265876 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fbfaf9c64b19faa0bea647961cb3d7945a89634b2e9731dd67e3bce7fea639f9" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.272056 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="96efe0ff-5c9e-465c-8e86-80035697b7d0" containerName="galera" containerID="cri-o://ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29" gracePeriod=30 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.278819 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-tjdqh_efd60909-f66c-4dc2-948f-5f63c735ab6e/openstack-network-exporter/0.log" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.279493 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-tjdqh" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.289854 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jh5jz"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.292320 4773 generic.go:334] "Generic (PLEG): container finished" podID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" exitCode=0 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.292406 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-fj6wj" event={"ID":"7865c5e1-9bcc-467f-8a58-1adfaf30ed28","Type":"ContainerDied","Data":"705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.292662 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.306713 4773 generic.go:334] "Generic (PLEG): container finished" podID="aed7f398-6b73-4830-aa88-db87be2b99a0" containerID="231d892f6fe508e5896980a2b5938e377e1f57b840196329664f9ef1b5b26d99" exitCode=0 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.307096 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"aed7f398-6b73-4830-aa88-db87be2b99a0","Type":"ContainerDied","Data":"231d892f6fe508e5896980a2b5938e377e1f57b840196329664f9ef1b5b26d99"} Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.317387 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_acfbd9c7-d136-4478-a181-7e9fb3033557/ovsdbserver-nb/0.log" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.317494 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.317893 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="1683acba-c129-4a7c-866c-421cdb0e6505" containerName="nova-cell0-conductor-conductor" containerID="cri-o://8f2f6fe57896d8e3136683fd067e394563605022d36d5828817ffbe5148b30a0" gracePeriod=30 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.317943 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.339627 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-n2bkh"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.354655 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/efd60909-f66c-4dc2-948f-5f63c735ab6e-ovn-rundir\") pod \"efd60909-f66c-4dc2-948f-5f63c735ab6e\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.354821 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/efd60909-f66c-4dc2-948f-5f63c735ab6e-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "efd60909-f66c-4dc2-948f-5f63c735ab6e" (UID: "efd60909-f66c-4dc2-948f-5f63c735ab6e"). InnerVolumeSpecName "ovn-rundir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.354917 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/efd60909-f66c-4dc2-948f-5f63c735ab6e-metrics-certs-tls-certs\") pod \"efd60909-f66c-4dc2-948f-5f63c735ab6e\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.355000 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/efd60909-f66c-4dc2-948f-5f63c735ab6e-ovs-rundir\") pod \"efd60909-f66c-4dc2-948f-5f63c735ab6e\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.355041 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efd60909-f66c-4dc2-948f-5f63c735ab6e-config\") pod \"efd60909-f66c-4dc2-948f-5f63c735ab6e\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.355161 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efd60909-f66c-4dc2-948f-5f63c735ab6e-combined-ca-bundle\") pod \"efd60909-f66c-4dc2-948f-5f63c735ab6e\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.355231 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dklht\" (UniqueName: \"kubernetes.io/projected/efd60909-f66c-4dc2-948f-5f63c735ab6e-kube-api-access-dklht\") pod \"efd60909-f66c-4dc2-948f-5f63c735ab6e\" (UID: \"efd60909-f66c-4dc2-948f-5f63c735ab6e\") " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.356271 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/efd60909-f66c-4dc2-948f-5f63c735ab6e-config" (OuterVolumeSpecName: "config") pod "efd60909-f66c-4dc2-948f-5f63c735ab6e" (UID: "efd60909-f66c-4dc2-948f-5f63c735ab6e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.356485 4773 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/efd60909-f66c-4dc2-948f-5f63c735ab6e-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.358426 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/efd60909-f66c-4dc2-948f-5f63c735ab6e-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "efd60909-f66c-4dc2-948f-5f63c735ab6e" (UID: "efd60909-f66c-4dc2-948f-5f63c735ab6e"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.359105 4773 secret.go:188] Couldn't get secret openstack/barbican-api-config-data: secret "barbican-api-config-data" not found Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.359178 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data-custom podName:8510a6ae-d5e7-4da2-9351-1f0f848d6fef nodeName:}" failed. No retries permitted until 2026-01-22 12:19:52.359161215 +0000 UTC m=+1499.937277040 (durationBeforeRetry 1s). 
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.365740 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efd60909-f66c-4dc2-948f-5f63c735ab6e-kube-api-access-dklht" (OuterVolumeSpecName: "kube-api-access-dklht") pod "efd60909-f66c-4dc2-948f-5f63c735ab6e" (UID: "efd60909-f66c-4dc2-948f-5f63c735ab6e"). InnerVolumeSpecName "kube-api-access-dklht". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.384615 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-n2bkh"]
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.388964 4773 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc"
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.426417 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.442494 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="a688a76d-2498-4542-8285-709caf211e8a" containerName="rabbitmq" containerID="cri-o://2495d52a3c6cc3db0f8b3d4246a26ad16514e0b79516c04d720266d790a9bf4c" gracePeriod=604800
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.457257 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-ovsdbserver-nb\") pod \"e4868de7-6200-4cd9-948b-e0cdbbac5838\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") "
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.457335 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-config\") pod \"e4868de7-6200-4cd9-948b-e0cdbbac5838\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") "
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.457369 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-dns-svc\") pod \"e4868de7-6200-4cd9-948b-e0cdbbac5838\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") "
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.457393 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"acfbd9c7-d136-4478-a181-7e9fb3033557\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") "
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.457438 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acfbd9c7-d136-4478-a181-7e9fb3033557-scripts\") pod \"acfbd9c7-d136-4478-a181-7e9fb3033557\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") "
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.457475 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acfbd9c7-d136-4478-a181-7e9fb3033557-config\") pod \"acfbd9c7-d136-4478-a181-7e9fb3033557\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") "
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.457527 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/acfbd9c7-d136-4478-a181-7e9fb3033557-ovsdb-rundir\") pod \"acfbd9c7-d136-4478-a181-7e9fb3033557\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") "
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.457549 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-dns-swift-storage-0\") pod \"e4868de7-6200-4cd9-948b-e0cdbbac5838\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") "
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.457581 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5kfv\" (UniqueName: \"kubernetes.io/projected/e4868de7-6200-4cd9-948b-e0cdbbac5838-kube-api-access-d5kfv\") pod \"e4868de7-6200-4cd9-948b-e0cdbbac5838\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") "
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.457623 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acfbd9c7-d136-4478-a181-7e9fb3033557-combined-ca-bundle\") pod \"acfbd9c7-d136-4478-a181-7e9fb3033557\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") "
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.457652 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/acfbd9c7-d136-4478-a181-7e9fb3033557-metrics-certs-tls-certs\") pod \"acfbd9c7-d136-4478-a181-7e9fb3033557\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") "
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.457708 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-ovsdbserver-sb\") pod \"e4868de7-6200-4cd9-948b-e0cdbbac5838\" (UID: \"e4868de7-6200-4cd9-948b-e0cdbbac5838\") "
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.457747 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/acfbd9c7-d136-4478-a181-7e9fb3033557-ovsdbserver-nb-tls-certs\") pod \"acfbd9c7-d136-4478-a181-7e9fb3033557\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") "
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.457779 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6lml\" (UniqueName: \"kubernetes.io/projected/acfbd9c7-d136-4478-a181-7e9fb3033557-kube-api-access-h6lml\") pod \"acfbd9c7-d136-4478-a181-7e9fb3033557\" (UID: \"acfbd9c7-d136-4478-a181-7e9fb3033557\") "
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.458219 4773 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.458231 4773 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/efd60909-f66c-4dc2-948f-5f63c735ab6e-ovs-rundir\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.458241 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efd60909-f66c-4dc2-948f-5f63c735ab6e-config\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.458251 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dklht\" (UniqueName: \"kubernetes.io/projected/efd60909-f66c-4dc2-948f-5f63c735ab6e-kube-api-access-dklht\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.458688 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/acfbd9c7-d136-4478-a181-7e9fb3033557-scripts" (OuterVolumeSpecName: "scripts") pod "acfbd9c7-d136-4478-a181-7e9fb3033557" (UID: "acfbd9c7-d136-4478-a181-7e9fb3033557"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.459232 4773 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found
Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.459287 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/fbe5483b-39b1-4c4c-add8-d4ec00c26108-operator-scripts podName:fbe5483b-39b1-4c4c-add8-d4ec00c26108 nodeName:}" failed. No retries permitted until 2026-01-22 12:19:52.459270428 +0000 UTC m=+1500.037386243 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/fbe5483b-39b1-4c4c-add8-d4ec00c26108-operator-scripts") pod "root-account-create-update-n4t5s" (UID: "fbe5483b-39b1-4c4c-add8-d4ec00c26108") : configmap "openstack-cell1-scripts" not found
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.462614 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/acfbd9c7-d136-4478-a181-7e9fb3033557-config" (OuterVolumeSpecName: "config") pod "acfbd9c7-d136-4478-a181-7e9fb3033557" (UID: "acfbd9c7-d136-4478-a181-7e9fb3033557"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.462895 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acfbd9c7-d136-4478-a181-7e9fb3033557-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "acfbd9c7-d136-4478-a181-7e9fb3033557" (UID: "acfbd9c7-d136-4478-a181-7e9fb3033557"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.499478 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "acfbd9c7-d136-4478-a181-7e9fb3033557" (UID: "acfbd9c7-d136-4478-a181-7e9fb3033557"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.508820 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37262aee-18bc-423e-9dac-272af09de237-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "37262aee-18bc-423e-9dac-272af09de237" (UID: "37262aee-18bc-423e-9dac-272af09de237"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.518501 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acfbd9c7-d136-4478-a181-7e9fb3033557-kube-api-access-h6lml" (OuterVolumeSpecName: "kube-api-access-h6lml") pod "acfbd9c7-d136-4478-a181-7e9fb3033557" (UID: "acfbd9c7-d136-4478-a181-7e9fb3033557"). InnerVolumeSpecName "kube-api-access-h6lml". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.535450 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efd60909-f66c-4dc2-948f-5f63c735ab6e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "efd60909-f66c-4dc2-948f-5f63c735ab6e" (UID: "efd60909-f66c-4dc2-948f-5f63c735ab6e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.536040 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4868de7-6200-4cd9-948b-e0cdbbac5838-kube-api-access-d5kfv" (OuterVolumeSpecName: "kube-api-access-d5kfv") pod "e4868de7-6200-4cd9-948b-e0cdbbac5838" (UID: "e4868de7-6200-4cd9-948b-e0cdbbac5838"). InnerVolumeSpecName "kube-api-access-d5kfv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.563122 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6c38a538-da2d-4097-9851-de6f8f2106c1-openstack-config-secret\") pod \"6c38a538-da2d-4097-9851-de6f8f2106c1\" (UID: \"6c38a538-da2d-4097-9851-de6f8f2106c1\") " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.563359 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6c38a538-da2d-4097-9851-de6f8f2106c1-openstack-config\") pod \"6c38a538-da2d-4097-9851-de6f8f2106c1\" (UID: \"6c38a538-da2d-4097-9851-de6f8f2106c1\") " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.563438 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2z2p\" (UniqueName: \"kubernetes.io/projected/6c38a538-da2d-4097-9851-de6f8f2106c1-kube-api-access-r2z2p\") pod \"6c38a538-da2d-4097-9851-de6f8f2106c1\" (UID: \"6c38a538-da2d-4097-9851-de6f8f2106c1\") " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.563569 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c38a538-da2d-4097-9851-de6f8f2106c1-combined-ca-bundle\") pod \"6c38a538-da2d-4097-9851-de6f8f2106c1\" (UID: \"6c38a538-da2d-4097-9851-de6f8f2106c1\") " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.563998 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzkhs\" (UniqueName: \"kubernetes.io/projected/5398d068-4617-4d25-bd5e-18fa8ae900cc-kube-api-access-xzkhs\") pod \"nova-cell1-8744-account-create-update-4ljts\" (UID: \"5398d068-4617-4d25-bd5e-18fa8ae900cc\") " pod="openstack/nova-cell1-8744-account-create-update-4ljts" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.564093 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/5398d068-4617-4d25-bd5e-18fa8ae900cc-operator-scripts\") pod \"nova-cell1-8744-account-create-update-4ljts\" (UID: \"5398d068-4617-4d25-bd5e-18fa8ae900cc\") " pod="openstack/nova-cell1-8744-account-create-update-4ljts" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.564161 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6lml\" (UniqueName: \"kubernetes.io/projected/acfbd9c7-d136-4478-a181-7e9fb3033557-kube-api-access-h6lml\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.564177 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efd60909-f66c-4dc2-948f-5f63c735ab6e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.564189 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/37262aee-18bc-423e-9dac-272af09de237-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.564211 4773 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.564221 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acfbd9c7-d136-4478-a181-7e9fb3033557-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.564230 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acfbd9c7-d136-4478-a181-7e9fb3033557-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.564239 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/acfbd9c7-d136-4478-a181-7e9fb3033557-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.564248 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5kfv\" (UniqueName: \"kubernetes.io/projected/e4868de7-6200-4cd9-948b-e0cdbbac5838-kube-api-access-d5kfv\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.572907 4773 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.572985 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5398d068-4617-4d25-bd5e-18fa8ae900cc-operator-scripts podName:5398d068-4617-4d25-bd5e-18fa8ae900cc nodeName:}" failed. No retries permitted until 2026-01-22 12:19:53.57296545 +0000 UTC m=+1501.151081285 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/5398d068-4617-4d25-bd5e-18fa8ae900cc-operator-scripts") pod "nova-cell1-8744-account-create-update-4ljts" (UID: "5398d068-4617-4d25-bd5e-18fa8ae900cc") : configmap "openstack-cell1-scripts" not found Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.583647 4773 projected.go:194] Error preparing data for projected volume kube-api-access-xzkhs for pod openstack/nova-cell1-8744-account-create-update-4ljts: failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.583721 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/5398d068-4617-4d25-bd5e-18fa8ae900cc-kube-api-access-xzkhs podName:5398d068-4617-4d25-bd5e-18fa8ae900cc nodeName:}" failed. No retries permitted until 2026-01-22 12:19:53.583702681 +0000 UTC m=+1501.161818506 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-xzkhs" (UniqueName: "kubernetes.io/projected/5398d068-4617-4d25-bd5e-18fa8ae900cc-kube-api-access-xzkhs") pod "nova-cell1-8744-account-create-update-4ljts" (UID: "5398d068-4617-4d25-bd5e-18fa8ae900cc") : failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.593505 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c38a538-da2d-4097-9851-de6f8f2106c1-kube-api-access-r2z2p" (OuterVolumeSpecName: "kube-api-access-r2z2p") pod "6c38a538-da2d-4097-9851-de6f8f2106c1" (UID: "6c38a538-da2d-4097-9851-de6f8f2106c1"). InnerVolumeSpecName "kube-api-access-r2z2p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.597963 4773 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.615952 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acfbd9c7-d136-4478-a181-7e9fb3033557-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "acfbd9c7-d136-4478-a181-7e9fb3033557" (UID: "acfbd9c7-d136-4478-a181-7e9fb3033557"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.647412 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c38a538-da2d-4097-9851-de6f8f2106c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6c38a538-da2d-4097-9851-de6f8f2106c1" (UID: "6c38a538-da2d-4097-9851-de6f8f2106c1"). InnerVolumeSpecName "combined-ca-bundle". 
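The nestedpendingoperations errors in this stretch show the kubelet's per-volume retry backoff doubling: the first failed MountVolume.SetUp is retried after 1s, the next after 2s, later ones after 4s. A minimal shell sketch of the same wait-with-doubling pattern, polling for the missing ConfigMap from outside the node (names taken from the log; the cap value is an arbitrary choice for this sketch, not kubelet's actual limit):

    #!/bin/bash
    # Poll until the ConfigMap the pods are waiting on exists, doubling the
    # delay between attempts like the durationBeforeRetry values above.
    delay=1
    until kubectl -n openstack get configmap openstack-cell1-scripts >/dev/null 2>&1; do
        echo "configmap not found; retrying in ${delay}s"
        sleep "${delay}"
        delay=$(( delay * 2 ))
        [ "${delay}" -gt 300 ] && delay=300   # cap chosen for the sketch only
    done
    echo "configmap present; kubelet's next retry should mount it"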
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.662405 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-0af4-account-create-update-gctq5"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.667693 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c38a538-da2d-4097-9851-de6f8f2106c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.667946 4773 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.667955 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acfbd9c7-d136-4478-a181-7e9fb3033557-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.667963 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2z2p\" (UniqueName: \"kubernetes.io/projected/6c38a538-da2d-4097-9851-de6f8f2106c1-kube-api-access-r2z2p\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.668027 4773 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.668074 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-config-data podName:a688a76d-2498-4542-8285-709caf211e8a nodeName:}" failed. No retries permitted until 2026-01-22 12:19:53.668058932 +0000 UTC m=+1501.246174757 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-config-data") pod "rabbitmq-server-0" (UID: "a688a76d-2498-4542-8285-709caf211e8a") : configmap "rabbitmq-config-data" not found Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.719134 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e4868de7-6200-4cd9-948b-e0cdbbac5838" (UID: "e4868de7-6200-4cd9-948b-e0cdbbac5838"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.742599 4773 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 22 12:19:51 crc kubenswrapper[4773]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 22 12:19:51 crc kubenswrapper[4773]: Jan 22 12:19:51 crc kubenswrapper[4773]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 22 12:19:51 crc kubenswrapper[4773]: Jan 22 12:19:51 crc kubenswrapper[4773]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 22 12:19:51 crc kubenswrapper[4773]: Jan 22 12:19:51 crc kubenswrapper[4773]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 22 12:19:51 crc kubenswrapper[4773]: Jan 22 12:19:51 crc kubenswrapper[4773]: if [ -n "barbican" ]; then Jan 22 12:19:51 crc kubenswrapper[4773]: GRANT_DATABASE="barbican" Jan 22 12:19:51 crc kubenswrapper[4773]: else Jan 22 12:19:51 crc kubenswrapper[4773]: GRANT_DATABASE="*" Jan 22 12:19:51 crc kubenswrapper[4773]: fi Jan 22 12:19:51 crc kubenswrapper[4773]: Jan 22 12:19:51 crc kubenswrapper[4773]: # going for maximum compatibility here: Jan 22 12:19:51 crc kubenswrapper[4773]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 22 12:19:51 crc kubenswrapper[4773]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 22 12:19:51 crc kubenswrapper[4773]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 22 12:19:51 crc kubenswrapper[4773]: # support updates Jan 22 12:19:51 crc kubenswrapper[4773]: Jan 22 12:19:51 crc kubenswrapper[4773]: $MYSQL_CMD < logger="UnhandledError" Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.743684 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack/barbican-0af4-account-create-update-gctq5" podUID="bfff6d4b-eee9-4ceb-a420-512d1ba28760" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.770088 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.777901 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-config" (OuterVolumeSpecName: "config") pod "e4868de7-6200-4cd9-948b-e0cdbbac5838" (UID: "e4868de7-6200-4cd9-948b-e0cdbbac5838"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.822739 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-6f844cff7c-gmg8h"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.822988 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-6f844cff7c-gmg8h" podUID="525ebe62-dc27-40fa-97a9-31346c6145a1" containerName="proxy-httpd" containerID="cri-o://521549d44714482372b02d7ce753faaca6ca987699d2815bb433305658a9c8f6" gracePeriod=30 Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.823071 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-6f844cff7c-gmg8h" podUID="525ebe62-dc27-40fa-97a9-31346c6145a1" containerName="proxy-server" containerID="cri-o://aee88f91e8816d6bd6a9431e8b236642a9b4210f82d3835d5b43ad50cab3ce23" gracePeriod=30 Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.841123 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="43101b119afcdef21c580975d02a69a753cc73834041f83f630e3b6a0cf4ac01" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.843014 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="43101b119afcdef21c580975d02a69a753cc73834041f83f630e3b6a0cf4ac01" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.845224 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="43101b119afcdef21c580975d02a69a753cc73834041f83f630e3b6a0cf4ac01" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.845269 4773 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="b9cded2e-ee73-4606-8df8-f52bb4bb139d" containerName="nova-cell1-conductor-conductor" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.871672 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.921589 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37262aee-18bc-423e-9dac-272af09de237-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "37262aee-18bc-423e-9dac-272af09de237" (UID: "37262aee-18bc-423e-9dac-272af09de237"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.930201 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-8cf3-account-create-update-sg972"] Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.956348 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c38a538-da2d-4097-9851-de6f8f2106c1-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "6c38a538-da2d-4097-9851-de6f8f2106c1" (UID: "6c38a538-da2d-4097-9851-de6f8f2106c1"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.974780 4773 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6c38a538-da2d-4097-9851-de6f8f2106c1-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: I0122 12:19:51.974811 4773 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/37262aee-18bc-423e-9dac-272af09de237-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.979595 4773 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 22 12:19:51 crc kubenswrapper[4773]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 22 12:19:51 crc kubenswrapper[4773]: Jan 22 12:19:51 crc kubenswrapper[4773]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 22 12:19:51 crc kubenswrapper[4773]: Jan 22 12:19:51 crc kubenswrapper[4773]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 22 12:19:51 crc kubenswrapper[4773]: Jan 22 12:19:51 crc kubenswrapper[4773]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 22 12:19:51 crc kubenswrapper[4773]: Jan 22 12:19:51 crc kubenswrapper[4773]: if [ -n "nova_api" ]; then Jan 22 12:19:51 crc kubenswrapper[4773]: GRANT_DATABASE="nova_api" Jan 22 12:19:51 crc kubenswrapper[4773]: else Jan 22 12:19:51 crc kubenswrapper[4773]: GRANT_DATABASE="*" Jan 22 12:19:51 crc kubenswrapper[4773]: fi Jan 22 12:19:51 crc kubenswrapper[4773]: Jan 22 12:19:51 crc kubenswrapper[4773]: # going for maximum compatibility here: Jan 22 12:19:51 crc kubenswrapper[4773]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 22 12:19:51 crc kubenswrapper[4773]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 22 12:19:51 crc kubenswrapper[4773]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 22 12:19:51 crc kubenswrapper[4773]: # support updates Jan 22 12:19:51 crc kubenswrapper[4773]: Jan 22 12:19:51 crc kubenswrapper[4773]: $MYSQL_CMD < logger="UnhandledError" Jan 22 12:19:51 crc kubenswrapper[4773]: E0122 12:19:51.981537 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack/nova-api-8cf3-account-create-update-sg972" podUID="d367a46c-d551-46d3-b85f-a4499a1c4c0f" Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.033434 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a5a2900d8f3ec8ba753c4eaa60a956c6d8edc660289208aed85a83444906c24b" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.042879 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a5a2900d8f3ec8ba753c4eaa60a956c6d8edc660289208aed85a83444906c24b" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.044495 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a5a2900d8f3ec8ba753c4eaa60a956c6d8edc660289208aed85a83444906c24b" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.044546 4773 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="21901911-8523-4adc-9851-336360f4c11e" containerName="ovn-northd" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.087386 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acfbd9c7-d136-4478-a181-7e9fb3033557-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "acfbd9c7-d136-4478-a181-7e9fb3033557" (UID: "acfbd9c7-d136-4478-a181-7e9fb3033557"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.121641 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c38a538-da2d-4097-9851-de6f8f2106c1-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "6c38a538-da2d-4097-9851-de6f8f2106c1" (UID: "6c38a538-da2d-4097-9851-de6f8f2106c1"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.129168 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efd60909-f66c-4dc2-948f-5f63c735ab6e-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "efd60909-f66c-4dc2-948f-5f63c735ab6e" (UID: "efd60909-f66c-4dc2-948f-5f63c735ab6e"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.148718 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-eb71-account-create-update-l8944"] Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.167571 4773 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 22 12:19:52 crc kubenswrapper[4773]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 22 12:19:52 crc kubenswrapper[4773]: Jan 22 12:19:52 crc kubenswrapper[4773]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 22 12:19:52 crc kubenswrapper[4773]: Jan 22 12:19:52 crc kubenswrapper[4773]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 22 12:19:52 crc kubenswrapper[4773]: Jan 22 12:19:52 crc kubenswrapper[4773]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 22 12:19:52 crc kubenswrapper[4773]: Jan 22 12:19:52 crc kubenswrapper[4773]: if [ -n "nova_cell0" ]; then Jan 22 12:19:52 crc kubenswrapper[4773]: GRANT_DATABASE="nova_cell0" Jan 22 12:19:52 crc kubenswrapper[4773]: else Jan 22 12:19:52 crc kubenswrapper[4773]: GRANT_DATABASE="*" Jan 22 12:19:52 crc kubenswrapper[4773]: fi Jan 22 12:19:52 crc kubenswrapper[4773]: Jan 22 12:19:52 crc kubenswrapper[4773]: # going for maximum compatibility here: Jan 22 12:19:52 crc kubenswrapper[4773]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 22 12:19:52 crc kubenswrapper[4773]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 22 12:19:52 crc kubenswrapper[4773]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 22 12:19:52 crc kubenswrapper[4773]: # support updates Jan 22 12:19:52 crc kubenswrapper[4773]: Jan 22 12:19:52 crc kubenswrapper[4773]: $MYSQL_CMD < logger="UnhandledError" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.169740 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e4868de7-6200-4cd9-948b-e0cdbbac5838" (UID: "e4868de7-6200-4cd9-948b-e0cdbbac5838"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.169743 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell0-db-secret\\\" not found\"" pod="openstack/nova-cell0-eb71-account-create-update-l8944" podUID="fbc373ba-8a4b-44be-a687-63de038d5fa3" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.180498 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e4868de7-6200-4cd9-948b-e0cdbbac5838" (UID: "e4868de7-6200-4cd9-948b-e0cdbbac5838"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.182204 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e4868de7-6200-4cd9-948b-e0cdbbac5838" (UID: "e4868de7-6200-4cd9-948b-e0cdbbac5838"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.195198 4773 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/efd60909-f66c-4dc2-948f-5f63c735ab6e-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.195227 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.195240 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/acfbd9c7-d136-4478-a181-7e9fb3033557-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.195252 4773 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6c38a538-da2d-4097-9851-de6f8f2106c1-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.195262 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.195273 4773 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e4868de7-6200-4cd9-948b-e0cdbbac5838-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.227634 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acfbd9c7-d136-4478-a181-7e9fb3033557-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "acfbd9c7-d136-4478-a181-7e9fb3033557" (UID: "acfbd9c7-d136-4478-a181-7e9fb3033557"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.297330 4773 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/acfbd9c7-d136-4478-a181-7e9fb3033557-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.343539 4773 generic.go:334] "Generic (PLEG): container finished" podID="a5dc7991-ffde-4ef2-9668-e07d7c4aa614" containerID="7e617c6963bb509b84049d20e6f222d4d651113da5d06d4b6d6a4307baa6b527" exitCode=143 Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.343623 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" event={"ID":"a5dc7991-ffde-4ef2-9668-e07d7c4aa614","Type":"ContainerDied","Data":"7e617c6963bb509b84049d20e6f222d4d651113da5d06d4b6d6a4307baa6b527"} Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.343990 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-85878465f6-ss54r" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.346271 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5f675cf6c9-7wz9g" event={"ID":"81a2f40b-f1de-449b-9e0f-45171ffa318c","Type":"ContainerStarted","Data":"46a2db3b91ee50e8ade9df5dbd38ca1fb9b622a943ee6732a2160bba788b0474"} Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.346314 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5f675cf6c9-7wz9g" event={"ID":"81a2f40b-f1de-449b-9e0f-45171ffa318c","Type":"ContainerStarted","Data":"87124795355c6f0d6915d6a49102ef60c2f5d2076a9656aff323f785f19478f7"} Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.346417 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-5f675cf6c9-7wz9g" podUID="81a2f40b-f1de-449b-9e0f-45171ffa318c" containerName="barbican-worker-log" containerID="cri-o://87124795355c6f0d6915d6a49102ef60c2f5d2076a9656aff323f785f19478f7" gracePeriod=30 Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.346613 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-5f675cf6c9-7wz9g" podUID="81a2f40b-f1de-449b-9e0f-45171ffa318c" containerName="barbican-worker" containerID="cri-o://46a2db3b91ee50e8ade9df5dbd38ca1fb9b622a943ee6732a2160bba788b0474" gracePeriod=30 Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.355880 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8cf3-account-create-update-sg972" event={"ID":"d367a46c-d551-46d3-b85f-a4499a1c4c0f","Type":"ContainerStarted","Data":"da393a3992304732dc4631406b6cc24582aca381d3bd6bc28e2dbef1f1eb2d9f"} Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.372371 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.372379 4773 scope.go:117] "RemoveContainer" containerID="c508023fb186e6df6d8bd07919aae57bb3fd29d1d4aa3d274d27c206409ca260" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.391266 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0af4-account-create-update-gctq5" event={"ID":"bfff6d4b-eee9-4ceb-a420-512d1ba28760","Type":"ContainerStarted","Data":"7e85a9c4ece38ecb12424cefd99a202564c42b0b73d16ddd94d5b773db5c6d1c"} Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.403510 4773 secret.go:188] Couldn't get secret openstack/barbican-api-config-data: secret "barbican-api-config-data" not found Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.403579 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data-custom podName:8510a6ae-d5e7-4da2-9351-1f0f848d6fef nodeName:}" failed. No retries permitted until 2026-01-22 12:19:54.403564202 +0000 UTC m=+1501.981680027 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data-custom") pod "barbican-api-57dd79856-kc66z" (UID: "8510a6ae-d5e7-4da2-9351-1f0f848d6fef") : secret "barbican-api-config-data" not found Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.403943 4773 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.403975 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-config-data podName:a4c14d2f-5507-4d08-be37-55d77b5491a3 nodeName:}" failed. No retries permitted until 2026-01-22 12:19:56.403967093 +0000 UTC m=+1503.982082918 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-config-data") pod "rabbitmq-cell1-server-0" (UID: "a4c14d2f-5507-4d08-be37-55d77b5491a3") : configmap "rabbitmq-cell1-config-data" not found Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.403993 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5f675cf6c9-7wz9g" podStartSLOduration=5.403969073 podStartE2EDuration="5.403969073s" podCreationTimestamp="2026-01-22 12:19:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:19:52.398082469 +0000 UTC m=+1499.976198294" watchObservedRunningTime="2026-01-22 12:19:52.403969073 +0000 UTC m=+1499.982084898" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.425064 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" event={"ID":"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15","Type":"ContainerStarted","Data":"dbc1b5a000ae79e34485e55c8fd8281fd6a21a3c49f844792e4c8e5d1d098c81"} Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.425130 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" event={"ID":"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15","Type":"ContainerStarted","Data":"889e6af5b48631940b0b05bbbc915b42895876b4aa8e1b9378483400c385f7a5"} Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.425329 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" podUID="bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15" containerName="barbican-keystone-listener-log" containerID="cri-o://889e6af5b48631940b0b05bbbc915b42895876b4aa8e1b9378483400c385f7a5" gracePeriod=30 Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.426091 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" podUID="bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15" containerName="barbican-keystone-listener" containerID="cri-o://dbc1b5a000ae79e34485e55c8fd8281fd6a21a3c49f844792e4c8e5d1d098c81" gracePeriod=30 Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.476857 4773 generic.go:334] "Generic (PLEG): container finished" podID="637a651c-e338-45ba-8bd3-a8f838500893" containerID="f208616daee705d1c4d908c528f31148f8ced61b33147903c586028760cfb3c8" exitCode=143 Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.477149 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"637a651c-e338-45ba-8bd3-a8f838500893","Type":"ContainerDied","Data":"f208616daee705d1c4d908c528f31148f8ced61b33147903c586028760cfb3c8"} Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.519921 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7m22r\" (UniqueName: \"kubernetes.io/projected/885b1538-0800-47c6-bedd-284b67cd08ca-kube-api-access-7m22r\") pod \"885b1538-0800-47c6-bedd-284b67cd08ca\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") " Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.520061 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/885b1538-0800-47c6-bedd-284b67cd08ca-config-data\") pod \"885b1538-0800-47c6-bedd-284b67cd08ca\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") " 
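The "Killing container with a grace period" entries in this section show per-container termination deadlines: 30s for most services, but 604800s (seven days) for rabbitmq-server-0, giving the broker time to shut down safely. The gracePeriod logged here normally comes from the pod spec's terminationGracePeriodSeconds, and a delete can override it; a sketch with standard kubectl flags and names taken from the log:

    #!/bin/bash
    # Read the grace period the kubelet will honor on delete:
    kubectl -n openstack get pod rabbitmq-server-0 \
        -o jsonpath='{.spec.terminationGracePeriodSeconds}'
    # Override it for one deletion: SIGTERM at t=0, SIGKILL after 30s.
    kubectl -n openstack delete pod rabbitmq-server-0 --grace-period=30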
Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.520123 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/885b1538-0800-47c6-bedd-284b67cd08ca-logs\") pod \"885b1538-0800-47c6-bedd-284b67cd08ca\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") "
Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.520157 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/885b1538-0800-47c6-bedd-284b67cd08ca-combined-ca-bundle\") pod \"885b1538-0800-47c6-bedd-284b67cd08ca\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") "
Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.520242 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/885b1538-0800-47c6-bedd-284b67cd08ca-config-data-custom\") pod \"885b1538-0800-47c6-bedd-284b67cd08ca\" (UID: \"885b1538-0800-47c6-bedd-284b67cd08ca\") "
Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.521246 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvcs6\" (UniqueName: \"kubernetes.io/projected/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-kube-api-access-fvcs6\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.521321 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data\") pod \"barbican-api-57dd79856-kc66z\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " pod="openstack/barbican-api-57dd79856-kc66z"
Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.523003 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/885b1538-0800-47c6-bedd-284b67cd08ca-logs" (OuterVolumeSpecName: "logs") pod "885b1538-0800-47c6-bedd-284b67cd08ca" (UID: "885b1538-0800-47c6-bedd-284b67cd08ca"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.529650 4773 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found
Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.529720 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/fbe5483b-39b1-4c4c-add8-d4ec00c26108-operator-scripts podName:fbe5483b-39b1-4c4c-add8-d4ec00c26108 nodeName:}" failed. No retries permitted until 2026-01-22 12:19:54.529699113 +0000 UTC m=+1502.107814938 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/fbe5483b-39b1-4c4c-add8-d4ec00c26108-operator-scripts") pod "root-account-create-update-n4t5s" (UID: "fbe5483b-39b1-4c4c-add8-d4ec00c26108") : configmap "openstack-cell1-scripts" not found
Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.530018 4773 secret.go:188] Couldn't get secret openstack/barbican-config-data: secret "barbican-config-data" not found
Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.530070 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data podName:8510a6ae-d5e7-4da2-9351-1f0f848d6fef nodeName:}" failed. No retries permitted until 2026-01-22 12:19:56.530057393 +0000 UTC m=+1504.108173218 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data") pod "barbican-api-57dd79856-kc66z" (UID: "8510a6ae-d5e7-4da2-9351-1f0f848d6fef") : secret "barbican-config-data" not found
Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.530356 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/885b1538-0800-47c6-bedd-284b67cd08ca-logs\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.534746 4773 configmap.go:193] Couldn't get configMap openstack/ovncontroller-scripts: configmap "ovncontroller-scripts" not found
Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.534994 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/757f37a0-3cc7-4d57-a956-83d236d6cebc-scripts podName:757f37a0-3cc7-4d57-a956-83d236d6cebc nodeName:}" failed. No retries permitted until 2026-01-22 12:19:56.534840227 +0000 UTC m=+1504.112956052 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/757f37a0-3cc7-4d57-a956-83d236d6cebc-scripts") pod "ovn-controller-rp6dh" (UID: "757f37a0-3cc7-4d57-a956-83d236d6cebc") : configmap "ovncontroller-scripts" not found
Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.536763 4773 projected.go:194] Error preparing data for projected volume kube-api-access-fvcs6 for pod openstack/barbican-api-57dd79856-kc66z: failed to fetch token: serviceaccounts "barbican-barbican" not found
Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.536833 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-kube-api-access-fvcs6 podName:8510a6ae-d5e7-4da2-9351-1f0f848d6fef nodeName:}" failed. No retries permitted until 2026-01-22 12:19:56.536813972 +0000 UTC m=+1504.114929797 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-fvcs6" (UniqueName: "kubernetes.io/projected/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-kube-api-access-fvcs6") pod "barbican-api-57dd79856-kc66z" (UID: "8510a6ae-d5e7-4da2-9351-1f0f848d6fef") : failed to fetch token: serviceaccounts "barbican-barbican" not found
Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.551390 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" podStartSLOduration=5.55137252 podStartE2EDuration="5.55137252s" podCreationTimestamp="2026-01-22 12:19:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 12:19:52.459504098 +0000 UTC m=+1500.037619943" watchObservedRunningTime="2026-01-22 12:19:52.55137252 +0000 UTC m=+1500.129488345"
Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.566593 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/885b1538-0800-47c6-bedd-284b67cd08ca-kube-api-access-7m22r" (OuterVolumeSpecName: "kube-api-access-7m22r") pod "885b1538-0800-47c6-bedd-284b67cd08ca" (UID: "885b1538-0800-47c6-bedd-284b67cd08ca"). InnerVolumeSpecName "kube-api-access-7m22r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.569337 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/885b1538-0800-47c6-bedd-284b67cd08ca-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "885b1538-0800-47c6-bedd-284b67cd08ca" (UID: "885b1538-0800-47c6-bedd-284b67cd08ca"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.569597 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f2f6fe57896d8e3136683fd067e394563605022d36d5828817ffbe5148b30a0" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.569931 4773 generic.go:334] "Generic (PLEG): container finished" podID="311884d0-65e1-4737-a7ba-efc94510b90b" containerID="2aaa460bcd03f0a9d75d04fec79f711d27405af19bd05d0be33e14861ac29dee" exitCode=143
Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.571396 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"311884d0-65e1-4737-a7ba-efc94510b90b","Type":"ContainerDied","Data":"2aaa460bcd03f0a9d75d04fec79f711d27405af19bd05d0be33e14861ac29dee"}
Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.576851 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f2f6fe57896d8e3136683fd067e394563605022d36d5828817ffbe5148b30a0" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.580817 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29 is running failed: container process not found" containerID="ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"]
Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.588366 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29 is running failed: container process not found" containerID="ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"]
Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.588539 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ef71-account-create-update-p6lhw" event={"ID":"e46f52ea-4d7a-48b9-ac29-fbb7e326a2db","Type":"ContainerStarted","Data":"2e152729794ff239a596980d4e052d21e9b06a8b0242d5b92823bd203943d8f7"}
Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.594562 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29 is running failed: container process not found" containerID="ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29"
cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.594606 4773 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29 is running failed: container process not found" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="96efe0ff-5c9e-465c-8e86-80035697b7d0" containerName="galera" Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.612647 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8f2f6fe57896d8e3136683fd067e394563605022d36d5828817ffbe5148b30a0" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.612714 4773 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="1683acba-c129-4a7c-866c-421cdb0e6505" containerName="nova-cell0-conductor-conductor" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.620016 4773 generic.go:334] "Generic (PLEG): container finished" podID="62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" containerID="2c623358850ff2ff54ddcf8841a4f6e0e66ef622633842e8bc1dabec1720076d" exitCode=143 Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.620079 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-67f94f9664-cd544" event={"ID":"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e","Type":"ContainerDied","Data":"2c623358850ff2ff54ddcf8841a4f6e0e66ef622633842e8bc1dabec1720076d"} Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.631407 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/885b1538-0800-47c6-bedd-284b67cd08ca-config-data" (OuterVolumeSpecName: "config-data") pod "885b1538-0800-47c6-bedd-284b67cd08ca" (UID: "885b1538-0800-47c6-bedd-284b67cd08ca"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.620279 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.633767 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7m22r\" (UniqueName: \"kubernetes.io/projected/885b1538-0800-47c6-bedd-284b67cd08ca-kube-api-access-7m22r\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.633797 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/885b1538-0800-47c6-bedd-284b67cd08ca-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.633913 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/885b1538-0800-47c6-bedd-284b67cd08ca-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.662432 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/885b1538-0800-47c6-bedd-284b67cd08ca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "885b1538-0800-47c6-bedd-284b67cd08ca" (UID: "885b1538-0800-47c6-bedd-284b67cd08ca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.703883 4773 generic.go:334] "Generic (PLEG): container finished" podID="885b1538-0800-47c6-bedd-284b67cd08ca" containerID="1ac97d3fbc05cf7bafee02365ba6e8072b0190e37bc6dc6a85511de15808e82e" exitCode=0 Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.703924 4773 generic.go:334] "Generic (PLEG): container finished" podID="885b1538-0800-47c6-bedd-284b67cd08ca" containerID="28e3f404590e3687f8e87e4072e952d98473da9c97f81c395fa0f7741ae8ab58" exitCode=143 Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.704053 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-85878465f6-ss54r" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.737353 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ba7d0b8-1d6d-4630-b574-7d0e55409e25" path="/var/lib/kubelet/pods/5ba7d0b8-1d6d-4630-b574-7d0e55409e25/volumes" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.737710 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9frq\" (UniqueName: \"kubernetes.io/projected/40f482a8-ca15-4e6f-a4af-4579c5f66101-kube-api-access-d9frq\") pod \"40f482a8-ca15-4e6f-a4af-4579c5f66101\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.737864 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-nova-novncproxy-tls-certs\") pod \"40f482a8-ca15-4e6f-a4af-4579c5f66101\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.737951 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-combined-ca-bundle\") pod \"40f482a8-ca15-4e6f-a4af-4579c5f66101\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.737999 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-config-data\") pod \"40f482a8-ca15-4e6f-a4af-4579c5f66101\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.738098 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-vencrypt-tls-certs\") pod \"40f482a8-ca15-4e6f-a4af-4579c5f66101\" (UID: \"40f482a8-ca15-4e6f-a4af-4579c5f66101\") " Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.741449 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e274f9a-f487-4a76-864b-38f0b4e80ed9" path="/var/lib/kubelet/pods/5e274f9a-f487-4a76-864b-38f0b4e80ed9/volumes" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.744571 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c38a538-da2d-4097-9851-de6f8f2106c1" path="/var/lib/kubelet/pods/6c38a538-da2d-4097-9851-de6f8f2106c1/volumes" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.744950 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/885b1538-0800-47c6-bedd-284b67cd08ca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.745834 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9031d071-6292-4367-abab-a0be539a3914" path="/var/lib/kubelet/pods/9031d071-6292-4367-abab-a0be539a3914/volumes" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.746447 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e18e0db7-6f23-4114-82cd-3d1d393db415" path="/var/lib/kubelet/pods/e18e0db7-6f23-4114-82cd-3d1d393db415/volumes" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.747003 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="f8bbf2c9-35f6-4c9e-a959-6977302e59aa" path="/var/lib/kubelet/pods/f8bbf2c9-35f6-4c9e-a959-6977302e59aa/volumes" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.753479 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-85878465f6-ss54r" event={"ID":"885b1538-0800-47c6-bedd-284b67cd08ca","Type":"ContainerDied","Data":"1ac97d3fbc05cf7bafee02365ba6e8072b0190e37bc6dc6a85511de15808e82e"} Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.753533 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-85878465f6-ss54r" event={"ID":"885b1538-0800-47c6-bedd-284b67cd08ca","Type":"ContainerDied","Data":"28e3f404590e3687f8e87e4072e952d98473da9c97f81c395fa0f7741ae8ab58"} Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.753546 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-85878465f6-ss54r" event={"ID":"885b1538-0800-47c6-bedd-284b67cd08ca","Type":"ContainerDied","Data":"3ca8e34c1aece56e84463dab57c063eb49ccf046d864db450b64d581f5177856"} Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.753620 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-97pzr"] Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.754151 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40f482a8-ca15-4e6f-a4af-4579c5f66101" containerName="nova-cell1-novncproxy-novncproxy" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.754173 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="40f482a8-ca15-4e6f-a4af-4579c5f66101" containerName="nova-cell1-novncproxy-novncproxy" Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.754187 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acfbd9c7-d136-4478-a181-7e9fb3033557" containerName="openstack-network-exporter" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.754196 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="acfbd9c7-d136-4478-a181-7e9fb3033557" containerName="openstack-network-exporter" Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.754222 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acfbd9c7-d136-4478-a181-7e9fb3033557" containerName="ovsdbserver-nb" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.754230 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="acfbd9c7-d136-4478-a181-7e9fb3033557" containerName="ovsdbserver-nb" Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.754245 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4868de7-6200-4cd9-948b-e0cdbbac5838" containerName="dnsmasq-dns" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.754254 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4868de7-6200-4cd9-948b-e0cdbbac5838" containerName="dnsmasq-dns" Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.754263 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37262aee-18bc-423e-9dac-272af09de237" containerName="openstack-network-exporter" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.754270 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="37262aee-18bc-423e-9dac-272af09de237" containerName="openstack-network-exporter" Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.754338 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efd60909-f66c-4dc2-948f-5f63c735ab6e" containerName="openstack-network-exporter" Jan 22 12:19:52 crc 
kubenswrapper[4773]: I0122 12:19:52.754347 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="efd60909-f66c-4dc2-948f-5f63c735ab6e" containerName="openstack-network-exporter" Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.754370 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4868de7-6200-4cd9-948b-e0cdbbac5838" containerName="init" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.754378 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4868de7-6200-4cd9-948b-e0cdbbac5838" containerName="init" Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.754396 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885b1538-0800-47c6-bedd-284b67cd08ca" containerName="barbican-keystone-listener" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.754404 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="885b1538-0800-47c6-bedd-284b67cd08ca" containerName="barbican-keystone-listener" Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.754418 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885b1538-0800-47c6-bedd-284b67cd08ca" containerName="barbican-keystone-listener-log" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.754426 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="885b1538-0800-47c6-bedd-284b67cd08ca" containerName="barbican-keystone-listener-log" Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.754445 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37262aee-18bc-423e-9dac-272af09de237" containerName="ovsdbserver-sb" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.754453 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="37262aee-18bc-423e-9dac-272af09de237" containerName="ovsdbserver-sb" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.754708 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="efd60909-f66c-4dc2-948f-5f63c735ab6e" containerName="openstack-network-exporter" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.754725 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="acfbd9c7-d136-4478-a181-7e9fb3033557" containerName="ovsdbserver-nb" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.754746 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="37262aee-18bc-423e-9dac-272af09de237" containerName="openstack-network-exporter" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.754758 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="37262aee-18bc-423e-9dac-272af09de237" containerName="ovsdbserver-sb" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.754770 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="40f482a8-ca15-4e6f-a4af-4579c5f66101" containerName="nova-cell1-novncproxy-novncproxy" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.754783 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4868de7-6200-4cd9-948b-e0cdbbac5838" containerName="dnsmasq-dns" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.754792 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="885b1538-0800-47c6-bedd-284b67cd08ca" containerName="barbican-keystone-listener" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.754810 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="acfbd9c7-d136-4478-a181-7e9fb3033557" containerName="openstack-network-exporter" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.754824 4773 
memory_manager.go:354] "RemoveStaleState removing state" podUID="885b1538-0800-47c6-bedd-284b67cd08ca" containerName="barbican-keystone-listener-log" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.755633 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-97pzr" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.759372 4773 scope.go:117] "RemoveContainer" containerID="1ac97d3fbc05cf7bafee02365ba6e8072b0190e37bc6dc6a85511de15808e82e" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.762539 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.764125 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-97pzr"] Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.787270 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-85878465f6-ss54r"] Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.794484 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40f482a8-ca15-4e6f-a4af-4579c5f66101-kube-api-access-d9frq" (OuterVolumeSpecName: "kube-api-access-d9frq") pod "40f482a8-ca15-4e6f-a4af-4579c5f66101" (UID: "40f482a8-ca15-4e6f-a4af-4579c5f66101"). InnerVolumeSpecName "kube-api-access-d9frq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.796411 4773 generic.go:334] "Generic (PLEG): container finished" podID="525ebe62-dc27-40fa-97a9-31346c6145a1" containerID="521549d44714482372b02d7ce753faaca6ca987699d2815bb433305658a9c8f6" exitCode=0 Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.796654 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6f844cff7c-gmg8h" event={"ID":"525ebe62-dc27-40fa-97a9-31346c6145a1","Type":"ContainerDied","Data":"521549d44714482372b02d7ce753faaca6ca987699d2815bb433305658a9c8f6"} Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.797430 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-85878465f6-ss54r"] Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.809565 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-eb71-account-create-update-l8944" event={"ID":"fbc373ba-8a4b-44be-a687-63de038d5fa3","Type":"ContainerStarted","Data":"dd71f9cbd55aa8d64a6e64f558ed169ca5c6e050894e4edcf87c3d62aa22badf"} Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.824532 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "40f482a8-ca15-4e6f-a4af-4579c5f66101" (UID: "40f482a8-ca15-4e6f-a4af-4579c5f66101"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.832398 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-config-data" (OuterVolumeSpecName: "config-data") pod "40f482a8-ca15-4e6f-a4af-4579c5f66101" (UID: "40f482a8-ca15-4e6f-a4af-4579c5f66101"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.851170 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfdfz\" (UniqueName: \"kubernetes.io/projected/bda45287-f56e-4031-addb-068efe159a75-kube-api-access-xfdfz\") pod \"root-account-create-update-97pzr\" (UID: \"bda45287-f56e-4031-addb-068efe159a75\") " pod="openstack/root-account-create-update-97pzr" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.851413 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bda45287-f56e-4031-addb-068efe159a75-operator-scripts\") pod \"root-account-create-update-97pzr\" (UID: \"bda45287-f56e-4031-addb-068efe159a75\") " pod="openstack/root-account-create-update-97pzr" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.851651 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9frq\" (UniqueName: \"kubernetes.io/projected/40f482a8-ca15-4e6f-a4af-4579c5f66101-kube-api-access-d9frq\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.851671 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.851682 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.852313 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "40f482a8-ca15-4e6f-a4af-4579c5f66101" (UID: "40f482a8-ca15-4e6f-a4af-4579c5f66101"). InnerVolumeSpecName "vencrypt-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.865603 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerID="dd185ed78ea8d0ba5be7c159d3c9510adff0ac36b806be4c8fe7fd52820b4ab1" exitCode=0 Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.865630 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerID="e9546b98e820ca3d0bc3c7f7f6af1821b37a056264052a38280687379bf170a4" exitCode=0 Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.865680 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerDied","Data":"dd185ed78ea8d0ba5be7c159d3c9510adff0ac36b806be4c8fe7fd52820b4ab1"} Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.865706 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerDied","Data":"e9546b98e820ca3d0bc3c7f7f6af1821b37a056264052a38280687379bf170a4"} Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.873509 4773 generic.go:334] "Generic (PLEG): container finished" podID="40f482a8-ca15-4e6f-a4af-4579c5f66101" containerID="100e88af17884d8fb4d35904ad9c62fc7acae0e26e17f113782fb3fad537ae96" exitCode=0 Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.873615 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-57dd79856-kc66z" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.873922 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.874843 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-8744-account-create-update-4ljts" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.874993 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"40f482a8-ca15-4e6f-a4af-4579c5f66101","Type":"ContainerDied","Data":"100e88af17884d8fb4d35904ad9c62fc7acae0e26e17f113782fb3fad537ae96"} Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.875037 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcd6f8f8f-82zlq" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.875535 4773 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/root-account-create-update-n4t5s" secret="" err="secret \"galera-openstack-cell1-dockercfg-wlmdx\" not found" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.875822 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.876255 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.882066 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-tjdqh"
Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.888672 4773 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Jan 22 12:19:52 crc kubenswrapper[4773]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash
Jan 22 12:19:52 crc kubenswrapper[4773]:
Jan 22 12:19:52 crc kubenswrapper[4773]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh
Jan 22 12:19:52 crc kubenswrapper[4773]:
Jan 22 12:19:52 crc kubenswrapper[4773]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."}
Jan 22 12:19:52 crc kubenswrapper[4773]:
Jan 22 12:19:52 crc kubenswrapper[4773]: MYSQL_CMD="mysql -h -u root -P 3306"
Jan 22 12:19:52 crc kubenswrapper[4773]:
Jan 22 12:19:52 crc kubenswrapper[4773]: if [ -n "" ]; then
Jan 22 12:19:52 crc kubenswrapper[4773]: GRANT_DATABASE=""
Jan 22 12:19:52 crc kubenswrapper[4773]: else
Jan 22 12:19:52 crc kubenswrapper[4773]: GRANT_DATABASE="*"
Jan 22 12:19:52 crc kubenswrapper[4773]: fi
Jan 22 12:19:52 crc kubenswrapper[4773]:
Jan 22 12:19:52 crc kubenswrapper[4773]: # going for maximum compatibility here:
Jan 22 12:19:52 crc kubenswrapper[4773]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used
Jan 22 12:19:52 crc kubenswrapper[4773]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not
Jan 22 12:19:52 crc kubenswrapper[4773]: # 3. create user with CREATE but then do all password and TLS with ALTER to
Jan 22 12:19:52 crc kubenswrapper[4773]: # support updates
Jan 22 12:19:52 crc kubenswrapper[4773]:
Jan 22 12:19:52 crc kubenswrapper[4773]: $MYSQL_CMD < logger="UnhandledError"
Jan 22 12:19:52 crc kubenswrapper[4773]: E0122 12:19:52.892376 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"openstack-cell1-mariadb-root-db-secret\\\" not found\"" pod="openstack/root-account-create-update-n4t5s" podUID="fbe5483b-39b1-4c4c-add8-d4ec00c26108"
Jan 22 12:19:52 crc kubenswrapper[4773]: I0122 12:19:52.944792 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "40f482a8-ca15-4e6f-a4af-4579c5f66101" (UID: "40f482a8-ca15-4e6f-a4af-4579c5f66101"). InnerVolumeSpecName "nova-novncproxy-tls-certs".
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:52.957002 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfdfz\" (UniqueName: \"kubernetes.io/projected/bda45287-f56e-4031-addb-068efe159a75-kube-api-access-xfdfz\") pod \"root-account-create-update-97pzr\" (UID: \"bda45287-f56e-4031-addb-068efe159a75\") " pod="openstack/root-account-create-update-97pzr" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:52.957163 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bda45287-f56e-4031-addb-068efe159a75-operator-scripts\") pod \"root-account-create-update-97pzr\" (UID: \"bda45287-f56e-4031-addb-068efe159a75\") " pod="openstack/root-account-create-update-97pzr" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:52.957245 4773 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:52.957260 4773 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f482a8-ca15-4e6f-a4af-4579c5f66101-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:52.958437 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bda45287-f56e-4031-addb-068efe159a75-operator-scripts\") pod \"root-account-create-update-97pzr\" (UID: \"bda45287-f56e-4031-addb-068efe159a75\") " pod="openstack/root-account-create-update-97pzr" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:52.981807 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfdfz\" (UniqueName: \"kubernetes.io/projected/bda45287-f56e-4031-addb-068efe159a75-kube-api-access-xfdfz\") pod \"root-account-create-update-97pzr\" (UID: \"bda45287-f56e-4031-addb-068efe159a75\") " pod="openstack/root-account-create-update-97pzr" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.481440 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-5c54fcb95c-24djg" podUID="a393de80-9ad0-413e-a2a1-6ee14de22049" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.168:9696/\": dial tcp 10.217.0.168:9696: connect: connection refused" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.521665 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-57dd79856-kc66z" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.529743 4773 scope.go:117] "RemoveContainer" containerID="28e3f404590e3687f8e87e4072e952d98473da9c97f81c395fa0f7741ae8ab58" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.601117 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-rp6dh" podUID="757f37a0-3cc7-4d57-a956-83d236d6cebc" containerName="ovn-controller" probeResult="failure" output=< Jan 22 12:19:53 crc kubenswrapper[4773]: ERROR - Failed to get connection status from ovn-controller, ovn-appctl exit status: 0 Jan 22 12:19:53 crc kubenswrapper[4773]: > Jan 22 12:19:53 crc kubenswrapper[4773]: E0122 12:19:53.620348 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 12:19:53 crc kubenswrapper[4773]: E0122 12:19:53.620445 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 12:19:53 crc kubenswrapper[4773]: E0122 12:19:53.638230 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 12:19:53 crc kubenswrapper[4773]: E0122 12:19:53.641337 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 12:19:53 crc kubenswrapper[4773]: E0122 12:19:53.641721 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 12:19:53 crc kubenswrapper[4773]: E0122 12:19:53.641887 4773 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-fj6wj" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovsdb-server" Jan 22 12:19:53 crc kubenswrapper[4773]: E0122 12:19:53.643222 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register 
an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 12:19:53 crc kubenswrapper[4773]: E0122 12:19:53.643349 4773 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-fj6wj" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovs-vswitchd" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.661923 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-8744-account-create-update-4ljts" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.671858 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-internal-tls-certs\") pod \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.672150 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-public-tls-certs\") pod \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.672201 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-logs\") pod \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.672444 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-combined-ca-bundle\") pod \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.672663 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-logs" (OuterVolumeSpecName: "logs") pod "8510a6ae-d5e7-4da2-9351-1f0f848d6fef" (UID: "8510a6ae-d5e7-4da2-9351-1f0f848d6fef"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.672841 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data-custom\") pod \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\" (UID: \"8510a6ae-d5e7-4da2-9351-1f0f848d6fef\") " Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.676704 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzkhs\" (UniqueName: \"kubernetes.io/projected/5398d068-4617-4d25-bd5e-18fa8ae900cc-kube-api-access-xzkhs\") pod \"nova-cell1-8744-account-create-update-4ljts\" (UID: \"5398d068-4617-4d25-bd5e-18fa8ae900cc\") " pod="openstack/nova-cell1-8744-account-create-update-4ljts" Jan 22 12:19:53 crc kubenswrapper[4773]: E0122 12:19:53.681198 4773 projected.go:194] Error preparing data for projected volume kube-api-access-xzkhs for pod openstack/nova-cell1-8744-account-create-update-4ljts: failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Jan 22 12:19:53 crc kubenswrapper[4773]: E0122 12:19:53.681269 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/5398d068-4617-4d25-bd5e-18fa8ae900cc-kube-api-access-xzkhs podName:5398d068-4617-4d25-bd5e-18fa8ae900cc nodeName:}" failed. No retries permitted until 2026-01-22 12:19:57.681249669 +0000 UTC m=+1505.259365494 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-xzkhs" (UniqueName: "kubernetes.io/projected/5398d068-4617-4d25-bd5e-18fa8ae900cc-kube-api-access-xzkhs") pod "nova-cell1-8744-account-create-update-4ljts" (UID: "5398d068-4617-4d25-bd5e-18fa8ae900cc") : failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Jan 22 12:19:53 crc kubenswrapper[4773]: E0122 12:19:53.681842 4773 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 22 12:19:53 crc kubenswrapper[4773]: E0122 12:19:53.681888 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-config-data podName:a688a76d-2498-4542-8285-709caf211e8a nodeName:}" failed. No retries permitted until 2026-01-22 12:19:57.681875047 +0000 UTC m=+1505.259990872 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-config-data") pod "rabbitmq-server-0" (UID: "a688a76d-2498-4542-8285-709caf211e8a") : configmap "rabbitmq-config-data" not found Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.681914 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5398d068-4617-4d25-bd5e-18fa8ae900cc-operator-scripts\") pod \"nova-cell1-8744-account-create-update-4ljts\" (UID: \"5398d068-4617-4d25-bd5e-18fa8ae900cc\") " pod="openstack/nova-cell1-8744-account-create-update-4ljts" Jan 22 12:19:53 crc kubenswrapper[4773]: E0122 12:19:53.681985 4773 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 22 12:19:53 crc kubenswrapper[4773]: E0122 12:19:53.682004 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5398d068-4617-4d25-bd5e-18fa8ae900cc-operator-scripts podName:5398d068-4617-4d25-bd5e-18fa8ae900cc nodeName:}" failed. No retries permitted until 2026-01-22 12:19:57.68199798 +0000 UTC m=+1505.260113805 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/5398d068-4617-4d25-bd5e-18fa8ae900cc-operator-scripts") pod "nova-cell1-8744-account-create-update-4ljts" (UID: "5398d068-4617-4d25-bd5e-18fa8ae900cc") : configmap "openstack-cell1-scripts" not found Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.682105 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-logs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.691813 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0af4-account-create-update-gctq5" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.692604 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-97pzr" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.711487 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8510a6ae-d5e7-4da2-9351-1f0f848d6fef" (UID: "8510a6ae-d5e7-4da2-9351-1f0f848d6fef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.717442 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.718825 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8510a6ae-d5e7-4da2-9351-1f0f848d6fef" (UID: "8510a6ae-d5e7-4da2-9351-1f0f848d6fef"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.724220 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8510a6ae-d5e7-4da2-9351-1f0f848d6fef" (UID: "8510a6ae-d5e7-4da2-9351-1f0f848d6fef"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.731383 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-8cf3-account-create-update-sg972" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.734478 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8510a6ae-d5e7-4da2-9351-1f0f848d6fef" (UID: "8510a6ae-d5e7-4da2-9351-1f0f848d6fef"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.759466 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ef71-account-create-update-p6lhw" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.802813 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.804434 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.805692 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kbzzj\" (UniqueName: \"kubernetes.io/projected/bfff6d4b-eee9-4ceb-a420-512d1ba28760-kube-api-access-kbzzj\") pod \"bfff6d4b-eee9-4ceb-a420-512d1ba28760\" (UID: \"bfff6d4b-eee9-4ceb-a420-512d1ba28760\") " Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.805987 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfff6d4b-eee9-4ceb-a420-512d1ba28760-operator-scripts\") pod \"bfff6d4b-eee9-4ceb-a420-512d1ba28760\" (UID: \"bfff6d4b-eee9-4ceb-a420-512d1ba28760\") " Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.807086 4773 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.807106 4773 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.807120 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.807131 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:53 crc kubenswrapper[4773]: 
I0122 12:19:53.814955 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfff6d4b-eee9-4ceb-a420-512d1ba28760-kube-api-access-kbzzj" (OuterVolumeSpecName: "kube-api-access-kbzzj") pod "bfff6d4b-eee9-4ceb-a420-512d1ba28760" (UID: "bfff6d4b-eee9-4ceb-a420-512d1ba28760"). InnerVolumeSpecName "kube-api-access-kbzzj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.815428 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfff6d4b-eee9-4ceb-a420-512d1ba28760-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bfff6d4b-eee9-4ceb-a420-512d1ba28760" (UID: "bfff6d4b-eee9-4ceb-a420-512d1ba28760"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.815785 4773 scope.go:117] "RemoveContainer" containerID="1ac97d3fbc05cf7bafee02365ba6e8072b0190e37bc6dc6a85511de15808e82e" Jan 22 12:19:53 crc kubenswrapper[4773]: E0122 12:19:53.832596 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ac97d3fbc05cf7bafee02365ba6e8072b0190e37bc6dc6a85511de15808e82e\": container with ID starting with 1ac97d3fbc05cf7bafee02365ba6e8072b0190e37bc6dc6a85511de15808e82e not found: ID does not exist" containerID="1ac97d3fbc05cf7bafee02365ba6e8072b0190e37bc6dc6a85511de15808e82e" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.832653 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ac97d3fbc05cf7bafee02365ba6e8072b0190e37bc6dc6a85511de15808e82e"} err="failed to get container status \"1ac97d3fbc05cf7bafee02365ba6e8072b0190e37bc6dc6a85511de15808e82e\": rpc error: code = NotFound desc = could not find container \"1ac97d3fbc05cf7bafee02365ba6e8072b0190e37bc6dc6a85511de15808e82e\": container with ID starting with 1ac97d3fbc05cf7bafee02365ba6e8072b0190e37bc6dc6a85511de15808e82e not found: ID does not exist" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.832681 4773 scope.go:117] "RemoveContainer" containerID="28e3f404590e3687f8e87e4072e952d98473da9c97f81c395fa0f7741ae8ab58" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.876801 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 22 12:19:53 crc kubenswrapper[4773]: E0122 12:19:53.877357 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28e3f404590e3687f8e87e4072e952d98473da9c97f81c395fa0f7741ae8ab58\": container with ID starting with 28e3f404590e3687f8e87e4072e952d98473da9c97f81c395fa0f7741ae8ab58 not found: ID does not exist" containerID="28e3f404590e3687f8e87e4072e952d98473da9c97f81c395fa0f7741ae8ab58" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.877399 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28e3f404590e3687f8e87e4072e952d98473da9c97f81c395fa0f7741ae8ab58"} err="failed to get container status \"28e3f404590e3687f8e87e4072e952d98473da9c97f81c395fa0f7741ae8ab58\": rpc error: code = NotFound desc = could not find container \"28e3f404590e3687f8e87e4072e952d98473da9c97f81c395fa0f7741ae8ab58\": container with ID starting with 28e3f404590e3687f8e87e4072e952d98473da9c97f81c395fa0f7741ae8ab58 not found: ID does not exist" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 
12:19:53.877425 4773 scope.go:117] "RemoveContainer" containerID="1ac97d3fbc05cf7bafee02365ba6e8072b0190e37bc6dc6a85511de15808e82e" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.880855 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ac97d3fbc05cf7bafee02365ba6e8072b0190e37bc6dc6a85511de15808e82e"} err="failed to get container status \"1ac97d3fbc05cf7bafee02365ba6e8072b0190e37bc6dc6a85511de15808e82e\": rpc error: code = NotFound desc = could not find container \"1ac97d3fbc05cf7bafee02365ba6e8072b0190e37bc6dc6a85511de15808e82e\": container with ID starting with 1ac97d3fbc05cf7bafee02365ba6e8072b0190e37bc6dc6a85511de15808e82e not found: ID does not exist" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.881799 4773 scope.go:117] "RemoveContainer" containerID="28e3f404590e3687f8e87e4072e952d98473da9c97f81c395fa0f7741ae8ab58" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.892442 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28e3f404590e3687f8e87e4072e952d98473da9c97f81c395fa0f7741ae8ab58"} err="failed to get container status \"28e3f404590e3687f8e87e4072e952d98473da9c97f81c395fa0f7741ae8ab58\": rpc error: code = NotFound desc = could not find container \"28e3f404590e3687f8e87e4072e952d98473da9c97f81c395fa0f7741ae8ab58\": container with ID starting with 28e3f404590e3687f8e87e4072e952d98473da9c97f81c395fa0f7741ae8ab58 not found: ID does not exist" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.892491 4773 scope.go:117] "RemoveContainer" containerID="100e88af17884d8fb4d35904ad9c62fc7acae0e26e17f113782fb3fad537ae96" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.893929 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.911602 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/96efe0ff-5c9e-465c-8e86-80035697b7d0-config-data-default\") pod \"96efe0ff-5c9e-465c-8e86-80035697b7d0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.912682 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/96efe0ff-5c9e-465c-8e86-80035697b7d0-galera-tls-certs\") pod \"96efe0ff-5c9e-465c-8e86-80035697b7d0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.912723 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/96efe0ff-5c9e-465c-8e86-80035697b7d0-config-data-generated\") pod \"96efe0ff-5c9e-465c-8e86-80035697b7d0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.912958 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e46f52ea-4d7a-48b9-ac29-fbb7e326a2db-operator-scripts\") pod \"e46f52ea-4d7a-48b9-ac29-fbb7e326a2db\" (UID: \"e46f52ea-4d7a-48b9-ac29-fbb7e326a2db\") " Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.913035 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/96efe0ff-5c9e-465c-8e86-80035697b7d0-kolla-config\") pod 
\"96efe0ff-5c9e-465c-8e86-80035697b7d0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.913089 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25ztp\" (UniqueName: \"kubernetes.io/projected/96efe0ff-5c9e-465c-8e86-80035697b7d0-kube-api-access-25ztp\") pod \"96efe0ff-5c9e-465c-8e86-80035697b7d0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.913121 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"96efe0ff-5c9e-465c-8e86-80035697b7d0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.913137 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96efe0ff-5c9e-465c-8e86-80035697b7d0-operator-scripts\") pod \"96efe0ff-5c9e-465c-8e86-80035697b7d0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.913388 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d367a46c-d551-46d3-b85f-a4499a1c4c0f-operator-scripts\") pod \"d367a46c-d551-46d3-b85f-a4499a1c4c0f\" (UID: \"d367a46c-d551-46d3-b85f-a4499a1c4c0f\") " Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.913439 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nglsh\" (UniqueName: \"kubernetes.io/projected/e46f52ea-4d7a-48b9-ac29-fbb7e326a2db-kube-api-access-nglsh\") pod \"e46f52ea-4d7a-48b9-ac29-fbb7e326a2db\" (UID: \"e46f52ea-4d7a-48b9-ac29-fbb7e326a2db\") " Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.913672 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96efe0ff-5c9e-465c-8e86-80035697b7d0-combined-ca-bundle\") pod \"96efe0ff-5c9e-465c-8e86-80035697b7d0\" (UID: \"96efe0ff-5c9e-465c-8e86-80035697b7d0\") " Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.913706 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hskjt\" (UniqueName: \"kubernetes.io/projected/d367a46c-d551-46d3-b85f-a4499a1c4c0f-kube-api-access-hskjt\") pod \"d367a46c-d551-46d3-b85f-a4499a1c4c0f\" (UID: \"d367a46c-d551-46d3-b85f-a4499a1c4c0f\") " Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.912603 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96efe0ff-5c9e-465c-8e86-80035697b7d0-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "96efe0ff-5c9e-465c-8e86-80035697b7d0" (UID: "96efe0ff-5c9e-465c-8e86-80035697b7d0"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.913853 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e46f52ea-4d7a-48b9-ac29-fbb7e326a2db-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e46f52ea-4d7a-48b9-ac29-fbb7e326a2db" (UID: "e46f52ea-4d7a-48b9-ac29-fbb7e326a2db"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.914534 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96efe0ff-5c9e-465c-8e86-80035697b7d0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "96efe0ff-5c9e-465c-8e86-80035697b7d0" (UID: "96efe0ff-5c9e-465c-8e86-80035697b7d0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.915558 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96efe0ff-5c9e-465c-8e86-80035697b7d0-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "96efe0ff-5c9e-465c-8e86-80035697b7d0" (UID: "96efe0ff-5c9e-465c-8e86-80035697b7d0"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.916194 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfff6d4b-eee9-4ceb-a420-512d1ba28760-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.916233 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/96efe0ff-5c9e-465c-8e86-80035697b7d0-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.916243 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e46f52ea-4d7a-48b9-ac29-fbb7e326a2db-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.916253 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kbzzj\" (UniqueName: \"kubernetes.io/projected/bfff6d4b-eee9-4ceb-a420-512d1ba28760-kube-api-access-kbzzj\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.916261 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96efe0ff-5c9e-465c-8e86-80035697b7d0-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.916270 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/96efe0ff-5c9e-465c-8e86-80035697b7d0-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.922032 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96efe0ff-5c9e-465c-8e86-80035697b7d0-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "96efe0ff-5c9e-465c-8e86-80035697b7d0" (UID: "96efe0ff-5c9e-465c-8e86-80035697b7d0"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.924066 4773 generic.go:334] "Generic (PLEG): container finished" podID="bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15" containerID="889e6af5b48631940b0b05bbbc915b42895876b4aa8e1b9378483400c385f7a5" exitCode=143 Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.924163 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" event={"ID":"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15","Type":"ContainerDied","Data":"889e6af5b48631940b0b05bbbc915b42895876b4aa8e1b9378483400c385f7a5"} Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.926240 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-eb71-account-create-update-l8944" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.926339 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.926578 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d367a46c-d551-46d3-b85f-a4499a1c4c0f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d367a46c-d551-46d3-b85f-a4499a1c4c0f" (UID: "d367a46c-d551-46d3-b85f-a4499a1c4c0f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.927606 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d367a46c-d551-46d3-b85f-a4499a1c4c0f-kube-api-access-hskjt" (OuterVolumeSpecName: "kube-api-access-hskjt") pod "d367a46c-d551-46d3-b85f-a4499a1c4c0f" (UID: "d367a46c-d551-46d3-b85f-a4499a1c4c0f"). InnerVolumeSpecName "kube-api-access-hskjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.938372 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.939631 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e46f52ea-4d7a-48b9-ac29-fbb7e326a2db-kube-api-access-nglsh" (OuterVolumeSpecName: "kube-api-access-nglsh") pod "e46f52ea-4d7a-48b9-ac29-fbb7e326a2db" (UID: "e46f52ea-4d7a-48b9-ac29-fbb7e326a2db"). InnerVolumeSpecName "kube-api-access-nglsh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.950973 4773 generic.go:334] "Generic (PLEG): container finished" podID="81a2f40b-f1de-449b-9e0f-45171ffa318c" containerID="87124795355c6f0d6915d6a49102ef60c2f5d2076a9656aff323f785f19478f7" exitCode=143 Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.953442 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5f675cf6c9-7wz9g" event={"ID":"81a2f40b-f1de-449b-9e0f-45171ffa318c","Type":"ContainerDied","Data":"87124795355c6f0d6915d6a49102ef60c2f5d2076a9656aff323f785f19478f7"} Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.957789 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fcd6f8f8f-82zlq"] Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.958621 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96efe0ff-5c9e-465c-8e86-80035697b7d0-kube-api-access-25ztp" (OuterVolumeSpecName: "kube-api-access-25ztp") pod "96efe0ff-5c9e-465c-8e86-80035697b7d0" (UID: "96efe0ff-5c9e-465c-8e86-80035697b7d0"). InnerVolumeSpecName "kube-api-access-25ztp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.971119 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-fcd6f8f8f-82zlq"] Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.976055 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-tjdqh"] Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.980128 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96efe0ff-5c9e-465c-8e86-80035697b7d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "96efe0ff-5c9e-465c-8e86-80035697b7d0" (UID: "96efe0ff-5c9e-465c-8e86-80035697b7d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.983058 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-tjdqh"] Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.984641 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-eb71-account-create-update-l8944" event={"ID":"fbc373ba-8a4b-44be-a687-63de038d5fa3","Type":"ContainerDied","Data":"dd71f9cbd55aa8d64a6e64f558ed169ca5c6e050894e4edcf87c3d62aa22badf"} Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.984710 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-eb71-account-create-update-l8944" Jan 22 12:19:53 crc kubenswrapper[4773]: I0122 12:19:53.985183 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6f844cff7c-gmg8h" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.011529 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "mysql-db") pod "96efe0ff-5c9e-465c-8e86-80035697b7d0" (UID: "96efe0ff-5c9e-465c-8e86-80035697b7d0"). InnerVolumeSpecName "local-storage04-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.017208 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-smchj\" (UniqueName: \"kubernetes.io/projected/fbc373ba-8a4b-44be-a687-63de038d5fa3-kube-api-access-smchj\") pod \"fbc373ba-8a4b-44be-a687-63de038d5fa3\" (UID: \"fbc373ba-8a4b-44be-a687-63de038d5fa3\") " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.017298 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbc373ba-8a4b-44be-a687-63de038d5fa3-operator-scripts\") pod \"fbc373ba-8a4b-44be-a687-63de038d5fa3\" (UID: \"fbc373ba-8a4b-44be-a687-63de038d5fa3\") " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.024529 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d367a46c-d551-46d3-b85f-a4499a1c4c0f-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.024552 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96efe0ff-5c9e-465c-8e86-80035697b7d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.024561 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nglsh\" (UniqueName: \"kubernetes.io/projected/e46f52ea-4d7a-48b9-ac29-fbb7e326a2db-kube-api-access-nglsh\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.024571 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hskjt\" (UniqueName: \"kubernetes.io/projected/d367a46c-d551-46d3-b85f-a4499a1c4c0f-kube-api-access-hskjt\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.024580 4773 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/96efe0ff-5c9e-465c-8e86-80035697b7d0-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.024588 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25ztp\" (UniqueName: \"kubernetes.io/projected/96efe0ff-5c9e-465c-8e86-80035697b7d0-kube-api-access-25ztp\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.024585 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbc373ba-8a4b-44be-a687-63de038d5fa3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fbc373ba-8a4b-44be-a687-63de038d5fa3" (UID: "fbc373ba-8a4b-44be-a687-63de038d5fa3"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.024616 4773 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.034840 4773 generic.go:334] "Generic (PLEG): container finished" podID="878a6ce0-f293-4690-9049-c90155c56ff3" containerID="bca2d75d3d19f7bde38ac5639321578fd5fe1d01e28a264520eae5ab44fb18dd" exitCode=0 Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.034940 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-77688c4666-rzbr7" event={"ID":"878a6ce0-f293-4690-9049-c90155c56ff3","Type":"ContainerDied","Data":"bca2d75d3d19f7bde38ac5639321578fd5fe1d01e28a264520eae5ab44fb18dd"} Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.049492 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbc373ba-8a4b-44be-a687-63de038d5fa3-kube-api-access-smchj" (OuterVolumeSpecName: "kube-api-access-smchj") pod "fbc373ba-8a4b-44be-a687-63de038d5fa3" (UID: "fbc373ba-8a4b-44be-a687-63de038d5fa3"). InnerVolumeSpecName "kube-api-access-smchj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.067515 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-a998-account-create-update-qmwlx" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.074192 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96efe0ff-5c9e-465c-8e86-80035697b7d0-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "96efe0ff-5c9e-465c-8e86-80035697b7d0" (UID: "96efe0ff-5c9e-465c-8e86-80035697b7d0"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.075310 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-a998-account-create-update-qmwlx" event={"ID":"d5686dfc-618a-4996-a4b0-b1bd31365084","Type":"ContainerDied","Data":"0802e2b1ad6241477fbb5b6a49043e2f9c520a05560405570572bb4997004361"} Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.087634 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-ef71-account-create-update-p6lhw" event={"ID":"e46f52ea-4d7a-48b9-ac29-fbb7e326a2db","Type":"ContainerDied","Data":"2e152729794ff239a596980d4e052d21e9b06a8b0242d5b92823bd203943d8f7"} Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.087733 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-ef71-account-create-update-p6lhw" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.094722 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0af4-account-create-update-gctq5" event={"ID":"bfff6d4b-eee9-4ceb-a420-512d1ba28760","Type":"ContainerDied","Data":"7e85a9c4ece38ecb12424cefd99a202564c42b0b73d16ddd94d5b773db5c6d1c"} Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.094825 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-0af4-account-create-update-gctq5" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.114198 4773 generic.go:334] "Generic (PLEG): container finished" podID="525ebe62-dc27-40fa-97a9-31346c6145a1" containerID="aee88f91e8816d6bd6a9431e8b236642a9b4210f82d3835d5b43ad50cab3ce23" exitCode=0 Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.114333 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6f844cff7c-gmg8h" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.114350 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6f844cff7c-gmg8h" event={"ID":"525ebe62-dc27-40fa-97a9-31346c6145a1","Type":"ContainerDied","Data":"aee88f91e8816d6bd6a9431e8b236642a9b4210f82d3835d5b43ad50cab3ce23"} Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.114397 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6f844cff7c-gmg8h" event={"ID":"525ebe62-dc27-40fa-97a9-31346c6145a1","Type":"ContainerDied","Data":"88b310776544f69fc2ad3ad17a6ca12d59f34c72942fa6908b7641b40484281d"} Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.129669 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-8cf3-account-create-update-sg972" event={"ID":"d367a46c-d551-46d3-b85f-a4499a1c4c0f","Type":"ContainerDied","Data":"da393a3992304732dc4631406b6cc24582aca381d3bd6bc28e2dbef1f1eb2d9f"} Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.129874 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-8cf3-account-create-update-sg972" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.130677 4773 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.131495 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-config-data\") pod \"525ebe62-dc27-40fa-97a9-31346c6145a1\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.131555 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/525ebe62-dc27-40fa-97a9-31346c6145a1-log-httpd\") pod \"525ebe62-dc27-40fa-97a9-31346c6145a1\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.131601 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-public-tls-certs\") pod \"525ebe62-dc27-40fa-97a9-31346c6145a1\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.131681 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-combined-ca-bundle\") pod \"525ebe62-dc27-40fa-97a9-31346c6145a1\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.131727 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6w67f\" (UniqueName: 
\"kubernetes.io/projected/525ebe62-dc27-40fa-97a9-31346c6145a1-kube-api-access-6w67f\") pod \"525ebe62-dc27-40fa-97a9-31346c6145a1\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.131757 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-internal-tls-certs\") pod \"525ebe62-dc27-40fa-97a9-31346c6145a1\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.131840 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/525ebe62-dc27-40fa-97a9-31346c6145a1-etc-swift\") pod \"525ebe62-dc27-40fa-97a9-31346c6145a1\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.131881 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/525ebe62-dc27-40fa-97a9-31346c6145a1-run-httpd\") pod \"525ebe62-dc27-40fa-97a9-31346c6145a1\" (UID: \"525ebe62-dc27-40fa-97a9-31346c6145a1\") " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.132279 4773 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.132313 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-smchj\" (UniqueName: \"kubernetes.io/projected/fbc373ba-8a4b-44be-a687-63de038d5fa3-kube-api-access-smchj\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.132325 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbc373ba-8a4b-44be-a687-63de038d5fa3-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.132335 4773 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/96efe0ff-5c9e-465c-8e86-80035697b7d0-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.137950 4773 scope.go:117] "RemoveContainer" containerID="aee88f91e8816d6bd6a9431e8b236642a9b4210f82d3835d5b43ad50cab3ce23" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.140941 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/525ebe62-dc27-40fa-97a9-31346c6145a1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "525ebe62-dc27-40fa-97a9-31346c6145a1" (UID: "525ebe62-dc27-40fa-97a9-31346c6145a1"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.142678 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/525ebe62-dc27-40fa-97a9-31346c6145a1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "525ebe62-dc27-40fa-97a9-31346c6145a1" (UID: "525ebe62-dc27-40fa-97a9-31346c6145a1"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.145575 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-77688c4666-rzbr7" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.162813 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/525ebe62-dc27-40fa-97a9-31346c6145a1-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "525ebe62-dc27-40fa-97a9-31346c6145a1" (UID: "525ebe62-dc27-40fa-97a9-31346c6145a1"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.163276 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/525ebe62-dc27-40fa-97a9-31346c6145a1-kube-api-access-6w67f" (OuterVolumeSpecName: "kube-api-access-6w67f") pod "525ebe62-dc27-40fa-97a9-31346c6145a1" (UID: "525ebe62-dc27-40fa-97a9-31346c6145a1"). InnerVolumeSpecName "kube-api-access-6w67f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.180261 4773 generic.go:334] "Generic (PLEG): container finished" podID="96efe0ff-5c9e-465c-8e86-80035697b7d0" containerID="ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29" exitCode=0 Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.180354 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"96efe0ff-5c9e-465c-8e86-80035697b7d0","Type":"ContainerDied","Data":"ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29"} Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.180379 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"96efe0ff-5c9e-465c-8e86-80035697b7d0","Type":"ContainerDied","Data":"45996e5e28aa895dfe87cd250c2a4f1b1f42cc55d4a8859f7ff7a4470569a489"} Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.180453 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.200109 4773 generic.go:334] "Generic (PLEG): container finished" podID="1683acba-c129-4a7c-866c-421cdb0e6505" containerID="8f2f6fe57896d8e3136683fd067e394563605022d36d5828817ffbe5148b30a0" exitCode=0 Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.200182 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"1683acba-c129-4a7c-866c-421cdb0e6505","Type":"ContainerDied","Data":"8f2f6fe57896d8e3136683fd067e394563605022d36d5828817ffbe5148b30a0"} Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.229082 4773 generic.go:334] "Generic (PLEG): container finished" podID="04725b89-f1ec-45f8-a69a-5427230da499" containerID="f0274aaa2adaf6a4656046810ff93c7b2ab941e940754cbad018d84fab5c25eb" exitCode=0 Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.229176 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"04725b89-f1ec-45f8-a69a-5427230da499","Type":"ContainerDied","Data":"f0274aaa2adaf6a4656046810ff93c7b2ab941e940754cbad018d84fab5c25eb"} Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.236060 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/878a6ce0-f293-4690-9049-c90155c56ff3-logs\") pod \"878a6ce0-f293-4690-9049-c90155c56ff3\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.236144 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-st7c6\" (UniqueName: \"kubernetes.io/projected/878a6ce0-f293-4690-9049-c90155c56ff3-kube-api-access-st7c6\") pod \"878a6ce0-f293-4690-9049-c90155c56ff3\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.236179 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-internal-tls-certs\") pod \"878a6ce0-f293-4690-9049-c90155c56ff3\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.236197 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-public-tls-certs\") pod \"878a6ce0-f293-4690-9049-c90155c56ff3\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.236240 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-scripts\") pod \"878a6ce0-f293-4690-9049-c90155c56ff3\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.236264 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-config-data\") pod \"878a6ce0-f293-4690-9049-c90155c56ff3\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.237478 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gkpw\" (UniqueName: \"kubernetes.io/projected/d5686dfc-618a-4996-a4b0-b1bd31365084-kube-api-access-2gkpw\") pod 
\"d5686dfc-618a-4996-a4b0-b1bd31365084\" (UID: \"d5686dfc-618a-4996-a4b0-b1bd31365084\") " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.237528 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-combined-ca-bundle\") pod \"878a6ce0-f293-4690-9049-c90155c56ff3\" (UID: \"878a6ce0-f293-4690-9049-c90155c56ff3\") " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.237599 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5686dfc-618a-4996-a4b0-b1bd31365084-operator-scripts\") pod \"d5686dfc-618a-4996-a4b0-b1bd31365084\" (UID: \"d5686dfc-618a-4996-a4b0-b1bd31365084\") " Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.238122 4773 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/525ebe62-dc27-40fa-97a9-31346c6145a1-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.238139 4773 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/525ebe62-dc27-40fa-97a9-31346c6145a1-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.238148 4773 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/525ebe62-dc27-40fa-97a9-31346c6145a1-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.238157 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6w67f\" (UniqueName: \"kubernetes.io/projected/525ebe62-dc27-40fa-97a9-31346c6145a1-kube-api-access-6w67f\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.238912 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5686dfc-618a-4996-a4b0-b1bd31365084-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d5686dfc-618a-4996-a4b0-b1bd31365084" (UID: "d5686dfc-618a-4996-a4b0-b1bd31365084"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.247075 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/878a6ce0-f293-4690-9049-c90155c56ff3-logs" (OuterVolumeSpecName: "logs") pod "878a6ce0-f293-4690-9049-c90155c56ff3" (UID: "878a6ce0-f293-4690-9049-c90155c56ff3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.250130 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5686dfc-618a-4996-a4b0-b1bd31365084-kube-api-access-2gkpw" (OuterVolumeSpecName: "kube-api-access-2gkpw") pod "d5686dfc-618a-4996-a4b0-b1bd31365084" (UID: "d5686dfc-618a-4996-a4b0-b1bd31365084"). InnerVolumeSpecName "kube-api-access-2gkpw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.252732 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.253318 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8d41353c-ea0e-4005-acec-dc25faae5840" containerName="ceilometer-central-agent" containerID="cri-o://a368e5d7422ab66afdc22e40b44df9f6f725ea927e78f7b574782ff86b765058" gracePeriod=30 Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.254040 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8d41353c-ea0e-4005-acec-dc25faae5840" containerName="proxy-httpd" containerID="cri-o://62b857be6f7515df70b1cb2751883e724fe75bec469449ea75803a2227f3791b" gracePeriod=30 Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.254188 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8d41353c-ea0e-4005-acec-dc25faae5840" containerName="sg-core" containerID="cri-o://45b87d7500c47bd01c2b3bc23189b837e6db471a85861c82d269d27b75481b8d" gracePeriod=30 Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.254273 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8d41353c-ea0e-4005-acec-dc25faae5840" containerName="ceilometer-notification-agent" containerID="cri-o://1210d2bb5e517e818502dcfc446a53647a2fe6e83297b49ee849c6965b74a547" gracePeriod=30 Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.254601 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-57dd79856-kc66z" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.255989 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-8744-account-create-update-4ljts" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.281562 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.281768 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="fd4905ba-f2f9-42c0-a21b-fabf4046af68" containerName="kube-state-metrics" containerID="cri-o://55704dc681b7f0b63c484b59d3778f9c323873b4e2e6cccd2b1dc30aecbc8f1e" gracePeriod=30 Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.317976 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/878a6ce0-f293-4690-9049-c90155c56ff3-kube-api-access-st7c6" (OuterVolumeSpecName: "kube-api-access-st7c6") pod "878a6ce0-f293-4690-9049-c90155c56ff3" (UID: "878a6ce0-f293-4690-9049-c90155c56ff3"). InnerVolumeSpecName "kube-api-access-st7c6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.318946 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-scripts" (OuterVolumeSpecName: "scripts") pod "878a6ce0-f293-4690-9049-c90155c56ff3" (UID: "878a6ce0-f293-4690-9049-c90155c56ff3"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.340168 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5686dfc-618a-4996-a4b0-b1bd31365084-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.340197 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/878a6ce0-f293-4690-9049-c90155c56ff3-logs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.340207 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-st7c6\" (UniqueName: \"kubernetes.io/projected/878a6ce0-f293-4690-9049-c90155c56ff3-kube-api-access-st7c6\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.340216 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.340225 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gkpw\" (UniqueName: \"kubernetes.io/projected/d5686dfc-618a-4996-a4b0-b1bd31365084-kube-api-access-2gkpw\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.351618 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "525ebe62-dc27-40fa-97a9-31346c6145a1" (UID: "525ebe62-dc27-40fa-97a9-31346c6145a1"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.444487 4773 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.482458 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-1b20-account-create-update-5bbc6"] Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.520511 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-1b20-account-create-update-5bbc6"] Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.537362 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-1b20-account-create-update-vrv89"] Jan 22 12:19:54 crc kubenswrapper[4773]: E0122 12:19:54.537825 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96efe0ff-5c9e-465c-8e86-80035697b7d0" containerName="mysql-bootstrap" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.537838 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="96efe0ff-5c9e-465c-8e86-80035697b7d0" containerName="mysql-bootstrap" Jan 22 12:19:54 crc kubenswrapper[4773]: E0122 12:19:54.537858 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="878a6ce0-f293-4690-9049-c90155c56ff3" containerName="placement-log" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.537864 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="878a6ce0-f293-4690-9049-c90155c56ff3" containerName="placement-log" Jan 22 12:19:54 crc kubenswrapper[4773]: E0122 12:19:54.537878 4773 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="878a6ce0-f293-4690-9049-c90155c56ff3" containerName="placement-api" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.537884 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="878a6ce0-f293-4690-9049-c90155c56ff3" containerName="placement-api" Jan 22 12:19:54 crc kubenswrapper[4773]: E0122 12:19:54.537898 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="525ebe62-dc27-40fa-97a9-31346c6145a1" containerName="proxy-server" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.537904 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="525ebe62-dc27-40fa-97a9-31346c6145a1" containerName="proxy-server" Jan 22 12:19:54 crc kubenswrapper[4773]: E0122 12:19:54.537921 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="525ebe62-dc27-40fa-97a9-31346c6145a1" containerName="proxy-httpd" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.537926 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="525ebe62-dc27-40fa-97a9-31346c6145a1" containerName="proxy-httpd" Jan 22 12:19:54 crc kubenswrapper[4773]: E0122 12:19:54.537948 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96efe0ff-5c9e-465c-8e86-80035697b7d0" containerName="galera" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.537954 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="96efe0ff-5c9e-465c-8e86-80035697b7d0" containerName="galera" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.538103 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="96efe0ff-5c9e-465c-8e86-80035697b7d0" containerName="galera" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.538116 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="525ebe62-dc27-40fa-97a9-31346c6145a1" containerName="proxy-server" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.538124 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="525ebe62-dc27-40fa-97a9-31346c6145a1" containerName="proxy-httpd" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.538134 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="878a6ce0-f293-4690-9049-c90155c56ff3" containerName="placement-log" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.538145 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="878a6ce0-f293-4690-9049-c90155c56ff3" containerName="placement-api" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.540801 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-1b20-account-create-update-vrv89" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.545612 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Jan 22 12:19:54 crc kubenswrapper[4773]: E0122 12:19:54.546268 4773 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 22 12:19:54 crc kubenswrapper[4773]: E0122 12:19:54.546356 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/fbe5483b-39b1-4c4c-add8-d4ec00c26108-operator-scripts podName:fbe5483b-39b1-4c4c-add8-d4ec00c26108 nodeName:}" failed. No retries permitted until 2026-01-22 12:19:58.546343586 +0000 UTC m=+1506.124459411 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/fbe5483b-39b1-4c4c-add8-d4ec00c26108-operator-scripts") pod "root-account-create-update-n4t5s" (UID: "fbe5483b-39b1-4c4c-add8-d4ec00c26108") : configmap "openstack-cell1-scripts" not found Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.547316 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="311884d0-65e1-4737-a7ba-efc94510b90b" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.207:8775/\": read tcp 10.217.0.2:47300->10.217.0.207:8775: read: connection reset by peer" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.547573 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="311884d0-65e1-4737-a7ba-efc94510b90b" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.207:8775/\": read tcp 10.217.0.2:47306->10.217.0.207:8775: read: connection reset by peer" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.553482 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-config-data" (OuterVolumeSpecName: "config-data") pod "525ebe62-dc27-40fa-97a9-31346c6145a1" (UID: "525ebe62-dc27-40fa-97a9-31346c6145a1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.554667 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-v7xxw"] Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.571600 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-v7xxw"] Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.591431 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-pjn7v"] Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.592296 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-pjn7v"] Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.595217 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-config-data" (OuterVolumeSpecName: "config-data") pod "878a6ce0-f293-4690-9049-c90155c56ff3" (UID: "878a6ce0-f293-4690-9049-c90155c56ff3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.600960 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-585c9c99b7-xwz6v"] Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.601231 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-585c9c99b7-xwz6v" podUID="3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef" containerName="keystone-api" containerID="cri-o://31c8f654fc7fe97b41b77b38806f5d2076b073ea03d352dc5919cdfee9ad2038" gracePeriod=30 Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.605362 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-67f94f9664-cd544" podUID="62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.163:9311/healthcheck\": read tcp 10.217.0.2:57684->10.217.0.163:9311: read: connection reset by peer" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.605571 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-67f94f9664-cd544" podUID="62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.163:9311/healthcheck\": read tcp 10.217.0.2:57676->10.217.0.163:9311: read: connection reset by peer" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.613462 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-1b20-account-create-update-vrv89"] Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.625698 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "525ebe62-dc27-40fa-97a9-31346c6145a1" (UID: "525ebe62-dc27-40fa-97a9-31346c6145a1"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.631014 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "525ebe62-dc27-40fa-97a9-31346c6145a1" (UID: "525ebe62-dc27-40fa-97a9-31346c6145a1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.656592 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "878a6ce0-f293-4690-9049-c90155c56ff3" (UID: "878a6ce0-f293-4690-9049-c90155c56ff3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.664608 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.668898 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-operator-scripts\") pod \"keystone-1b20-account-create-update-vrv89\" (UID: \"01b6a60a-2c93-4443-80e5-f3dd77cf7c10\") " pod="openstack/keystone-1b20-account-create-update-vrv89" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.668960 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8rhh\" (UniqueName: \"kubernetes.io/projected/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-kube-api-access-d8rhh\") pod \"keystone-1b20-account-create-update-vrv89\" (UID: \"01b6a60a-2c93-4443-80e5-f3dd77cf7c10\") " pod="openstack/keystone-1b20-account-create-update-vrv89" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.669066 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.669085 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.669098 4773 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.669117 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/525ebe62-dc27-40fa-97a9-31346c6145a1-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.669126 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.717480 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "878a6ce0-f293-4690-9049-c90155c56ff3" (UID: "878a6ce0-f293-4690-9049-c90155c56ff3"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.753019 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="209de0ff-2316-479c-8ff1-62e1ed260807" path="/var/lib/kubelet/pods/209de0ff-2316-479c-8ff1-62e1ed260807/volumes" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.754006 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37262aee-18bc-423e-9dac-272af09de237" path="/var/lib/kubelet/pods/37262aee-18bc-423e-9dac-272af09de237/volumes" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.758352 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "878a6ce0-f293-4690-9049-c90155c56ff3" (UID: "878a6ce0-f293-4690-9049-c90155c56ff3"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.764180 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40f482a8-ca15-4e6f-a4af-4579c5f66101" path="/var/lib/kubelet/pods/40f482a8-ca15-4e6f-a4af-4579c5f66101/volumes" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.764885 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="885b1538-0800-47c6-bedd-284b67cd08ca" path="/var/lib/kubelet/pods/885b1538-0800-47c6-bedd-284b67cd08ca/volumes" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.772124 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-operator-scripts\") pod \"keystone-1b20-account-create-update-vrv89\" (UID: \"01b6a60a-2c93-4443-80e5-f3dd77cf7c10\") " pod="openstack/keystone-1b20-account-create-update-vrv89" Jan 22 12:19:54 crc kubenswrapper[4773]: E0122 12:19:54.772225 4773 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.772961 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8rhh\" (UniqueName: \"kubernetes.io/projected/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-kube-api-access-d8rhh\") pod \"keystone-1b20-account-create-update-vrv89\" (UID: \"01b6a60a-2c93-4443-80e5-f3dd77cf7c10\") " pod="openstack/keystone-1b20-account-create-update-vrv89" Jan 22 12:19:54 crc kubenswrapper[4773]: E0122 12:19:54.772985 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-operator-scripts podName:01b6a60a-2c93-4443-80e5-f3dd77cf7c10 nodeName:}" failed. No retries permitted until 2026-01-22 12:19:55.27296167 +0000 UTC m=+1502.851077495 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-operator-scripts") pod "keystone-1b20-account-create-update-vrv89" (UID: "01b6a60a-2c93-4443-80e5-f3dd77cf7c10") : configmap "openstack-scripts" not found Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.773322 4773 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.773351 4773 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/878a6ce0-f293-4690-9049-c90155c56ff3-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.773539 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acfbd9c7-d136-4478-a181-7e9fb3033557" path="/var/lib/kubelet/pods/acfbd9c7-d136-4478-a181-7e9fb3033557/volumes" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.774250 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc19e578-7984-4a19-bc3b-dfb8b707886e" path="/var/lib/kubelet/pods/cc19e578-7984-4a19-bc3b-dfb8b707886e/volumes" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.776174 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4868de7-6200-4cd9-948b-e0cdbbac5838" path="/var/lib/kubelet/pods/e4868de7-6200-4cd9-948b-e0cdbbac5838/volumes" Jan 22 12:19:54 crc kubenswrapper[4773]: E0122 12:19:54.776401 4773 projected.go:194] Error preparing data for projected volume kube-api-access-d8rhh for pod openstack/keystone-1b20-account-create-update-vrv89: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 22 12:19:54 crc kubenswrapper[4773]: E0122 12:19:54.776467 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-kube-api-access-d8rhh podName:01b6a60a-2c93-4443-80e5-f3dd77cf7c10 nodeName:}" failed. No retries permitted until 2026-01-22 12:19:55.276449908 +0000 UTC m=+1502.854565733 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-d8rhh" (UniqueName: "kubernetes.io/projected/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-kube-api-access-d8rhh") pod "keystone-1b20-account-create-update-vrv89" (UID: "01b6a60a-2c93-4443-80e5-f3dd77cf7c10") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.780132 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efd60909-f66c-4dc2-948f-5f63c735ab6e" path="/var/lib/kubelet/pods/efd60909-f66c-4dc2-948f-5f63c735ab6e/volumes" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.783182 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd0c3eeb-ff3e-4718-932d-6453b4d6f189" path="/var/lib/kubelet/pods/fd0c3eeb-ff3e-4718-932d-6453b4d6f189/volumes" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.789213 4773 scope.go:117] "RemoveContainer" containerID="521549d44714482372b02d7ce753faaca6ca987699d2815bb433305658a9c8f6" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.790131 4773 util.go:48] "No ready sandbox for pod can be found. 
Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.793644 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-fp8kx"]
Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.799955 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-fp8kx"]
Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.800076 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-1b20-account-create-update-vrv89"]
Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.800175 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-97pzr"]
Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.874273 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1683acba-c129-4a7c-866c-421cdb0e6505-combined-ca-bundle\") pod \"1683acba-c129-4a7c-866c-421cdb0e6505\" (UID: \"1683acba-c129-4a7c-866c-421cdb0e6505\") "
Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.875059 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg94q\" (UniqueName: \"kubernetes.io/projected/1683acba-c129-4a7c-866c-421cdb0e6505-kube-api-access-qg94q\") pod \"1683acba-c129-4a7c-866c-421cdb0e6505\" (UID: \"1683acba-c129-4a7c-866c-421cdb0e6505\") "
Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.875090 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1683acba-c129-4a7c-866c-421cdb0e6505-config-data\") pod \"1683acba-c129-4a7c-866c-421cdb0e6505\" (UID: \"1683acba-c129-4a7c-866c-421cdb0e6505\") "
Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.914746 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1683acba-c129-4a7c-866c-421cdb0e6505-kube-api-access-qg94q" (OuterVolumeSpecName: "kube-api-access-qg94q") pod "1683acba-c129-4a7c-866c-421cdb0e6505" (UID: "1683acba-c129-4a7c-866c-421cdb0e6505"). InnerVolumeSpecName "kube-api-access-qg94q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.921621 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1683acba-c129-4a7c-866c-421cdb0e6505-config-data" (OuterVolumeSpecName: "config-data") pod "1683acba-c129-4a7c-866c-421cdb0e6505" (UID: "1683acba-c129-4a7c-866c-421cdb0e6505"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.931511 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1683acba-c129-4a7c-866c-421cdb0e6505-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1683acba-c129-4a7c-866c-421cdb0e6505" (UID: "1683acba-c129-4a7c-866c-421cdb0e6505"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.934439 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-97pzr"]
Jan 22 12:19:54 crc kubenswrapper[4773]: E0122 12:19:54.967647 4773 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Jan 22 12:19:54 crc kubenswrapper[4773]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash
Jan 22 12:19:54 crc kubenswrapper[4773]: 
Jan 22 12:19:54 crc kubenswrapper[4773]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh
Jan 22 12:19:54 crc kubenswrapper[4773]: 
Jan 22 12:19:54 crc kubenswrapper[4773]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."}
Jan 22 12:19:54 crc kubenswrapper[4773]: 
Jan 22 12:19:54 crc kubenswrapper[4773]: MYSQL_CMD="mysql -h -u root -P 3306"
Jan 22 12:19:54 crc kubenswrapper[4773]: 
Jan 22 12:19:54 crc kubenswrapper[4773]: if [ -n "" ]; then
Jan 22 12:19:54 crc kubenswrapper[4773]: GRANT_DATABASE=""
Jan 22 12:19:54 crc kubenswrapper[4773]: else
Jan 22 12:19:54 crc kubenswrapper[4773]: GRANT_DATABASE="*"
Jan 22 12:19:54 crc kubenswrapper[4773]: fi
Jan 22 12:19:54 crc kubenswrapper[4773]: 
Jan 22 12:19:54 crc kubenswrapper[4773]: # going for maximum compatibility here:
Jan 22 12:19:54 crc kubenswrapper[4773]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used
Jan 22 12:19:54 crc kubenswrapper[4773]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not
Jan 22 12:19:54 crc kubenswrapper[4773]: # 3. create user with CREATE but then do all password and TLS with ALTER to
Jan 22 12:19:54 crc kubenswrapper[4773]: # support updates
Jan 22 12:19:54 crc kubenswrapper[4773]: 
Jan 22 12:19:54 crc kubenswrapper[4773]: $MYSQL_CMD < logger="UnhandledError"
Jan 22 12:19:54 crc kubenswrapper[4773]: E0122 12:19:54.969077 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"openstack-mariadb-root-db-secret\\\" not found\"" pod="openstack/root-account-create-update-97pzr" podUID="bda45287-f56e-4031-addb-068efe159a75"
Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.981921 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1683acba-c129-4a7c-866c-421cdb0e6505-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.981955 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg94q\" (UniqueName: \"kubernetes.io/projected/1683acba-c129-4a7c-866c-421cdb0e6505-kube-api-access-qg94q\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.981965 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1683acba-c129-4a7c-866c-421cdb0e6505-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.997154 4773 scope.go:117] "RemoveContainer" containerID="aee88f91e8816d6bd6a9431e8b236642a9b4210f82d3835d5b43ad50cab3ce23"
Jan 22 12:19:54 crc kubenswrapper[4773]: E0122 12:19:54.999116 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aee88f91e8816d6bd6a9431e8b236642a9b4210f82d3835d5b43ad50cab3ce23\": container with ID starting with aee88f91e8816d6bd6a9431e8b236642a9b4210f82d3835d5b43ad50cab3ce23 not found: ID does not exist" containerID="aee88f91e8816d6bd6a9431e8b236642a9b4210f82d3835d5b43ad50cab3ce23"
\"aee88f91e8816d6bd6a9431e8b236642a9b4210f82d3835d5b43ad50cab3ce23\": container with ID starting with aee88f91e8816d6bd6a9431e8b236642a9b4210f82d3835d5b43ad50cab3ce23 not found: ID does not exist" containerID="aee88f91e8816d6bd6a9431e8b236642a9b4210f82d3835d5b43ad50cab3ce23" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.999171 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aee88f91e8816d6bd6a9431e8b236642a9b4210f82d3835d5b43ad50cab3ce23"} err="failed to get container status \"aee88f91e8816d6bd6a9431e8b236642a9b4210f82d3835d5b43ad50cab3ce23\": rpc error: code = NotFound desc = could not find container \"aee88f91e8816d6bd6a9431e8b236642a9b4210f82d3835d5b43ad50cab3ce23\": container with ID starting with aee88f91e8816d6bd6a9431e8b236642a9b4210f82d3835d5b43ad50cab3ce23 not found: ID does not exist" Jan 22 12:19:54 crc kubenswrapper[4773]: I0122 12:19:54.999196 4773 scope.go:117] "RemoveContainer" containerID="521549d44714482372b02d7ce753faaca6ca987699d2815bb433305658a9c8f6" Jan 22 12:19:55 crc kubenswrapper[4773]: E0122 12:19:55.000382 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"521549d44714482372b02d7ce753faaca6ca987699d2815bb433305658a9c8f6\": container with ID starting with 521549d44714482372b02d7ce753faaca6ca987699d2815bb433305658a9c8f6 not found: ID does not exist" containerID="521549d44714482372b02d7ce753faaca6ca987699d2815bb433305658a9c8f6" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.000419 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"521549d44714482372b02d7ce753faaca6ca987699d2815bb433305658a9c8f6"} err="failed to get container status \"521549d44714482372b02d7ce753faaca6ca987699d2815bb433305658a9c8f6\": rpc error: code = NotFound desc = could not find container \"521549d44714482372b02d7ce753faaca6ca987699d2815bb433305658a9c8f6\": container with ID starting with 521549d44714482372b02d7ce753faaca6ca987699d2815bb433305658a9c8f6 not found: ID does not exist" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.000448 4773 scope.go:117] "RemoveContainer" containerID="ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.035072 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="b9643af6-36f5-46b0-9cca-b9fe67a689dd" containerName="galera" containerID="cri-o://7abeeb25a28ee1cf287a8f8112c5b0ed49bd1977fc5bfc686b83f3ac4dd09463" gracePeriod=30 Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.041158 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-57dd79856-kc66z"] Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.049681 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.056682 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-57dd79856-kc66z"] Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.059755 4773 scope.go:117] "RemoveContainer" containerID="6ceaffe269752f6c4d2b15c8cfc99f623be44b71b604ee5dd39c7d5d0c22911e" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.073849 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-n4t5s" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.096898 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-0af4-account-create-update-gctq5"] Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.113241 4773 scope.go:117] "RemoveContainer" containerID="ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29" Jan 22 12:19:55 crc kubenswrapper[4773]: E0122 12:19:55.115905 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29\": container with ID starting with ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29 not found: ID does not exist" containerID="ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.115952 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29"} err="failed to get container status \"ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29\": rpc error: code = NotFound desc = could not find container \"ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29\": container with ID starting with ed8f023993b159294e4ecf93f5600cc37f123076d6571d248c250ac6ae28fc29 not found: ID does not exist" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.116507 4773 scope.go:117] "RemoveContainer" containerID="6ceaffe269752f6c4d2b15c8cfc99f623be44b71b604ee5dd39c7d5d0c22911e" Jan 22 12:19:55 crc kubenswrapper[4773]: E0122 12:19:55.119666 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ceaffe269752f6c4d2b15c8cfc99f623be44b71b604ee5dd39c7d5d0c22911e\": container with ID starting with 6ceaffe269752f6c4d2b15c8cfc99f623be44b71b604ee5dd39c7d5d0c22911e not found: ID does not exist" containerID="6ceaffe269752f6c4d2b15c8cfc99f623be44b71b604ee5dd39c7d5d0c22911e" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.119706 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ceaffe269752f6c4d2b15c8cfc99f623be44b71b604ee5dd39c7d5d0c22911e"} err="failed to get container status \"6ceaffe269752f6c4d2b15c8cfc99f623be44b71b604ee5dd39c7d5d0c22911e\": rpc error: code = NotFound desc = could not find container \"6ceaffe269752f6c4d2b15c8cfc99f623be44b71b604ee5dd39c7d5d0c22911e\": container with ID starting with 6ceaffe269752f6c4d2b15c8cfc99f623be44b71b604ee5dd39c7d5d0c22911e not found: ID does not exist" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.119797 4773 util.go:48] "No ready sandbox for pod can be found. 
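[Note] The "Killing container with a grace period ... gracePeriod=30" entry for openstack-galera-0 is the standard termination sequence: deliver SIGTERM, wait up to the grace period, then fall back to SIGKILL. A host-process analogue of that sequence (kubelet itself goes through the CRI StopContainer call rather than raw signals; the helper below is illustrative only):

    package main

    import (
        "os/exec"
        "syscall"
        "time"
    )

    // stopGracefully asks the process to exit with SIGTERM, then escalates
    // to SIGKILL once the grace period elapses, like gracePeriod=30 above.
    func stopGracefully(cmd *exec.Cmd, grace time.Duration) error {
        if err := cmd.Process.Signal(syscall.SIGTERM); err != nil {
            return err
        }
        done := make(chan error, 1)
        go func() { done <- cmd.Wait() }()
        select {
        case err := <-done:
            return err // exited within the grace period
        case <-time.After(grace):
            return cmd.Process.Kill() // grace expired: SIGKILL
        }
    }

    func main() {
        cmd := exec.Command("sleep", "60")
        if err := cmd.Start(); err != nil {
            panic(err)
        }
        _ = stopGracefully(cmd, 2*time.Second)
    }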
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.127135 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-0af4-account-create-update-gctq5"]
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.155718 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-8cf3-account-create-update-sg972"]
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.164450 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-8cf3-account-create-update-sg972"]
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.189002 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-ef71-account-create-update-p6lhw"]
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.189829 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-public-tls-certs\") pod \"04725b89-f1ec-45f8-a69a-5427230da499\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.189852 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/04725b89-f1ec-45f8-a69a-5427230da499-etc-machine-id\") pod \"04725b89-f1ec-45f8-a69a-5427230da499\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.189876 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-combined-ca-bundle\") pod \"04725b89-f1ec-45f8-a69a-5427230da499\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.189914 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-internal-tls-certs\") pod \"04725b89-f1ec-45f8-a69a-5427230da499\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.189945 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbe5483b-39b1-4c4c-add8-d4ec00c26108-operator-scripts\") pod \"fbe5483b-39b1-4c4c-add8-d4ec00c26108\" (UID: \"fbe5483b-39b1-4c4c-add8-d4ec00c26108\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.189966 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-scripts\") pod \"04725b89-f1ec-45f8-a69a-5427230da499\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.190007 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04725b89-f1ec-45f8-a69a-5427230da499-logs\") pod \"04725b89-f1ec-45f8-a69a-5427230da499\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.190032 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vg7m6\" (UniqueName: \"kubernetes.io/projected/fbe5483b-39b1-4c4c-add8-d4ec00c26108-kube-api-access-vg7m6\") pod \"fbe5483b-39b1-4c4c-add8-d4ec00c26108\" (UID: \"fbe5483b-39b1-4c4c-add8-d4ec00c26108\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.190050 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-config-data-custom\") pod \"04725b89-f1ec-45f8-a69a-5427230da499\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.190070 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tczc4\" (UniqueName: \"kubernetes.io/projected/04725b89-f1ec-45f8-a69a-5427230da499-kube-api-access-tczc4\") pod \"04725b89-f1ec-45f8-a69a-5427230da499\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.190184 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-config-data\") pod \"04725b89-f1ec-45f8-a69a-5427230da499\" (UID: \"04725b89-f1ec-45f8-a69a-5427230da499\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.190595 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.190608 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvcs6\" (UniqueName: \"kubernetes.io/projected/8510a6ae-d5e7-4da2-9351-1f0f848d6fef-kube-api-access-fvcs6\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.193682 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbe5483b-39b1-4c4c-add8-d4ec00c26108-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fbe5483b-39b1-4c4c-add8-d4ec00c26108" (UID: "fbe5483b-39b1-4c4c-add8-d4ec00c26108"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.202156 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/04725b89-f1ec-45f8-a69a-5427230da499-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "04725b89-f1ec-45f8-a69a-5427230da499" (UID: "04725b89-f1ec-45f8-a69a-5427230da499"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.202958 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04725b89-f1ec-45f8-a69a-5427230da499-logs" (OuterVolumeSpecName: "logs") pod "04725b89-f1ec-45f8-a69a-5427230da499" (UID: "04725b89-f1ec-45f8-a69a-5427230da499"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.206395 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-ef71-account-create-update-p6lhw"]
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.215604 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-scripts" (OuterVolumeSpecName: "scripts") pod "04725b89-f1ec-45f8-a69a-5427230da499" (UID: "04725b89-f1ec-45f8-a69a-5427230da499"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.216566 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbe5483b-39b1-4c4c-add8-d4ec00c26108-kube-api-access-vg7m6" (OuterVolumeSpecName: "kube-api-access-vg7m6") pod "fbe5483b-39b1-4c4c-add8-d4ec00c26108" (UID: "fbe5483b-39b1-4c4c-add8-d4ec00c26108"). InnerVolumeSpecName "kube-api-access-vg7m6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.218515 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "04725b89-f1ec-45f8-a69a-5427230da499" (UID: "04725b89-f1ec-45f8-a69a-5427230da499"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.219628 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04725b89-f1ec-45f8-a69a-5427230da499-kube-api-access-tczc4" (OuterVolumeSpecName: "kube-api-access-tczc4") pod "04725b89-f1ec-45f8-a69a-5427230da499" (UID: "04725b89-f1ec-45f8-a69a-5427230da499"). InnerVolumeSpecName "kube-api-access-tczc4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.267778 4773 generic.go:334] "Generic (PLEG): container finished" podID="fd4905ba-f2f9-42c0-a21b-fabf4046af68" containerID="55704dc681b7f0b63c484b59d3778f9c323873b4e2e6cccd2b1dc30aecbc8f1e" exitCode=2 Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.267884 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-eb71-account-create-update-l8944"] Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.267908 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"fd4905ba-f2f9-42c0-a21b-fabf4046af68","Type":"ContainerDied","Data":"55704dc681b7f0b63c484b59d3778f9c323873b4e2e6cccd2b1dc30aecbc8f1e"} Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.267926 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"fd4905ba-f2f9-42c0-a21b-fabf4046af68","Type":"ContainerDied","Data":"cdb59e48e39a3e500054d64b9cd5f42dbe1b8100767be6c020c66e522d3c311d"} Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.267939 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cdb59e48e39a3e500054d64b9cd5f42dbe1b8100767be6c020c66e522d3c311d" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.270799 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"1683acba-c129-4a7c-866c-421cdb0e6505","Type":"ContainerDied","Data":"61976de158339a4cc841c341aaa4bdc66e39f398875b71d0a1ee61f7639234b8"} Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.270831 4773 scope.go:117] "RemoveContainer" containerID="8f2f6fe57896d8e3136683fd067e394563605022d36d5828817ffbe5148b30a0" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.270904 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.273644 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-77688c4666-rzbr7" event={"ID":"878a6ce0-f293-4690-9049-c90155c56ff3","Type":"ContainerDied","Data":"9ba9b8d0dfb5620da2a7deab5d6a462091a33d2acd252964575c7a3ceda5b006"} Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.273745 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-77688c4666-rzbr7" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.276200 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-n4t5s" event={"ID":"fbe5483b-39b1-4c4c-add8-d4ec00c26108","Type":"ContainerDied","Data":"d8f05aadaf974c3a93b2a103e7c43adb2b1649ecbfb482d64bc1bf60fd9dc763"} Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.276246 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-n4t5s" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.283855 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"04725b89-f1ec-45f8-a69a-5427230da499","Type":"ContainerDied","Data":"b1397fab45be8a037931e17b75ab4a6ea7251bb45086508b972673aa2a07ba2b"} Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.283951 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.286242 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-eb71-account-create-update-l8944"] Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.287646 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "04725b89-f1ec-45f8-a69a-5427230da499" (UID: "04725b89-f1ec-45f8-a69a-5427230da499"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.288469 4773 generic.go:334] "Generic (PLEG): container finished" podID="311884d0-65e1-4737-a7ba-efc94510b90b" containerID="84743181e9bcf205d60cacb07eb6ab1bf0f5fac0214bec2c405bd822f753600e" exitCode=0 Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.288529 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"311884d0-65e1-4737-a7ba-efc94510b90b","Type":"ContainerDied","Data":"84743181e9bcf205d60cacb07eb6ab1bf0f5fac0214bec2c405bd822f753600e"} Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.288559 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"311884d0-65e1-4737-a7ba-efc94510b90b","Type":"ContainerDied","Data":"e659b9e9c635c64463ea7fcb4a22cb02a6f069b613f8ddce465894381961fdaa"} Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.288573 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e659b9e9c635c64463ea7fcb4a22cb02a6f069b613f8ddce465894381961fdaa" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.291378 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqqtd\" (UniqueName: \"kubernetes.io/projected/b9c73637-566a-47b5-bba6-97948a973a47-kube-api-access-jqqtd\") pod \"b9c73637-566a-47b5-bba6-97948a973a47\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.291554 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9c73637-566a-47b5-bba6-97948a973a47-httpd-run\") pod \"b9c73637-566a-47b5-bba6-97948a973a47\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.291589 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"b9c73637-566a-47b5-bba6-97948a973a47\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.291694 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-internal-tls-certs\") pod \"b9c73637-566a-47b5-bba6-97948a973a47\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.291727 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-combined-ca-bundle\") pod \"b9c73637-566a-47b5-bba6-97948a973a47\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.291805 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-scripts\") pod \"b9c73637-566a-47b5-bba6-97948a973a47\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.291841 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9c73637-566a-47b5-bba6-97948a973a47-logs\") pod \"b9c73637-566a-47b5-bba6-97948a973a47\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") " Jan 22 
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.291930 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-config-data\") pod \"b9c73637-566a-47b5-bba6-97948a973a47\" (UID: \"b9c73637-566a-47b5-bba6-97948a973a47\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.292195 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-operator-scripts\") pod \"keystone-1b20-account-create-update-vrv89\" (UID: \"01b6a60a-2c93-4443-80e5-f3dd77cf7c10\") " pod="openstack/keystone-1b20-account-create-update-vrv89"
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.292236 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8rhh\" (UniqueName: \"kubernetes.io/projected/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-kube-api-access-d8rhh\") pod \"keystone-1b20-account-create-update-vrv89\" (UID: \"01b6a60a-2c93-4443-80e5-f3dd77cf7c10\") " pod="openstack/keystone-1b20-account-create-update-vrv89"
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.292533 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbe5483b-39b1-4c4c-add8-d4ec00c26108-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.292553 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.292565 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04725b89-f1ec-45f8-a69a-5427230da499-logs\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.292579 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-config-data-custom\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.292592 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vg7m6\" (UniqueName: \"kubernetes.io/projected/fbe5483b-39b1-4c4c-add8-d4ec00c26108-kube-api-access-vg7m6\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.292608 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tczc4\" (UniqueName: \"kubernetes.io/projected/04725b89-f1ec-45f8-a69a-5427230da499-kube-api-access-tczc4\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.292620 4773 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/04725b89-f1ec-45f8-a69a-5427230da499-etc-machine-id\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.292631 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.296786 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-config-data" (OuterVolumeSpecName: "config-data") pod "04725b89-f1ec-45f8-a69a-5427230da499" (UID: "04725b89-f1ec-45f8-a69a-5427230da499"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: E0122 12:19:55.297489 4773 projected.go:194] Error preparing data for projected volume kube-api-access-d8rhh for pod openstack/keystone-1b20-account-create-update-vrv89: failed to fetch token: serviceaccounts "galera-openstack" not found
Jan 22 12:19:55 crc kubenswrapper[4773]: E0122 12:19:55.297590 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-kube-api-access-d8rhh podName:01b6a60a-2c93-4443-80e5-f3dd77cf7c10 nodeName:}" failed. No retries permitted until 2026-01-22 12:19:56.297566755 +0000 UTC m=+1503.875682660 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-d8rhh" (UniqueName: "kubernetes.io/projected/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-kube-api-access-d8rhh") pod "keystone-1b20-account-create-update-vrv89" (UID: "01b6a60a-2c93-4443-80e5-f3dd77cf7c10") : failed to fetch token: serviceaccounts "galera-openstack" not found
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.299070 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9c73637-566a-47b5-bba6-97948a973a47-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b9c73637-566a-47b5-bba6-97948a973a47" (UID: "b9c73637-566a-47b5-bba6-97948a973a47"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.303785 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9c73637-566a-47b5-bba6-97948a973a47-kube-api-access-jqqtd" (OuterVolumeSpecName: "kube-api-access-jqqtd") pod "b9c73637-566a-47b5-bba6-97948a973a47" (UID: "b9c73637-566a-47b5-bba6-97948a973a47"). InnerVolumeSpecName "kube-api-access-jqqtd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.305059 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-8744-account-create-update-4ljts"]
Jan 22 12:19:55 crc kubenswrapper[4773]: E0122 12:19:55.305102 4773 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found
Jan 22 12:19:55 crc kubenswrapper[4773]: E0122 12:19:55.305149 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-operator-scripts podName:01b6a60a-2c93-4443-80e5-f3dd77cf7c10 nodeName:}" failed. No retries permitted until 2026-01-22 12:19:56.305133187 +0000 UTC m=+1503.883249012 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-operator-scripts") pod "keystone-1b20-account-create-update-vrv89" (UID: "01b6a60a-2c93-4443-80e5-f3dd77cf7c10") : configmap "openstack-scripts" not found
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.305410 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9c73637-566a-47b5-bba6-97948a973a47-logs" (OuterVolumeSpecName: "logs") pod "b9c73637-566a-47b5-bba6-97948a973a47" (UID: "b9c73637-566a-47b5-bba6-97948a973a47"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.306561 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "b9c73637-566a-47b5-bba6-97948a973a47" (UID: "b9c73637-566a-47b5-bba6-97948a973a47"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.310017 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-8744-account-create-update-4ljts"] Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.314431 4773 generic.go:334] "Generic (PLEG): container finished" podID="62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" containerID="de6c3ff5cc24752d11a5ecf283955ba10ea0171066338d4427fd50840f65e7fb" exitCode=0 Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.314458 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-67f94f9664-cd544" event={"ID":"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e","Type":"ContainerDied","Data":"de6c3ff5cc24752d11a5ecf283955ba10ea0171066338d4427fd50840f65e7fb"} Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.315483 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-6f844cff7c-gmg8h"] Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.318389 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-scripts" (OuterVolumeSpecName: "scripts") pod "b9c73637-566a-47b5-bba6-97948a973a47" (UID: "b9c73637-566a-47b5-bba6-97948a973a47"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.320540 4773 generic.go:334] "Generic (PLEG): container finished" podID="b9c73637-566a-47b5-bba6-97948a973a47" containerID="6e5058f7647d7d88bf8a21cccbc0b8b01c2c40459ed5ee5f2691f3f5b1856157" exitCode=0 Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.320644 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.320652 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b9c73637-566a-47b5-bba6-97948a973a47","Type":"ContainerDied","Data":"6e5058f7647d7d88bf8a21cccbc0b8b01c2c40459ed5ee5f2691f3f5b1856157"} Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.320775 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"b9c73637-566a-47b5-bba6-97948a973a47","Type":"ContainerDied","Data":"225a2f34a32f0ca2dad3cbb6e121a6b3d564e544fe5bb8cd325dea73684c25dc"} Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.321086 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-6f844cff7c-gmg8h"] Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.321878 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "04725b89-f1ec-45f8-a69a-5427230da499" (UID: "04725b89-f1ec-45f8-a69a-5427230da499"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.322459 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-97pzr" event={"ID":"bda45287-f56e-4031-addb-068efe159a75","Type":"ContainerStarted","Data":"c77056a6397905d759d35c1d4ca79b8a19a43d9a4ece6b376b2d990edcdcb18b"} Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.329399 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.332081 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "04725b89-f1ec-45f8-a69a-5427230da499" (UID: "04725b89-f1ec-45f8-a69a-5427230da499"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.332227 4773 generic.go:334] "Generic (PLEG): container finished" podID="f681589a-ad29-4485-9313-7e63da547635" containerID="b23e36a154e8bd280a582d1bff462b1409937c638edaa9eeeed72ca40585f7bc" exitCode=0 Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.332287 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f681589a-ad29-4485-9313-7e63da547635","Type":"ContainerDied","Data":"b23e36a154e8bd280a582d1bff462b1409937c638edaa9eeeed72ca40585f7bc"} Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.332343 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f681589a-ad29-4485-9313-7e63da547635","Type":"ContainerDied","Data":"1ace24924790e623340b7b164a17c7319a67eb86e4eb575bbc7e58973674c6d7"} Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.332352 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ace24924790e623340b7b164a17c7319a67eb86e4eb575bbc7e58973674c6d7" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.339414 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.344186 4773 generic.go:334] "Generic (PLEG): container finished" podID="8d41353c-ea0e-4005-acec-dc25faae5840" containerID="62b857be6f7515df70b1cb2751883e724fe75bec469449ea75803a2227f3791b" exitCode=0 Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.344214 4773 generic.go:334] "Generic (PLEG): container finished" podID="8d41353c-ea0e-4005-acec-dc25faae5840" containerID="45b87d7500c47bd01c2b3bc23189b837e6db471a85861c82d269d27b75481b8d" exitCode=2 Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.344221 4773 generic.go:334] "Generic (PLEG): container finished" podID="8d41353c-ea0e-4005-acec-dc25faae5840" containerID="1210d2bb5e517e818502dcfc446a53647a2fe6e83297b49ee849c6965b74a547" exitCode=0 Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.344228 4773 generic.go:334] "Generic (PLEG): container finished" podID="8d41353c-ea0e-4005-acec-dc25faae5840" containerID="a368e5d7422ab66afdc22e40b44df9f6f725ea927e78f7b574782ff86b765058" exitCode=0 Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.344302 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-a998-account-create-update-qmwlx" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.344897 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d41353c-ea0e-4005-acec-dc25faae5840","Type":"ContainerDied","Data":"62b857be6f7515df70b1cb2751883e724fe75bec469449ea75803a2227f3791b"} Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.344925 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d41353c-ea0e-4005-acec-dc25faae5840","Type":"ContainerDied","Data":"45b87d7500c47bd01c2b3bc23189b837e6db471a85861c82d269d27b75481b8d"} Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.344936 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d41353c-ea0e-4005-acec-dc25faae5840","Type":"ContainerDied","Data":"1210d2bb5e517e818502dcfc446a53647a2fe6e83297b49ee849c6965b74a547"} Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.344945 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d41353c-ea0e-4005-acec-dc25faae5840","Type":"ContainerDied","Data":"a368e5d7422ab66afdc22e40b44df9f6f725ea927e78f7b574782ff86b765058"} Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.381727 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9c73637-566a-47b5-bba6-97948a973a47" (UID: "b9c73637-566a-47b5-bba6-97948a973a47"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.386186 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b9c73637-566a-47b5-bba6-97948a973a47" (UID: "b9c73637-566a-47b5-bba6-97948a973a47"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.391247 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-config-data" (OuterVolumeSpecName: "config-data") pod "b9c73637-566a-47b5-bba6-97948a973a47" (UID: "b9c73637-566a-47b5-bba6-97948a973a47"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.394383 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9c73637-566a-47b5-bba6-97948a973a47-logs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.394405 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.394418 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.394427 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqqtd\" (UniqueName: \"kubernetes.io/projected/b9c73637-566a-47b5-bba6-97948a973a47-kube-api-access-jqqtd\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.394435 4773 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9c73637-566a-47b5-bba6-97948a973a47-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.394457 4773 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.394467 4773 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.394476 4773 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/04725b89-f1ec-45f8-a69a-5427230da499-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.394485 4773 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.394493 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.394500 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9c73637-566a-47b5-bba6-97948a973a47-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.394508 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5398d068-4617-4d25-bd5e-18fa8ae900cc-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.394517 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzkhs\" (UniqueName: \"kubernetes.io/projected/5398d068-4617-4d25-bd5e-18fa8ae900cc-kube-api-access-xzkhs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 
12:19:55.414207 4773 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 22 12:19:55 crc kubenswrapper[4773]: E0122 12:19:55.459389 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-d8rhh operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/keystone-1b20-account-create-update-vrv89" podUID="01b6a60a-2c93-4443-80e5-f3dd77cf7c10" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.469262 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.495664 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.502118 4773 scope.go:117] "RemoveContainer" containerID="bca2d75d3d19f7bde38ac5639321578fd5fe1d01e28a264520eae5ab44fb18dd" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.507376 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.529780 4773 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.534329 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-67f94f9664-cd544" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.545446 4773 scope.go:117] "RemoveContainer" containerID="ed05a9e5edc313b45c9c66ec916cf3d4f450ad7afe2d3bf8f73e940333547c9c" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.557697 4773 util.go:48] "No ready sandbox for pod can be found. 
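[Note] Teardown of the glance local-volume PV above happens in two phases: UnmountVolume.TearDown removes each pod-level mount, and only when no pod still references the volume does UnmountDevice release the underlying device, after which the reconciler can log "Volume detached ... DevicePath \"\"". A toy illustration of that ordering via reference counting (type and method names are made up):

    package main

    import "fmt"

    // deviceVolume tracks how many pod mounts still reference a
    // device-backed volume such as local-storage07-crc.
    type deviceVolume struct {
        name string
        refs int
    }

    // tearDownPodMount is phase one: drop a single pod's mount.
    func (v *deviceVolume) tearDownPodMount() {
        v.refs--
        fmt.Printf("UnmountVolume.TearDown succeeded for %s (refs=%d)\n", v.name, v.refs)
    }

    // unmountDevice is phase two: only allowed once nothing references it.
    func (v *deviceVolume) unmountDevice() error {
        if v.refs > 0 {
            return fmt.Errorf("%s still referenced by %d pod(s)", v.name, v.refs)
        }
        fmt.Printf("UnmountDevice succeeded for volume %q\n", v.name)
        return nil
    }

    func main() {
        v := &deviceVolume{name: "local-storage07-crc", refs: 1}
        v.tearDownPodMount()
        if err := v.unmountDevice(); err != nil {
            fmt.Println(err)
        }
    }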
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.582635 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.592008 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.622589 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-n4t5s"] Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.631017 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f681589a-ad29-4485-9313-7e63da547635-logs\") pod \"f681589a-ad29-4485-9313-7e63da547635\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.631065 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4905ba-f2f9-42c0-a21b-fabf4046af68-combined-ca-bundle\") pod \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\" (UID: \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.631109 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/311884d0-65e1-4737-a7ba-efc94510b90b-nova-metadata-tls-certs\") pod \"311884d0-65e1-4737-a7ba-efc94510b90b\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.631155 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-combined-ca-bundle\") pod \"f681589a-ad29-4485-9313-7e63da547635\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.631206 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rr222\" (UniqueName: \"kubernetes.io/projected/f681589a-ad29-4485-9313-7e63da547635-kube-api-access-rr222\") pod \"f681589a-ad29-4485-9313-7e63da547635\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.631246 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/311884d0-65e1-4737-a7ba-efc94510b90b-config-data\") pod \"311884d0-65e1-4737-a7ba-efc94510b90b\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.631271 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/311884d0-65e1-4737-a7ba-efc94510b90b-combined-ca-bundle\") pod \"311884d0-65e1-4737-a7ba-efc94510b90b\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.631331 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jf84v\" (UniqueName: \"kubernetes.io/projected/311884d0-65e1-4737-a7ba-efc94510b90b-kube-api-access-jf84v\") pod \"311884d0-65e1-4737-a7ba-efc94510b90b\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.631371 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-2pgw5\" (UniqueName: \"kubernetes.io/projected/fd4905ba-f2f9-42c0-a21b-fabf4046af68-kube-api-access-2pgw5\") pod \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\" (UID: \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.631397 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"f681589a-ad29-4485-9313-7e63da547635\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.631421 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-config-data\") pod \"f681589a-ad29-4485-9313-7e63da547635\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.631452 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f681589a-ad29-4485-9313-7e63da547635-httpd-run\") pod \"f681589a-ad29-4485-9313-7e63da547635\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.631473 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/fd4905ba-f2f9-42c0-a21b-fabf4046af68-kube-state-metrics-tls-config\") pod \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\" (UID: \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.631493 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-scripts\") pod \"f681589a-ad29-4485-9313-7e63da547635\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.631511 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd4905ba-f2f9-42c0-a21b-fabf4046af68-kube-state-metrics-tls-certs\") pod \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\" (UID: \"fd4905ba-f2f9-42c0-a21b-fabf4046af68\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.631532 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-public-tls-certs\") pod \"f681589a-ad29-4485-9313-7e63da547635\" (UID: \"f681589a-ad29-4485-9313-7e63da547635\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.631566 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/311884d0-65e1-4737-a7ba-efc94510b90b-logs\") pod \"311884d0-65e1-4737-a7ba-efc94510b90b\" (UID: \"311884d0-65e1-4737-a7ba-efc94510b90b\") " Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.632356 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/311884d0-65e1-4737-a7ba-efc94510b90b-logs" (OuterVolumeSpecName: "logs") pod "311884d0-65e1-4737-a7ba-efc94510b90b" (UID: "311884d0-65e1-4737-a7ba-efc94510b90b"). InnerVolumeSpecName "logs". 
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.632356 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/311884d0-65e1-4737-a7ba-efc94510b90b-logs" (OuterVolumeSpecName: "logs") pod "311884d0-65e1-4737-a7ba-efc94510b90b" (UID: "311884d0-65e1-4737-a7ba-efc94510b90b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.634422 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-n4t5s"]
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.643366 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f681589a-ad29-4485-9313-7e63da547635-logs" (OuterVolumeSpecName: "logs") pod "f681589a-ad29-4485-9313-7e63da547635" (UID: "f681589a-ad29-4485-9313-7e63da547635"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.643433 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f681589a-ad29-4485-9313-7e63da547635-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f681589a-ad29-4485-9313-7e63da547635" (UID: "f681589a-ad29-4485-9313-7e63da547635"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.655411 4773 scope.go:117] "RemoveContainer" containerID="f0274aaa2adaf6a4656046810ff93c7b2ab941e940754cbad018d84fab5c25eb"
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.657806 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "f681589a-ad29-4485-9313-7e63da547635" (UID: "f681589a-ad29-4485-9313-7e63da547635"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.658582 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f681589a-ad29-4485-9313-7e63da547635-kube-api-access-rr222" (OuterVolumeSpecName: "kube-api-access-rr222") pod "f681589a-ad29-4485-9313-7e63da547635" (UID: "f681589a-ad29-4485-9313-7e63da547635"). InnerVolumeSpecName "kube-api-access-rr222". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.660906 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-scripts" (OuterVolumeSpecName: "scripts") pod "f681589a-ad29-4485-9313-7e63da547635" (UID: "f681589a-ad29-4485-9313-7e63da547635"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.661477 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd4905ba-f2f9-42c0-a21b-fabf4046af68-kube-api-access-2pgw5" (OuterVolumeSpecName: "kube-api-access-2pgw5") pod "fd4905ba-f2f9-42c0-a21b-fabf4046af68" (UID: "fd4905ba-f2f9-42c0-a21b-fabf4046af68"). InnerVolumeSpecName "kube-api-access-2pgw5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.676203 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-a998-account-create-update-qmwlx"]
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.683083 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-a998-account-create-update-qmwlx"]
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.688019 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-77688c4666-rzbr7"]
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.694879 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-77688c4666-rzbr7"]
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.699732 4773 scope.go:117] "RemoveContainer" containerID="a2c743ccc8058456082f15d2a6c949e38df6e8c5dbd52d76888c3f1f8d965d57"
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.716811 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.723486 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/311884d0-65e1-4737-a7ba-efc94510b90b-kube-api-access-jf84v" (OuterVolumeSpecName: "kube-api-access-jf84v") pod "311884d0-65e1-4737-a7ba-efc94510b90b" (UID: "311884d0-65e1-4737-a7ba-efc94510b90b"). InnerVolumeSpecName "kube-api-access-jf84v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.726392 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/311884d0-65e1-4737-a7ba-efc94510b90b-config-data" (OuterVolumeSpecName: "config-data") pod "311884d0-65e1-4737-a7ba-efc94510b90b" (UID: "311884d0-65e1-4737-a7ba-efc94510b90b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.729400 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd4905ba-f2f9-42c0-a21b-fabf4046af68-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "fd4905ba-f2f9-42c0-a21b-fabf4046af68" (UID: "fd4905ba-f2f9-42c0-a21b-fabf4046af68"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.729984 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/311884d0-65e1-4737-a7ba-efc94510b90b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "311884d0-65e1-4737-a7ba-efc94510b90b" (UID: "311884d0-65e1-4737-a7ba-efc94510b90b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.729851 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd4905ba-f2f9-42c0-a21b-fabf4046af68-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd4905ba-f2f9-42c0-a21b-fabf4046af68" (UID: "fd4905ba-f2f9-42c0-a21b-fabf4046af68"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.731314 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"]
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.736970 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-config-data\") pod \"8d41353c-ea0e-4005-acec-dc25faae5840\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.737066 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pd742\" (UniqueName: \"kubernetes.io/projected/8d41353c-ea0e-4005-acec-dc25faae5840-kube-api-access-pd742\") pod \"8d41353c-ea0e-4005-acec-dc25faae5840\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.737233 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8bq2\" (UniqueName: \"kubernetes.io/projected/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-kube-api-access-j8bq2\") pod \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.737382 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-public-tls-certs\") pod \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.737428 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-internal-tls-certs\") pod \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.737563 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-combined-ca-bundle\") pod \"8d41353c-ea0e-4005-acec-dc25faae5840\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.737802 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-ceilometer-tls-certs\") pod \"8d41353c-ea0e-4005-acec-dc25faae5840\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.738012 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-config-data\") pod \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.738066 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d41353c-ea0e-4005-acec-dc25faae5840-run-httpd\") pod \"8d41353c-ea0e-4005-acec-dc25faae5840\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.738121 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-config-data-custom\") pod \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.738157 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-scripts\") pod \"8d41353c-ea0e-4005-acec-dc25faae5840\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.738183 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-logs\") pod \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.738264 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d41353c-ea0e-4005-acec-dc25faae5840-log-httpd\") pod \"8d41353c-ea0e-4005-acec-dc25faae5840\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.738613 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-combined-ca-bundle\") pod \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\" (UID: \"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.738710 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-sg-core-conf-yaml\") pod \"8d41353c-ea0e-4005-acec-dc25faae5840\" (UID: \"8d41353c-ea0e-4005-acec-dc25faae5840\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.738738 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d41353c-ea0e-4005-acec-dc25faae5840-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8d41353c-ea0e-4005-acec-dc25faae5840" (UID: "8d41353c-ea0e-4005-acec-dc25faae5840"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.739988 4773 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f681589a-ad29-4485-9313-7e63da547635-httpd-run\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.740013 4773 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/fd4905ba-f2f9-42c0-a21b-fabf4046af68-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.740027 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.740039 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/311884d0-65e1-4737-a7ba-efc94510b90b-logs\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.740050 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f681589a-ad29-4485-9313-7e63da547635-logs\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.740061 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4905ba-f2f9-42c0-a21b-fabf4046af68-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.740074 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rr222\" (UniqueName: \"kubernetes.io/projected/f681589a-ad29-4485-9313-7e63da547635-kube-api-access-rr222\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.740085 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/311884d0-65e1-4737-a7ba-efc94510b90b-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.740097 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/311884d0-65e1-4737-a7ba-efc94510b90b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.740108 4773 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d41353c-ea0e-4005-acec-dc25faae5840-run-httpd\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.740120 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jf84v\" (UniqueName: \"kubernetes.io/projected/311884d0-65e1-4737-a7ba-efc94510b90b-kube-api-access-jf84v\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.740131 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2pgw5\" (UniqueName: \"kubernetes.io/projected/fd4905ba-f2f9-42c0-a21b-fabf4046af68-kube-api-access-2pgw5\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.740157 4773 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.743398 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-logs" (OuterVolumeSpecName: "logs") pod "62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" (UID: "62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.755175 4773 scope.go:117] "RemoveContainer" containerID="6e5058f7647d7d88bf8a21cccbc0b8b01c2c40459ed5ee5f2691f3f5b1856157"
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.757496 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d41353c-ea0e-4005-acec-dc25faae5840-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8d41353c-ea0e-4005-acec-dc25faae5840" (UID: "8d41353c-ea0e-4005-acec-dc25faae5840"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.759755 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d41353c-ea0e-4005-acec-dc25faae5840-kube-api-access-pd742" (OuterVolumeSpecName: "kube-api-access-pd742") pod "8d41353c-ea0e-4005-acec-dc25faae5840" (UID: "8d41353c-ea0e-4005-acec-dc25faae5840"). InnerVolumeSpecName "kube-api-access-pd742". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.760207 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.764682 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" (UID: "62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.767604 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-kube-api-access-j8bq2" (OuterVolumeSpecName: "kube-api-access-j8bq2") pod "62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" (UID: "62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e"). InnerVolumeSpecName "kube-api-access-j8bq2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.767813 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-scripts" (OuterVolumeSpecName: "scripts") pod "8d41353c-ea0e-4005-acec-dc25faae5840" (UID: "8d41353c-ea0e-4005-acec-dc25faae5840"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.772166 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.809593 4773 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc"
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.813357 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-97pzr"
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.818810 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f681589a-ad29-4485-9313-7e63da547635" (UID: "f681589a-ad29-4485-9313-7e63da547635"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.822010 4773 scope.go:117] "RemoveContainer" containerID="3dfcb4bd6224b403fd1ea1d5e784918a520d4eaddf99c8f2c147ba29dd4f733e"
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.824701 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8d41353c-ea0e-4005-acec-dc25faae5840" (UID: "8d41353c-ea0e-4005-acec-dc25faae5840"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.843325 4773 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.843476 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pd742\" (UniqueName: \"kubernetes.io/projected/8d41353c-ea0e-4005-acec-dc25faae5840-kube-api-access-pd742\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.843539 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.843648 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8bq2\" (UniqueName: \"kubernetes.io/projected/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-kube-api-access-j8bq2\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.843708 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-config-data-custom\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.843768 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.843822 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-logs\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.843873 4773 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.843926 4773 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d41353c-ea0e-4005-acec-dc25faae5840-log-httpd\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.847779 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-config-data" (OuterVolumeSpecName: "config-data") pod "f681589a-ad29-4485-9313-7e63da547635" (UID: "f681589a-ad29-4485-9313-7e63da547635"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.850109 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" (UID: "62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.855961 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/311884d0-65e1-4737-a7ba-efc94510b90b-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "311884d0-65e1-4737-a7ba-efc94510b90b" (UID: "311884d0-65e1-4737-a7ba-efc94510b90b"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.873571 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd4905ba-f2f9-42c0-a21b-fabf4046af68-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "fd4905ba-f2f9-42c0-a21b-fabf4046af68" (UID: "fd4905ba-f2f9-42c0-a21b-fabf4046af68"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.875063 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-config-data" (OuterVolumeSpecName: "config-data") pod "62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" (UID: "62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.887633 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "8d41353c-ea0e-4005-acec-dc25faae5840" (UID: "8d41353c-ea0e-4005-acec-dc25faae5840"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.887717 4773 scope.go:117] "RemoveContainer" containerID="6e5058f7647d7d88bf8a21cccbc0b8b01c2c40459ed5ee5f2691f3f5b1856157"
Jan 22 12:19:55 crc kubenswrapper[4773]: E0122 12:19:55.891848 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e5058f7647d7d88bf8a21cccbc0b8b01c2c40459ed5ee5f2691f3f5b1856157\": container with ID starting with 6e5058f7647d7d88bf8a21cccbc0b8b01c2c40459ed5ee5f2691f3f5b1856157 not found: ID does not exist" containerID="6e5058f7647d7d88bf8a21cccbc0b8b01c2c40459ed5ee5f2691f3f5b1856157"
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.891895 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e5058f7647d7d88bf8a21cccbc0b8b01c2c40459ed5ee5f2691f3f5b1856157"} err="failed to get container status \"6e5058f7647d7d88bf8a21cccbc0b8b01c2c40459ed5ee5f2691f3f5b1856157\": rpc error: code = NotFound desc = could not find container \"6e5058f7647d7d88bf8a21cccbc0b8b01c2c40459ed5ee5f2691f3f5b1856157\": container with ID starting with 6e5058f7647d7d88bf8a21cccbc0b8b01c2c40459ed5ee5f2691f3f5b1856157 not found: ID does not exist"
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.891924 4773 scope.go:117] "RemoveContainer" containerID="3dfcb4bd6224b403fd1ea1d5e784918a520d4eaddf99c8f2c147ba29dd4f733e"
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.891946 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" (UID: "62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: E0122 12:19:55.892416 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dfcb4bd6224b403fd1ea1d5e784918a520d4eaddf99c8f2c147ba29dd4f733e\": container with ID starting with 3dfcb4bd6224b403fd1ea1d5e784918a520d4eaddf99c8f2c147ba29dd4f733e not found: ID does not exist" containerID="3dfcb4bd6224b403fd1ea1d5e784918a520d4eaddf99c8f2c147ba29dd4f733e"
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.892462 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dfcb4bd6224b403fd1ea1d5e784918a520d4eaddf99c8f2c147ba29dd4f733e"} err="failed to get container status \"3dfcb4bd6224b403fd1ea1d5e784918a520d4eaddf99c8f2c147ba29dd4f733e\": rpc error: code = NotFound desc = could not find container \"3dfcb4bd6224b403fd1ea1d5e784918a520d4eaddf99c8f2c147ba29dd4f733e\": container with ID starting with 3dfcb4bd6224b403fd1ea1d5e784918a520d4eaddf99c8f2c147ba29dd4f733e not found: ID does not exist"
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.918460 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" (UID: "62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.937402 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d41353c-ea0e-4005-acec-dc25faae5840" (UID: "8d41353c-ea0e-4005-acec-dc25faae5840"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.945537 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bda45287-f56e-4031-addb-068efe159a75-operator-scripts\") pod \"bda45287-f56e-4031-addb-068efe159a75\" (UID: \"bda45287-f56e-4031-addb-068efe159a75\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.945729 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xfdfz\" (UniqueName: \"kubernetes.io/projected/bda45287-f56e-4031-addb-068efe159a75-kube-api-access-xfdfz\") pod \"bda45287-f56e-4031-addb-068efe159a75\" (UID: \"bda45287-f56e-4031-addb-068efe159a75\") "
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.946321 4773 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.946345 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.946354 4773 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.946380 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.946392 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.946402 4773 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd4905ba-f2f9-42c0-a21b-fabf4046af68-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.946412 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.946421 4773 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/311884d0-65e1-4737-a7ba-efc94510b90b-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.946430 4773 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e-public-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.946818 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bda45287-f56e-4031-addb-068efe159a75-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bda45287-f56e-4031-addb-068efe159a75" (UID: "bda45287-f56e-4031-addb-068efe159a75"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.947241 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f681589a-ad29-4485-9313-7e63da547635" (UID: "f681589a-ad29-4485-9313-7e63da547635"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.949424 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bda45287-f56e-4031-addb-068efe159a75-kube-api-access-xfdfz" (OuterVolumeSpecName: "kube-api-access-xfdfz") pod "bda45287-f56e-4031-addb-068efe159a75" (UID: "bda45287-f56e-4031-addb-068efe159a75"). InnerVolumeSpecName "kube-api-access-xfdfz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:19:55 crc kubenswrapper[4773]: I0122 12:19:55.976547 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-config-data" (OuterVolumeSpecName: "config-data") pod "8d41353c-ea0e-4005-acec-dc25faae5840" (UID: "8d41353c-ea0e-4005-acec-dc25faae5840"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.048712 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bda45287-f56e-4031-addb-068efe159a75-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.048751 4773 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f681589a-ad29-4485-9313-7e63da547635-public-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.048762 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d41353c-ea0e-4005-acec-dc25faae5840-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.048771 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfdfz\" (UniqueName: \"kubernetes.io/projected/bda45287-f56e-4031-addb-068efe159a75-kube-api-access-xfdfz\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.354617 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-operator-scripts\") pod \"keystone-1b20-account-create-update-vrv89\" (UID: \"01b6a60a-2c93-4443-80e5-f3dd77cf7c10\") " pod="openstack/keystone-1b20-account-create-update-vrv89"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.354955 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8rhh\" (UniqueName: \"kubernetes.io/projected/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-kube-api-access-d8rhh\") pod \"keystone-1b20-account-create-update-vrv89\" (UID: \"01b6a60a-2c93-4443-80e5-f3dd77cf7c10\") " pod="openstack/keystone-1b20-account-create-update-vrv89"
Jan 22 12:19:56 crc kubenswrapper[4773]: E0122 12:19:56.356150 4773 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found
Jan 22 12:19:56 crc kubenswrapper[4773]: E0122 12:19:56.356200 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-operator-scripts podName:01b6a60a-2c93-4443-80e5-f3dd77cf7c10 nodeName:}" failed. No retries permitted until 2026-01-22 12:19:58.35618486 +0000 UTC m=+1505.934300685 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-operator-scripts") pod "keystone-1b20-account-create-update-vrv89" (UID: "01b6a60a-2c93-4443-80e5-f3dd77cf7c10") : configmap "openstack-scripts" not found
Jan 22 12:19:56 crc kubenswrapper[4773]: E0122 12:19:56.359119 4773 projected.go:194] Error preparing data for projected volume kube-api-access-d8rhh for pod openstack/keystone-1b20-account-create-update-vrv89: failed to fetch token: serviceaccounts "galera-openstack" not found
Jan 22 12:19:56 crc kubenswrapper[4773]: E0122 12:19:56.359162 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-kube-api-access-d8rhh podName:01b6a60a-2c93-4443-80e5-f3dd77cf7c10 nodeName:}" failed. No retries permitted until 2026-01-22 12:19:58.359151023 +0000 UTC m=+1505.937266848 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-d8rhh" (UniqueName: "kubernetes.io/projected/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-kube-api-access-d8rhh") pod "keystone-1b20-account-create-update-vrv89" (UID: "01b6a60a-2c93-4443-80e5-f3dd77cf7c10") : failed to fetch token: serviceaccounts "galera-openstack" not found
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.383374 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-97pzr" event={"ID":"bda45287-f56e-4031-addb-068efe159a75","Type":"ContainerDied","Data":"c77056a6397905d759d35c1d4ca79b8a19a43d9a4ece6b376b2d990edcdcb18b"}
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.383509 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-97pzr"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.399680 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-67f94f9664-cd544"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.399688 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-67f94f9664-cd544" event={"ID":"62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e","Type":"ContainerDied","Data":"d3893ebe0b69c21f423ee65d072031e25359fc316ec91e348695754111d8b041"}
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.399737 4773 scope.go:117] "RemoveContainer" containerID="de6c3ff5cc24752d11a5ecf283955ba10ea0171066338d4427fd50840f65e7fb"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.409879 4773 generic.go:334] "Generic (PLEG): container finished" podID="637a651c-e338-45ba-8bd3-a8f838500893" containerID="430c12aa11f46c2666087aed88a66b65f91c8e79648c0421d3f83f1922bda4ef" exitCode=0
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.410436 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"637a651c-e338-45ba-8bd3-a8f838500893","Type":"ContainerDied","Data":"430c12aa11f46c2666087aed88a66b65f91c8e79648c0421d3f83f1922bda4ef"}
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.420825 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.422382 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.422611 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d41353c-ea0e-4005-acec-dc25faae5840","Type":"ContainerDied","Data":"610958e0cb31060554b78ca6249322e42e819841e0e300bd0b2015ffe19b030a"}
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.422640 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.422770 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
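[Editor's note] The "SyncLoop (PLEG)" and generic.go:334 entries above come from the kubelet's pod lifecycle event generator, which periodically relists containers and turns observed state transitions into events such as ContainerDied. A toy relist in that spirit; the types and state strings here are illustrative only, not the kubelet's:

```go
package main

import "fmt"

type event struct{ ID, Type, Data string }

// relist diffs the previous and current container states and emits
// ContainerDied for anything that left the running state.
func relist(prev, curr map[string]string) []event {
	var out []event
	for id, old := range prev {
		if old == "running" && curr[id] != "running" {
			out = append(out, event{ID: id, Type: "ContainerDied", Data: id})
		}
	}
	return out
}

func main() {
	prev := map[string]string{"430c12aa11f4": "running", "610958e0cb31": "running"}
	curr := map[string]string{"430c12aa11f4": "exited", "610958e0cb31": "exited"}
	for _, ev := range relist(prev, curr) {
		fmt.Printf("SyncLoop (PLEG): event %+v\n", ev)
	}
}
```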
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.426505 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-1b20-account-create-update-vrv89"
Jan 22 12:19:56 crc kubenswrapper[4773]: E0122 12:19:56.456458 4773 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Jan 22 12:19:56 crc kubenswrapper[4773]: E0122 12:19:56.456511 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-config-data podName:a4c14d2f-5507-4d08-be37-55d77b5491a3 nodeName:}" failed. No retries permitted until 2026-01-22 12:20:04.456498528 +0000 UTC m=+1512.034614353 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-config-data") pod "rabbitmq-cell1-server-0" (UID: "a4c14d2f-5507-4d08-be37-55d77b5491a3") : configmap "rabbitmq-cell1-config-data" not found
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.458450 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-97pzr"]
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.463436 4773 scope.go:117] "RemoveContainer" containerID="2c623358850ff2ff54ddcf8841a4f6e0e66ef622633842e8bc1dabec1720076d"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.471339 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-97pzr"]
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.526375 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-1b20-account-create-update-vrv89"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.541840 4773 scope.go:117] "RemoveContainer" containerID="62b857be6f7515df70b1cb2751883e724fe75bec469449ea75803a2227f3791b"
Jan 22 12:19:56 crc kubenswrapper[4773]: E0122 12:19:56.558659 4773 configmap.go:193] Couldn't get configMap openstack/ovncontroller-scripts: configmap "ovncontroller-scripts" not found
Jan 22 12:19:56 crc kubenswrapper[4773]: E0122 12:19:56.558727 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/757f37a0-3cc7-4d57-a956-83d236d6cebc-scripts podName:757f37a0-3cc7-4d57-a956-83d236d6cebc nodeName:}" failed. No retries permitted until 2026-01-22 12:20:04.558710469 +0000 UTC m=+1512.136826294 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/757f37a0-3cc7-4d57-a956-83d236d6cebc-scripts") pod "ovn-controller-rp6dh" (UID: "757f37a0-3cc7-4d57-a956-83d236d6cebc") : configmap "ovncontroller-scripts" not found
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.558941 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-67f94f9664-cd544"]
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.570194 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-67f94f9664-cd544"]
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.575626 4773 scope.go:117] "RemoveContainer" containerID="45b87d7500c47bd01c2b3bc23189b837e6db471a85861c82d269d27b75481b8d"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.590548 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.596807 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.615301 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.626989 4773 scope.go:117] "RemoveContainer" containerID="1210d2bb5e517e818502dcfc446a53647a2fe6e83297b49ee849c6965b74a547"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.641784 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.662748 4773 scope.go:117] "RemoveContainer" containerID="a368e5d7422ab66afdc22e40b44df9f6f725ea927e78f7b574782ff86b765058"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.678636 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04725b89-f1ec-45f8-a69a-5427230da499" path="/var/lib/kubelet/pods/04725b89-f1ec-45f8-a69a-5427230da499/volumes"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.679509 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1683acba-c129-4a7c-866c-421cdb0e6505" path="/var/lib/kubelet/pods/1683acba-c129-4a7c-866c-421cdb0e6505/volumes"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.680042 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19ea8aed-d681-4685-8e05-3c653517f21f" path="/var/lib/kubelet/pods/19ea8aed-d681-4685-8e05-3c653517f21f/volumes"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.681254 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="525ebe62-dc27-40fa-97a9-31346c6145a1" path="/var/lib/kubelet/pods/525ebe62-dc27-40fa-97a9-31346c6145a1/volumes"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.681815 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5398d068-4617-4d25-bd5e-18fa8ae900cc" path="/var/lib/kubelet/pods/5398d068-4617-4d25-bd5e-18fa8ae900cc/volumes"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.682907 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" path="/var/lib/kubelet/pods/62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e/volumes"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.683471 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8510a6ae-d5e7-4da2-9351-1f0f848d6fef" path="/var/lib/kubelet/pods/8510a6ae-d5e7-4da2-9351-1f0f848d6fef/volumes"
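[Editor's note] The nestedpendingoperations.go:348 entries show failed mount operations being retried with a growing delay: durationBeforeRetry is 2s for the keystone account-create volumes earlier and 8s for the rabbitmq and ovn-controller volumes here. A minimal sketch of such exponential backoff; the initial delay, factor, and cap are assumptions for illustration, chosen so a doubling sequence reproduces the observed 2s and 8s values:

```go
package main

import (
	"fmt"
	"time"
)

// durationBeforeRetry doubles the delay after each consecutive failure,
// starting from an assumed 500ms and capping at an assumed maximum.
func durationBeforeRetry(failures int) time.Duration {
	const (
		initial = 500 * time.Millisecond
		max     = 2*time.Minute + 2*time.Second
	)
	d := initial
	for i := 1; i < failures; i++ {
		d *= 2
		if d >= max {
			return max
		}
	}
	return d
}

func main() {
	for failures := 1; failures <= 6; failures++ {
		fmt.Printf("failure #%d -> retry in %s\n", failures, durationBeforeRetry(failures))
	}
	// Under these assumptions: failure #3 -> 2s and failure #5 -> 8s,
	// matching the delays logged above.
}
```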
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.684518 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="878a6ce0-f293-4690-9049-c90155c56ff3" path="/var/lib/kubelet/pods/878a6ce0-f293-4690-9049-c90155c56ff3/volumes"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.685229 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d41353c-ea0e-4005-acec-dc25faae5840" path="/var/lib/kubelet/pods/8d41353c-ea0e-4005-acec-dc25faae5840/volumes"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.686949 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96efe0ff-5c9e-465c-8e86-80035697b7d0" path="/var/lib/kubelet/pods/96efe0ff-5c9e-465c-8e86-80035697b7d0/volumes"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.687639 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9c73637-566a-47b5-bba6-97948a973a47" path="/var/lib/kubelet/pods/b9c73637-566a-47b5-bba6-97948a973a47/volumes"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.688448 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bda45287-f56e-4031-addb-068efe159a75" path="/var/lib/kubelet/pods/bda45287-f56e-4031-addb-068efe159a75/volumes"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.689444 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfff6d4b-eee9-4ceb-a420-512d1ba28760" path="/var/lib/kubelet/pods/bfff6d4b-eee9-4ceb-a420-512d1ba28760/volumes"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.689882 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d367a46c-d551-46d3-b85f-a4499a1c4c0f" path="/var/lib/kubelet/pods/d367a46c-d551-46d3-b85f-a4499a1c4c0f/volumes"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.690213 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5686dfc-618a-4996-a4b0-b1bd31365084" path="/var/lib/kubelet/pods/d5686dfc-618a-4996-a4b0-b1bd31365084/volumes"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.690628 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e46f52ea-4d7a-48b9-ac29-fbb7e326a2db" path="/var/lib/kubelet/pods/e46f52ea-4d7a-48b9-ac29-fbb7e326a2db/volumes"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.691115 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f681589a-ad29-4485-9313-7e63da547635" path="/var/lib/kubelet/pods/f681589a-ad29-4485-9313-7e63da547635/volumes"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.692388 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbc373ba-8a4b-44be-a687-63de038d5fa3" path="/var/lib/kubelet/pods/fbc373ba-8a4b-44be-a687-63de038d5fa3/volumes"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.692750 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbe5483b-39b1-4c4c-add8-d4ec00c26108" path="/var/lib/kubelet/pods/fbe5483b-39b1-4c4c-add8-d4ec00c26108/volumes"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.693533 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.693563 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.693577 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.693588 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 22 12:19:56 crc kubenswrapper[4773]: E0122 12:19:56.830203 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="43101b119afcdef21c580975d02a69a753cc73834041f83f630e3b6a0cf4ac01" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 22 12:19:56 crc kubenswrapper[4773]: E0122 12:19:56.832228 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="43101b119afcdef21c580975d02a69a753cc73834041f83f630e3b6a0cf4ac01" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 22 12:19:56 crc kubenswrapper[4773]: E0122 12:19:56.836860 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="43101b119afcdef21c580975d02a69a753cc73834041f83f630e3b6a0cf4ac01" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Jan 22 12:19:56 crc kubenswrapper[4773]: E0122 12:19:56.836952 4773 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="b9cded2e-ee73-4606-8df8-f52bb4bb139d" containerName="nova-cell1-conductor-conductor"
Jan 22 12:19:56 crc kubenswrapper[4773]: I0122 12:19:56.858818 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:56.980888 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-config-data\") pod \"637a651c-e338-45ba-8bd3-a8f838500893\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:56.980945 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-combined-ca-bundle\") pod \"637a651c-e338-45ba-8bd3-a8f838500893\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:56.981032 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/637a651c-e338-45ba-8bd3-a8f838500893-logs\") pod \"637a651c-e338-45ba-8bd3-a8f838500893\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:56.981065 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-public-tls-certs\") pod \"637a651c-e338-45ba-8bd3-a8f838500893\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:56.981188 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgjj9\" (UniqueName: \"kubernetes.io/projected/637a651c-e338-45ba-8bd3-a8f838500893-kube-api-access-mgjj9\") pod \"637a651c-e338-45ba-8bd3-a8f838500893\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:56.981208 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-internal-tls-certs\") pod \"637a651c-e338-45ba-8bd3-a8f838500893\" (UID: \"637a651c-e338-45ba-8bd3-a8f838500893\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:56.981895 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/637a651c-e338-45ba-8bd3-a8f838500893-logs" (OuterVolumeSpecName: "logs") pod "637a651c-e338-45ba-8bd3-a8f838500893" (UID: "637a651c-e338-45ba-8bd3-a8f838500893"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.017451 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "637a651c-e338-45ba-8bd3-a8f838500893" (UID: "637a651c-e338-45ba-8bd3-a8f838500893"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.018744 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-config-data" (OuterVolumeSpecName: "config-data") pod "637a651c-e338-45ba-8bd3-a8f838500893" (UID: "637a651c-e338-45ba-8bd3-a8f838500893"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.031726 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/637a651c-e338-45ba-8bd3-a8f838500893-kube-api-access-mgjj9" (OuterVolumeSpecName: "kube-api-access-mgjj9") pod "637a651c-e338-45ba-8bd3-a8f838500893" (UID: "637a651c-e338-45ba-8bd3-a8f838500893"). InnerVolumeSpecName "kube-api-access-mgjj9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:19:57 crc kubenswrapper[4773]: E0122 12:19:57.031903 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a5a2900d8f3ec8ba753c4eaa60a956c6d8edc660289208aed85a83444906c24b is running failed: container process not found" containerID="a5a2900d8f3ec8ba753c4eaa60a956c6d8edc660289208aed85a83444906c24b" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Jan 22 12:19:57 crc kubenswrapper[4773]: E0122 12:19:57.032456 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a5a2900d8f3ec8ba753c4eaa60a956c6d8edc660289208aed85a83444906c24b is running failed: container process not found" containerID="a5a2900d8f3ec8ba753c4eaa60a956c6d8edc660289208aed85a83444906c24b" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Jan 22 12:19:57 crc kubenswrapper[4773]: E0122 12:19:57.033098 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a5a2900d8f3ec8ba753c4eaa60a956c6d8edc660289208aed85a83444906c24b is running failed: container process not found" containerID="a5a2900d8f3ec8ba753c4eaa60a956c6d8edc660289208aed85a83444906c24b" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Jan 22 12:19:57 crc kubenswrapper[4773]: E0122 12:19:57.033146 4773 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a5a2900d8f3ec8ba753c4eaa60a956c6d8edc660289208aed85a83444906c24b is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="21901911-8523-4adc-9851-336360f4c11e" containerName="ovn-northd"
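[Editor's note] The ExecSync failures above ("cannot register an exec PID: container is stopping" for nova-cell1-conductor, "container process not found" for ovn-northd) are exec-based readiness probes racing container shutdown; the kubelet surfaces them as probe errors and carries on. An illustrative helper, not kubelet API, showing how a prober might classify such errors as an ordinary not-ready result rather than an infrastructure failure:

```go
package main

import (
	"fmt"
	"strings"
)

// classifyProbeErr maps runtime errors seen during container shutdown to a
// plain "not ready" outcome. The matched substrings come from the log above.
func classifyProbeErr(err error) string {
	msg := err.Error()
	switch {
	case strings.Contains(msg, "container is stopping"),
		strings.Contains(msg, "container process not found"):
		return "not ready (container terminating)"
	default:
		return "probe error"
	}
}

func main() {
	errs := []error{
		fmt.Errorf("rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping"),
		fmt.Errorf("rpc error: code = NotFound desc = container is not created or running: container process not found"),
	}
	for _, e := range errs {
		fmt.Println(classifyProbeErr(e))
	}
}
```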
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.083584 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/637a651c-e338-45ba-8bd3-a8f838500893-logs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.083613 4773 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.083622 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgjj9\" (UniqueName: \"kubernetes.io/projected/637a651c-e338-45ba-8bd3-a8f838500893-kube-api-access-mgjj9\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.083630 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.083639 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.095552 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "637a651c-e338-45ba-8bd3-a8f838500893" (UID: "637a651c-e338-45ba-8bd3-a8f838500893"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.185467 4773 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/637a651c-e338-45ba-8bd3-a8f838500893-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.434663 4773 generic.go:334] "Generic (PLEG): container finished" podID="a4c14d2f-5507-4d08-be37-55d77b5491a3" containerID="571196d2d3c006d03c5548409716302b0c8d1e601a474e5f6e89f54c68e781d2" exitCode=0 Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.434747 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a4c14d2f-5507-4d08-be37-55d77b5491a3","Type":"ContainerDied","Data":"571196d2d3c006d03c5548409716302b0c8d1e601a474e5f6e89f54c68e781d2"} Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.440078 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"637a651c-e338-45ba-8bd3-a8f838500893","Type":"ContainerDied","Data":"9987a51e74a3e9a9f8bab6d618cb7ae86bff074a415eda858a994cf587cf2051"} Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.440135 4773 scope.go:117] "RemoveContainer" containerID="430c12aa11f46c2666087aed88a66b65f91c8e79648c0421d3f83f1922bda4ef" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.440096 4773 util.go:48] "No ready sandbox for pod can be found. 
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.446013 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_21901911-8523-4adc-9851-336360f4c11e/ovn-northd/0.log"
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.446054 4773 generic.go:334] "Generic (PLEG): container finished" podID="21901911-8523-4adc-9851-336360f4c11e" containerID="a5a2900d8f3ec8ba753c4eaa60a956c6d8edc660289208aed85a83444906c24b" exitCode=139
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.446107 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"21901911-8523-4adc-9851-336360f4c11e","Type":"ContainerDied","Data":"a5a2900d8f3ec8ba753c4eaa60a956c6d8edc660289208aed85a83444906c24b"}
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.448354 4773 generic.go:334] "Generic (PLEG): container finished" podID="b9643af6-36f5-46b0-9cca-b9fe67a689dd" containerID="7abeeb25a28ee1cf287a8f8112c5b0ed49bd1977fc5bfc686b83f3ac4dd09463" exitCode=0
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.448481 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-1b20-account-create-update-vrv89"
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.448983 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"b9643af6-36f5-46b0-9cca-b9fe67a689dd","Type":"ContainerDied","Data":"7abeeb25a28ee1cf287a8f8112c5b0ed49bd1977fc5bfc686b83f3ac4dd09463"}
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.449008 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"b9643af6-36f5-46b0-9cca-b9fe67a689dd","Type":"ContainerDied","Data":"d724b280f8c5a9d70986a56a1fa43a9abbb4dd28840d585fa25ec19af314a400"}
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.449019 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d724b280f8c5a9d70986a56a1fa43a9abbb4dd28840d585fa25ec19af314a400"
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.546526 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.547703 4773 scope.go:117] "RemoveContainer" containerID="f208616daee705d1c4d908c528f31148f8ced61b33147903c586028760cfb3c8"
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.558862 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_21901911-8523-4adc-9851-336360f4c11e/ovn-northd/0.log"
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.558932 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.583127 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
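Interleaved with the unmounts, the pod lifecycle event generator (PLEG) detects that containers have exited: generic.go:334 records the observed exit code, and the sync loop (kubelet.go:2453) consumes the resulting ContainerDied events. Conceptually PLEG relists container states and diffs them against its previous snapshot; the sketch below shows that diff under assumed types, not kubelet's actual implementation.

```go
// Minimal relist-and-diff sketch of a pod lifecycle event generator:
// compare the previous containerID -> running snapshot with the current
// one and emit a ContainerDied event for each container that stopped.
package main

import "fmt"

type event struct {
	podID, containerID, kind string
}

// relist returns the lifecycle events implied by the change between two
// snapshots, as PLEG does on every poll.
func relist(prev, curr map[string]bool, podID string) []event {
	var events []event
	for id, wasRunning := range prev {
		if wasRunning && !curr[id] {
			events = append(events, event{podID, id, "ContainerDied"})
		}
	}
	return events
}

func main() {
	prev := map[string]bool{"a5a2900d": true}
	curr := map[string]bool{"a5a2900d": false}
	for _, e := range relist(prev, curr, "openstack/ovn-northd-0") {
		fmt.Printf("SyncLoop (PLEG): event for pod %s: %s %s\n", e.podID, e.kind, e.containerID)
	}
}
```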
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.588632 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-1b20-account-create-update-vrv89"]
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.596174 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-1b20-account-create-update-vrv89"]
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.604933 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.610247 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.695413 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-plugins-conf\") pod \"a4c14d2f-5507-4d08-be37-55d77b5491a3\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.695477 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-erlang-cookie\") pod \"a4c14d2f-5507-4d08-be37-55d77b5491a3\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.695510 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/21901911-8523-4adc-9851-336360f4c11e-scripts\") pod \"21901911-8523-4adc-9851-336360f4c11e\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.695591 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/21901911-8523-4adc-9851-336360f4c11e-ovn-rundir\") pod \"21901911-8523-4adc-9851-336360f4c11e\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.695636 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21901911-8523-4adc-9851-336360f4c11e-config\") pod \"21901911-8523-4adc-9851-336360f4c11e\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.695657 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-plugins\") pod \"a4c14d2f-5507-4d08-be37-55d77b5491a3\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.695695 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a4c14d2f-5507-4d08-be37-55d77b5491a3-erlang-cookie-secret\") pod \"a4c14d2f-5507-4d08-be37-55d77b5491a3\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.695726 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-server-conf\") pod \"a4c14d2f-5507-4d08-be37-55d77b5491a3\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.695781 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a4c14d2f-5507-4d08-be37-55d77b5491a3-pod-info\") pod \"a4c14d2f-5507-4d08-be37-55d77b5491a3\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.695812 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-confd\") pod \"a4c14d2f-5507-4d08-be37-55d77b5491a3\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.695831 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9643af6-36f5-46b0-9cca-b9fe67a689dd-operator-scripts\") pod \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.695893 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-tls\") pod \"a4c14d2f-5507-4d08-be37-55d77b5491a3\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.695913 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b9643af6-36f5-46b0-9cca-b9fe67a689dd-config-data-default\") pod \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.695951 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.695978 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-config-data\") pod \"a4c14d2f-5507-4d08-be37-55d77b5491a3\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.696012 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9gvm\" (UniqueName: \"kubernetes.io/projected/b9643af6-36f5-46b0-9cca-b9fe67a689dd-kube-api-access-t9gvm\") pod \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.696031 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/21901911-8523-4adc-9851-336360f4c11e-metrics-certs-tls-certs\") pod \"21901911-8523-4adc-9851-336360f4c11e\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.696050 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ccjbz\" (UniqueName: \"kubernetes.io/projected/a4c14d2f-5507-4d08-be37-55d77b5491a3-kube-api-access-ccjbz\") pod \"a4c14d2f-5507-4d08-be37-55d77b5491a3\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.696068 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b9643af6-36f5-46b0-9cca-b9fe67a689dd-config-data-generated\") pod \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.696116 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b9643af6-36f5-46b0-9cca-b9fe67a689dd-kolla-config\") pod \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.696502 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21901911-8523-4adc-9851-336360f4c11e-scripts" (OuterVolumeSpecName: "scripts") pod "21901911-8523-4adc-9851-336360f4c11e" (UID: "21901911-8523-4adc-9851-336360f4c11e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.696746 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21901911-8523-4adc-9851-336360f4c11e-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "21901911-8523-4adc-9851-336360f4c11e" (UID: "21901911-8523-4adc-9851-336360f4c11e"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.696761 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "a4c14d2f-5507-4d08-be37-55d77b5491a3" (UID: "a4c14d2f-5507-4d08-be37-55d77b5491a3"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.696814 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21901911-8523-4adc-9851-336360f4c11e-combined-ca-bundle\") pod \"21901911-8523-4adc-9851-336360f4c11e\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.696850 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9643af6-36f5-46b0-9cca-b9fe67a689dd-galera-tls-certs\") pod \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.696882 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9643af6-36f5-46b0-9cca-b9fe67a689dd-combined-ca-bundle\") pod \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\" (UID: \"b9643af6-36f5-46b0-9cca-b9fe67a689dd\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.696897 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/21901911-8523-4adc-9851-336360f4c11e-ovn-northd-tls-certs\") pod \"21901911-8523-4adc-9851-336360f4c11e\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.696924 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"a4c14d2f-5507-4d08-be37-55d77b5491a3\" (UID: \"a4c14d2f-5507-4d08-be37-55d77b5491a3\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.696945 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-st5dc\" (UniqueName: \"kubernetes.io/projected/21901911-8523-4adc-9851-336360f4c11e-kube-api-access-st5dc\") pod \"21901911-8523-4adc-9851-336360f4c11e\" (UID: \"21901911-8523-4adc-9851-336360f4c11e\") "
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.697234 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21901911-8523-4adc-9851-336360f4c11e-config" (OuterVolumeSpecName: "config") pod "21901911-8523-4adc-9851-336360f4c11e" (UID: "21901911-8523-4adc-9851-336360f4c11e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.697353 4773 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.697369 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/21901911-8523-4adc-9851-336360f4c11e-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.697377 4773 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/21901911-8523-4adc-9851-336360f4c11e-ovn-rundir\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.697388 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8rhh\" (UniqueName: \"kubernetes.io/projected/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-kube-api-access-d8rhh\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.697398 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01b6a60a-2c93-4443-80e5-f3dd77cf7c10-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.697495 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9643af6-36f5-46b0-9cca-b9fe67a689dd-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "b9643af6-36f5-46b0-9cca-b9fe67a689dd" (UID: "b9643af6-36f5-46b0-9cca-b9fe67a689dd"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.697691 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9643af6-36f5-46b0-9cca-b9fe67a689dd-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "b9643af6-36f5-46b0-9cca-b9fe67a689dd" (UID: "b9643af6-36f5-46b0-9cca-b9fe67a689dd"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.697727 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "a4c14d2f-5507-4d08-be37-55d77b5491a3" (UID: "a4c14d2f-5507-4d08-be37-55d77b5491a3"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.698180 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9643af6-36f5-46b0-9cca-b9fe67a689dd-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "b9643af6-36f5-46b0-9cca-b9fe67a689dd" (UID: "b9643af6-36f5-46b0-9cca-b9fe67a689dd"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.698221 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9643af6-36f5-46b0-9cca-b9fe67a689dd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b9643af6-36f5-46b0-9cca-b9fe67a689dd" (UID: "b9643af6-36f5-46b0-9cca-b9fe67a689dd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.699584 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "a4c14d2f-5507-4d08-be37-55d77b5491a3" (UID: "a4c14d2f-5507-4d08-be37-55d77b5491a3"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: E0122 12:19:57.702412 4773 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 22 12:19:57 crc kubenswrapper[4773]: E0122 12:19:57.702481 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-config-data podName:a688a76d-2498-4542-8285-709caf211e8a nodeName:}" failed. No retries permitted until 2026-01-22 12:20:05.702460127 +0000 UTC m=+1513.280576042 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-config-data") pod "rabbitmq-server-0" (UID: "a688a76d-2498-4542-8285-709caf211e8a") : configmap "rabbitmq-config-data" not found Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.702893 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/a4c14d2f-5507-4d08-be37-55d77b5491a3-pod-info" (OuterVolumeSpecName: "pod-info") pod "a4c14d2f-5507-4d08-be37-55d77b5491a3" (UID: "a4c14d2f-5507-4d08-be37-55d77b5491a3"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.704826 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "a4c14d2f-5507-4d08-be37-55d77b5491a3" (UID: "a4c14d2f-5507-4d08-be37-55d77b5491a3"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.705138 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4c14d2f-5507-4d08-be37-55d77b5491a3-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "a4c14d2f-5507-4d08-be37-55d77b5491a3" (UID: "a4c14d2f-5507-4d08-be37-55d77b5491a3"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.713842 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21901911-8523-4adc-9851-336360f4c11e-kube-api-access-st5dc" (OuterVolumeSpecName: "kube-api-access-st5dc") pod "21901911-8523-4adc-9851-336360f4c11e" (UID: "21901911-8523-4adc-9851-336360f4c11e"). InnerVolumeSpecName "kube-api-access-st5dc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.717229 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4c14d2f-5507-4d08-be37-55d77b5491a3-kube-api-access-ccjbz" (OuterVolumeSpecName: "kube-api-access-ccjbz") pod "a4c14d2f-5507-4d08-be37-55d77b5491a3" (UID: "a4c14d2f-5507-4d08-be37-55d77b5491a3"). InnerVolumeSpecName "kube-api-access-ccjbz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.719598 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "mysql-db") pod "b9643af6-36f5-46b0-9cca-b9fe67a689dd" (UID: "b9643af6-36f5-46b0-9cca-b9fe67a689dd"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.724687 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9643af6-36f5-46b0-9cca-b9fe67a689dd-kube-api-access-t9gvm" (OuterVolumeSpecName: "kube-api-access-t9gvm") pod "b9643af6-36f5-46b0-9cca-b9fe67a689dd" (UID: "b9643af6-36f5-46b0-9cca-b9fe67a689dd"). InnerVolumeSpecName "kube-api-access-t9gvm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.727983 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "a4c14d2f-5507-4d08-be37-55d77b5491a3" (UID: "a4c14d2f-5507-4d08-be37-55d77b5491a3"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.736080 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-config-data" (OuterVolumeSpecName: "config-data") pod "a4c14d2f-5507-4d08-be37-55d77b5491a3" (UID: "a4c14d2f-5507-4d08-be37-55d77b5491a3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.745263 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21901911-8523-4adc-9851-336360f4c11e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "21901911-8523-4adc-9851-336360f4c11e" (UID: "21901911-8523-4adc-9851-336360f4c11e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.771360 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-server-conf" (OuterVolumeSpecName: "server-conf") pod "a4c14d2f-5507-4d08-be37-55d77b5491a3" (UID: "a4c14d2f-5507-4d08-be37-55d77b5491a3"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.778371 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9643af6-36f5-46b0-9cca-b9fe67a689dd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9643af6-36f5-46b0-9cca-b9fe67a689dd" (UID: "b9643af6-36f5-46b0-9cca-b9fe67a689dd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.802410 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21901911-8523-4adc-9851-336360f4c11e-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.802433 4773 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.802443 4773 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a4c14d2f-5507-4d08-be37-55d77b5491a3-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.802453 4773 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-server-conf\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.802462 4773 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a4c14d2f-5507-4d08-be37-55d77b5491a3-pod-info\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.802471 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9643af6-36f5-46b0-9cca-b9fe67a689dd-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.802479 4773 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.802488 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b9643af6-36f5-46b0-9cca-b9fe67a689dd-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.802508 4773 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.802520 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.802531 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9gvm\" (UniqueName: \"kubernetes.io/projected/b9643af6-36f5-46b0-9cca-b9fe67a689dd-kube-api-access-t9gvm\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.802540 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ccjbz\" (UniqueName: \"kubernetes.io/projected/a4c14d2f-5507-4d08-be37-55d77b5491a3-kube-api-access-ccjbz\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.802550 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b9643af6-36f5-46b0-9cca-b9fe67a689dd-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 
22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.802561 4773 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b9643af6-36f5-46b0-9cca-b9fe67a689dd-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.802569 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21901911-8523-4adc-9851-336360f4c11e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.802577 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9643af6-36f5-46b0-9cca-b9fe67a689dd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.802589 4773 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.802607 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-st5dc\" (UniqueName: \"kubernetes.io/projected/21901911-8523-4adc-9851-336360f4c11e-kube-api-access-st5dc\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.802616 4773 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a4c14d2f-5507-4d08-be37-55d77b5491a3-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.808529 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21901911-8523-4adc-9851-336360f4c11e-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "21901911-8523-4adc-9851-336360f4c11e" (UID: "21901911-8523-4adc-9851-336360f4c11e"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.810663 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9643af6-36f5-46b0-9cca-b9fe67a689dd-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "b9643af6-36f5-46b0-9cca-b9fe67a689dd" (UID: "b9643af6-36f5-46b0-9cca-b9fe67a689dd"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.828493 4773 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.835142 4773 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.846911 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "a4c14d2f-5507-4d08-be37-55d77b5491a3" (UID: "a4c14d2f-5507-4d08-be37-55d77b5491a3"). InnerVolumeSpecName "rabbitmq-confd". 
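For the two local PVs here (local-storage01-crc, local-storage02-crc) teardown has an extra, node-scoped stage: after the pod-scoped TearDown, the reconciler issues UnmountDevice (reconciler_common.go:286, operation_generator.go:917), because device-mountable plugins keep one shared mount per node that can only be released once no pod references it. A sketch of that refcount gate, with invented names and a single-pod case matching this log:

```go
// Sketch of the UnmountDevice gate for device-mountable volumes: the shared
// node-level mount is released only after the last pod-scoped mount is gone.
// The refcount map and all names are invented for illustration.
package main

import "fmt"

type deviceMounts struct {
	podMounts map[string]int // volume name -> pods still mounting it
}

// tearDownPodMount drops one pod's use of the volume and unmounts the
// shared device when the reference count reaches zero.
func (d *deviceMounts) tearDownPodMount(vol string) {
	d.podMounts[vol]--
	fmt.Printf("UnmountVolume.TearDown succeeded for volume %q\n", vol)
	if d.podMounts[vol] == 0 {
		fmt.Printf("UnmountDevice succeeded for volume %q\n", vol)
	}
}

func main() {
	d := &deviceMounts{podMounts: map[string]int{"local-storage02-crc": 1}}
	d.tearDownPodMount("local-storage02-crc")
}
```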
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.847480 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21901911-8523-4adc-9851-336360f4c11e-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "21901911-8523-4adc-9851-336360f4c11e" (UID: "21901911-8523-4adc-9851-336360f4c11e"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.903591 4773 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9643af6-36f5-46b0-9cca-b9fe67a689dd-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.903618 4773 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/21901911-8523-4adc-9851-336360f4c11e-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.903629 4773 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.903638 4773 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a4c14d2f-5507-4d08-be37-55d77b5491a3-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.903646 4773 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:57 crc kubenswrapper[4773]: I0122 12:19:57.903655 4773 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/21901911-8523-4adc-9851-336360f4c11e-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.125109 4773 util.go:48] "No ready sandbox for pod can be found. 
Jan 22 12:19:58 crc kubenswrapper[4773]: E0122 12:19:58.199826 4773 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=<
Jan 22 12:19:58 crc kubenswrapper[4773]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2026-01-22T12:19:51Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock)
Jan 22 12:19:58 crc kubenswrapper[4773]: /etc/init.d/functions: line 589: 407 Alarm clock "$@"
Jan 22 12:19:58 crc kubenswrapper[4773]: > execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-rp6dh" message=<
Jan 22 12:19:58 crc kubenswrapper[4773]: Exiting ovn-controller (1) [FAILED]
Jan 22 12:19:58 crc kubenswrapper[4773]: Killing ovn-controller (1) [ OK ]
Jan 22 12:19:58 crc kubenswrapper[4773]: Killing ovn-controller (1) with SIGKILL [ OK ]
Jan 22 12:19:58 crc kubenswrapper[4773]: 2026-01-22T12:19:51Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock)
Jan 22 12:19:58 crc kubenswrapper[4773]: /etc/init.d/functions: line 589: 407 Alarm clock "$@"
Jan 22 12:19:58 crc kubenswrapper[4773]: >
Jan 22 12:19:58 crc kubenswrapper[4773]: E0122 12:19:58.199869 4773 kuberuntime_container.go:691] "PreStop hook failed" err=<
Jan 22 12:19:58 crc kubenswrapper[4773]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2026-01-22T12:19:51Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock)
Jan 22 12:19:58 crc kubenswrapper[4773]: /etc/init.d/functions: line 589: 407 Alarm clock "$@"
Jan 22 12:19:58 crc kubenswrapper[4773]: > pod="openstack/ovn-controller-rp6dh" podUID="757f37a0-3cc7-4d57-a956-83d236d6cebc" containerName="ovn-controller" containerID="cri-o://324d59fd90ba3b7ee19caa650ea315b885dd36e757be3f412677159407ce6c1e"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.199946 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-rp6dh" podUID="757f37a0-3cc7-4d57-a956-83d236d6cebc" containerName="ovn-controller" containerID="cri-o://324d59fd90ba3b7ee19caa650ea315b885dd36e757be3f412677159407ce6c1e" gracePeriod=22
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.209024 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-plugins-conf\") pod \"a688a76d-2498-4542-8285-709caf211e8a\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") "
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.209082 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-erlang-cookie\") pod \"a688a76d-2498-4542-8285-709caf211e8a\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") "
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.209111 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-server-conf\") pod \"a688a76d-2498-4542-8285-709caf211e8a\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") "
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.209131 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-plugins\") pod \"a688a76d-2498-4542-8285-709caf211e8a\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") "
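The ovn-controller teardown above shows a PreStop exec hook ('ovn-ctl stop_controller') being cut short: the script's own watchdog fires SIGALRM (signal 14) and the hook exits 137, after which kubelet kills the container with the remaining grace period (gracePeriod=22). The usual kill sequence is SIGTERM, a bounded wait, then SIGKILL; the sketch below demonstrates that pattern with plain os/exec, not kubelet or CRI code.

```go
// Sketch of a graceful kill with a grace period: SIGTERM first, then
// SIGKILL if the process has not exited when the deadline passes.
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	_ = cmd.Process.Signal(syscall.SIGTERM)
	select {
	case <-done:
		fmt.Println("exited within grace period")
	case <-time.After(grace):
		_ = cmd.Process.Kill() // SIGKILL; the process reports exit code 137 (128+9)
		<-done
		fmt.Println("killed after grace period expired")
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	killWithGrace(cmd, 2*time.Second)
}
```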
\"a688a76d-2498-4542-8285-709caf211e8a\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.209186 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-config-data\") pod \"a688a76d-2498-4542-8285-709caf211e8a\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.209207 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"a688a76d-2498-4542-8285-709caf211e8a\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.209254 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-tls\") pod \"a688a76d-2498-4542-8285-709caf211e8a\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.209274 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-confd\") pod \"a688a76d-2498-4542-8285-709caf211e8a\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.209358 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2t2j2\" (UniqueName: \"kubernetes.io/projected/a688a76d-2498-4542-8285-709caf211e8a-kube-api-access-2t2j2\") pod \"a688a76d-2498-4542-8285-709caf211e8a\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.209439 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a688a76d-2498-4542-8285-709caf211e8a-pod-info\") pod \"a688a76d-2498-4542-8285-709caf211e8a\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.209517 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a688a76d-2498-4542-8285-709caf211e8a-erlang-cookie-secret\") pod \"a688a76d-2498-4542-8285-709caf211e8a\" (UID: \"a688a76d-2498-4542-8285-709caf211e8a\") " Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.216053 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "a688a76d-2498-4542-8285-709caf211e8a" (UID: "a688a76d-2498-4542-8285-709caf211e8a"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.216248 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "a688a76d-2498-4542-8285-709caf211e8a" (UID: "a688a76d-2498-4542-8285-709caf211e8a"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.218671 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "persistence") pod "a688a76d-2498-4542-8285-709caf211e8a" (UID: "a688a76d-2498-4542-8285-709caf211e8a"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.219261 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "a688a76d-2498-4542-8285-709caf211e8a" (UID: "a688a76d-2498-4542-8285-709caf211e8a"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.235367 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "a688a76d-2498-4542-8285-709caf211e8a" (UID: "a688a76d-2498-4542-8285-709caf211e8a"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.237861 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a688a76d-2498-4542-8285-709caf211e8a-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "a688a76d-2498-4542-8285-709caf211e8a" (UID: "a688a76d-2498-4542-8285-709caf211e8a"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.251782 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/a688a76d-2498-4542-8285-709caf211e8a-pod-info" (OuterVolumeSpecName: "pod-info") pod "a688a76d-2498-4542-8285-709caf211e8a" (UID: "a688a76d-2498-4542-8285-709caf211e8a"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.259372 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a688a76d-2498-4542-8285-709caf211e8a-kube-api-access-2t2j2" (OuterVolumeSpecName: "kube-api-access-2t2j2") pod "a688a76d-2498-4542-8285-709caf211e8a" (UID: "a688a76d-2498-4542-8285-709caf211e8a"). InnerVolumeSpecName "kube-api-access-2t2j2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.261440 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-config-data" (OuterVolumeSpecName: "config-data") pod "a688a76d-2498-4542-8285-709caf211e8a" (UID: "a688a76d-2498-4542-8285-709caf211e8a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.304687 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-server-conf" (OuterVolumeSpecName: "server-conf") pod "a688a76d-2498-4542-8285-709caf211e8a" (UID: "a688a76d-2498-4542-8285-709caf211e8a"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.311268 4773 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a688a76d-2498-4542-8285-709caf211e8a-pod-info\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.311333 4773 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a688a76d-2498-4542-8285-709caf211e8a-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.311347 4773 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.311359 4773 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.311371 4773 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-server-conf\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.311383 4773 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.311394 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a688a76d-2498-4542-8285-709caf211e8a-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.311427 4773 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.311441 4773 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.311454 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2t2j2\" (UniqueName: \"kubernetes.io/projected/a688a76d-2498-4542-8285-709caf211e8a-kube-api-access-2t2j2\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.329526 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "a688a76d-2498-4542-8285-709caf211e8a" (UID: "a688a76d-2498-4542-8285-709caf211e8a"). InnerVolumeSpecName "rabbitmq-confd". 
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.334256 4773 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.412838 4773 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.413110 4773 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a688a76d-2498-4542-8285-709caf211e8a-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.443001 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-585c9c99b7-xwz6v"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.463408 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_21901911-8523-4adc-9851-336360f4c11e/ovn-northd/0.log"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.463586 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.464465 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"21901911-8523-4adc-9851-336360f4c11e","Type":"ContainerDied","Data":"49d585eb6ef22d582d0ddfbfebfbcd4cce2cb3229ef96d8d2a2301f7bf20b0eb"}
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.465396 4773 scope.go:117] "RemoveContainer" containerID="c527c488066f9ecb13a4d3372005670c6e133edf7d8f14ab25f338473ff9cd61"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.467813 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-rp6dh_757f37a0-3cc7-4d57-a956-83d236d6cebc/ovn-controller/0.log"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.467857 4773 generic.go:334] "Generic (PLEG): container finished" podID="757f37a0-3cc7-4d57-a956-83d236d6cebc" containerID="324d59fd90ba3b7ee19caa650ea315b885dd36e757be3f412677159407ce6c1e" exitCode=137
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.467912 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-rp6dh" event={"ID":"757f37a0-3cc7-4d57-a956-83d236d6cebc","Type":"ContainerDied","Data":"324d59fd90ba3b7ee19caa650ea315b885dd36e757be3f412677159407ce6c1e"}
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.476040 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a4c14d2f-5507-4d08-be37-55d77b5491a3","Type":"ContainerDied","Data":"a98c9817cf5114a40d609ed1a6434be5a0aa603ad2e880ea2d486b0b022c5803"}
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.476149 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
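The exit codes PLEG records in this section follow the shell convention 128+N for a process terminated by signal N: ovn-northd's exitCode=139 is 128+11 (SIGSEGV), and ovn-controller's exitCode=137 here is 128+9 (SIGKILL, from the forced kill after the failed PreStop hook). A small decoder for that convention:

```go
// Decode the 128+N exit-code convention used for signal-terminated processes.
package main

import (
	"fmt"
	"syscall"
)

// signalFromExitCode returns the terminating signal implied by an exit
// code, or 0 if the code does not encode a signal.
func signalFromExitCode(code int) syscall.Signal {
	if code > 128 && code < 128+64 {
		return syscall.Signal(code - 128)
	}
	return 0
}

func main() {
	for _, code := range []int{137, 139} {
		sig := signalFromExitCode(code)
		// On Linux, %v prints the signal's name ("killed", "segmentation fault").
		fmt.Printf("exitCode=%d -> signal %d (%v)\n", code, int(sig), sig)
	}
}
```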
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.503700 4773 generic.go:334] "Generic (PLEG): container finished" podID="3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef" containerID="31c8f654fc7fe97b41b77b38806f5d2076b073ea03d352dc5919cdfee9ad2038" exitCode=0
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.503806 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-585c9c99b7-xwz6v" event={"ID":"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef","Type":"ContainerDied","Data":"31c8f654fc7fe97b41b77b38806f5d2076b073ea03d352dc5919cdfee9ad2038"}
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.503836 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-585c9c99b7-xwz6v" event={"ID":"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef","Type":"ContainerDied","Data":"d10fd517c03fe43c512120d217a0c4745c2bbc39da2079ad25d1d0a3d62406b2"}
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.503904 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-585c9c99b7-xwz6v"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.508199 4773 generic.go:334] "Generic (PLEG): container finished" podID="a688a76d-2498-4542-8285-709caf211e8a" containerID="2495d52a3c6cc3db0f8b3d4246a26ad16514e0b79516c04d720266d790a9bf4c" exitCode=0
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.508265 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.508313 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a688a76d-2498-4542-8285-709caf211e8a","Type":"ContainerDied","Data":"2495d52a3c6cc3db0f8b3d4246a26ad16514e0b79516c04d720266d790a9bf4c"}
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.508345 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a688a76d-2498-4542-8285-709caf211e8a","Type":"ContainerDied","Data":"610009ff10946940eec1d7088721421a698d3ef78d097981f89cdd05c6290df5"}
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.511238 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.513095 4773 scope.go:117] "RemoveContainer" containerID="a5a2900d8f3ec8ba753c4eaa60a956c6d8edc660289208aed85a83444906c24b"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.529668 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"]
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.541308 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"]
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.549795 4773 scope.go:117] "RemoveContainer" containerID="571196d2d3c006d03c5548409716302b0c8d1e601a474e5f6e89f54c68e781d2"
Jan 22 12:19:58 crc kubenswrapper[4773]: E0122 12:19:58.550902 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 324d59fd90ba3b7ee19caa650ea315b885dd36e757be3f412677159407ce6c1e is running failed: container process not found" containerID="324d59fd90ba3b7ee19caa650ea315b885dd36e757be3f412677159407ce6c1e" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"]
Jan 22 12:19:58 crc kubenswrapper[4773]: E0122 12:19:58.551447 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 324d59fd90ba3b7ee19caa650ea315b885dd36e757be3f412677159407ce6c1e is running failed: container process not found" containerID="324d59fd90ba3b7ee19caa650ea315b885dd36e757be3f412677159407ce6c1e" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"]
Jan 22 12:19:58 crc kubenswrapper[4773]: E0122 12:19:58.552689 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 324d59fd90ba3b7ee19caa650ea315b885dd36e757be3f412677159407ce6c1e is running failed: container process not found" containerID="324d59fd90ba3b7ee19caa650ea315b885dd36e757be3f412677159407ce6c1e" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"]
Jan 22 12:19:58 crc kubenswrapper[4773]: E0122 12:19:58.552724 4773 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 324d59fd90ba3b7ee19caa650ea315b885dd36e757be3f412677159407ce6c1e is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-rp6dh" podUID="757f37a0-3cc7-4d57-a956-83d236d6cebc" containerName="ovn-controller"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.554073 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.568034 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.588216 4773 scope.go:117] "RemoveContainer" containerID="1c92b816dc78dcade34afa861b434a64234cddd6198a1d85b73b1ca7f992f05c"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.613399 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"]
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.617408 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-combined-ca-bundle\") pod \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") "
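The ExecSync readiness-probe errors here and just below show two distinct failure modes during teardown: code = NotFound ("container process not found") when the container has already exited, and code = Unknown ("cannot register an exec PID: container is stopping") when the runtime refuses new execs mid-shutdown. Both are expected noise while pods are being deleted. A sketch of telling them apart via the gRPC status code carried by the error; the classifyProbeErr helper is invented for illustration:

```go
// Classify CRI ExecSync failures observed while a container shuts down,
// using the gRPC status code attached to the error.
package main

import (
	"errors"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// classifyProbeErr (invented name) maps a probe transport error to a
// human-readable interpretation of what the runtime reported.
func classifyProbeErr(err error) string {
	switch status.Code(err) {
	case codes.NotFound:
		return "container already exited; probe result is moot"
	case codes.Unknown:
		return "runtime rejected the exec (likely stopping); retry or give up"
	default:
		return "unexpected probe transport error"
	}
}

func main() {
	err := status.Error(codes.NotFound, "container is not created or running")
	fmt.Println(classifyProbeErr(err))
	// status.Code returns codes.Unknown for errors without a gRPC status.
	fmt.Println(classifyProbeErr(errors.New("cannot register an exec PID")))
}
```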
(UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.617486 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-fernet-keys\") pod \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.617528 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-scripts\") pod \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.617576 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-internal-tls-certs\") pod \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.617622 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-public-tls-certs\") pod \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.617687 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jccs\" (UniqueName: \"kubernetes.io/projected/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-kube-api-access-9jccs\") pod \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.617774 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-config-data\") pod \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.617808 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-credential-keys\") pod \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\" (UID: \"3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef\") " Jan 22 12:19:58 crc kubenswrapper[4773]: E0122 12:19:58.619121 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 12:19:58 crc kubenswrapper[4773]: E0122 12:19:58.622252 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 12:19:58 crc kubenswrapper[4773]: E0122 12:19:58.622564 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = 
container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Jan 22 12:19:58 crc kubenswrapper[4773]: E0122 12:19:58.623130 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Jan 22 12:19:58 crc kubenswrapper[4773]: E0122 12:19:58.623168 4773 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-fj6wj" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovsdb-server"
Jan 22 12:19:58 crc kubenswrapper[4773]: E0122 12:19:58.624097 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.625818 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-scripts" (OuterVolumeSpecName: "scripts") pod "3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef" (UID: "3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.629176 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"]
Jan 22 12:19:58 crc kubenswrapper[4773]: E0122 12:19:58.629995 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Jan 22 12:19:58 crc kubenswrapper[4773]: E0122 12:19:58.630134 4773 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-fj6wj" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovs-vswitchd"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.634501 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef" (UID: "3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.634779 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-kube-api-access-9jccs" (OuterVolumeSpecName: "kube-api-access-9jccs") pod "3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef" (UID: "3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef"). InnerVolumeSpecName "kube-api-access-9jccs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.636446 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef" (UID: "3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.637422 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.645476 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.652574 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef" (UID: "3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.656635 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef" (UID: "3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.659859 4773 scope.go:117] "RemoveContainer" containerID="31c8f654fc7fe97b41b77b38806f5d2076b073ea03d352dc5919cdfee9ad2038"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.659999 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-config-data" (OuterVolumeSpecName: "config-data") pod "3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef" (UID: "3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.661694 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef" (UID: "3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.667979 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01b6a60a-2c93-4443-80e5-f3dd77cf7c10" path="/var/lib/kubelet/pods/01b6a60a-2c93-4443-80e5-f3dd77cf7c10/volumes"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.668469 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21901911-8523-4adc-9851-336360f4c11e" path="/var/lib/kubelet/pods/21901911-8523-4adc-9851-336360f4c11e/volumes"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.669103 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="311884d0-65e1-4737-a7ba-efc94510b90b" path="/var/lib/kubelet/pods/311884d0-65e1-4737-a7ba-efc94510b90b/volumes"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.669700 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="637a651c-e338-45ba-8bd3-a8f838500893" path="/var/lib/kubelet/pods/637a651c-e338-45ba-8bd3-a8f838500893/volumes"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.671208 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4c14d2f-5507-4d08-be37-55d77b5491a3" path="/var/lib/kubelet/pods/a4c14d2f-5507-4d08-be37-55d77b5491a3/volumes"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.672007 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a688a76d-2498-4542-8285-709caf211e8a" path="/var/lib/kubelet/pods/a688a76d-2498-4542-8285-709caf211e8a/volumes"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.674166 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9643af6-36f5-46b0-9cca-b9fe67a689dd" path="/var/lib/kubelet/pods/b9643af6-36f5-46b0-9cca-b9fe67a689dd/volumes"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.674761 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd4905ba-f2f9-42c0-a21b-fabf4046af68" path="/var/lib/kubelet/pods/fd4905ba-f2f9-42c0-a21b-fabf4046af68/volumes"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.684482 4773 scope.go:117] "RemoveContainer" containerID="31c8f654fc7fe97b41b77b38806f5d2076b073ea03d352dc5919cdfee9ad2038"
Jan 22 12:19:58 crc kubenswrapper[4773]: E0122 12:19:58.685318 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31c8f654fc7fe97b41b77b38806f5d2076b073ea03d352dc5919cdfee9ad2038\": container with ID starting with 31c8f654fc7fe97b41b77b38806f5d2076b073ea03d352dc5919cdfee9ad2038 not found: ID does not exist" containerID="31c8f654fc7fe97b41b77b38806f5d2076b073ea03d352dc5919cdfee9ad2038"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.685350 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31c8f654fc7fe97b41b77b38806f5d2076b073ea03d352dc5919cdfee9ad2038"} err="failed to get container status \"31c8f654fc7fe97b41b77b38806f5d2076b073ea03d352dc5919cdfee9ad2038\": rpc error: code = NotFound desc = could not find container \"31c8f654fc7fe97b41b77b38806f5d2076b073ea03d352dc5919cdfee9ad2038\": container with ID starting with 31c8f654fc7fe97b41b77b38806f5d2076b073ea03d352dc5919cdfee9ad2038 not found: ID does not exist"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.685373 4773 scope.go:117] "RemoveContainer" containerID="2495d52a3c6cc3db0f8b3d4246a26ad16514e0b79516c04d720266d790a9bf4c"
Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.706185 4773 scope.go:117] 
"RemoveContainer" containerID="e1de773e366c62bf95f18118de0c8136e39a369f67c5d8c6fa06a22d5888c9b9" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.719380 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.719413 4773 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.719422 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.719431 4773 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.719440 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.719450 4773 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.719458 4773 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.719467 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jccs\" (UniqueName: \"kubernetes.io/projected/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef-kube-api-access-9jccs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.743024 4773 scope.go:117] "RemoveContainer" containerID="2495d52a3c6cc3db0f8b3d4246a26ad16514e0b79516c04d720266d790a9bf4c" Jan 22 12:19:58 crc kubenswrapper[4773]: E0122 12:19:58.743766 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2495d52a3c6cc3db0f8b3d4246a26ad16514e0b79516c04d720266d790a9bf4c\": container with ID starting with 2495d52a3c6cc3db0f8b3d4246a26ad16514e0b79516c04d720266d790a9bf4c not found: ID does not exist" containerID="2495d52a3c6cc3db0f8b3d4246a26ad16514e0b79516c04d720266d790a9bf4c" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.743823 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2495d52a3c6cc3db0f8b3d4246a26ad16514e0b79516c04d720266d790a9bf4c"} err="failed to get container status \"2495d52a3c6cc3db0f8b3d4246a26ad16514e0b79516c04d720266d790a9bf4c\": rpc error: code = NotFound desc = could not find container \"2495d52a3c6cc3db0f8b3d4246a26ad16514e0b79516c04d720266d790a9bf4c\": container with ID starting with 2495d52a3c6cc3db0f8b3d4246a26ad16514e0b79516c04d720266d790a9bf4c not found: ID does not exist" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.743859 4773 scope.go:117] 
"RemoveContainer" containerID="e1de773e366c62bf95f18118de0c8136e39a369f67c5d8c6fa06a22d5888c9b9" Jan 22 12:19:58 crc kubenswrapper[4773]: E0122 12:19:58.744391 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1de773e366c62bf95f18118de0c8136e39a369f67c5d8c6fa06a22d5888c9b9\": container with ID starting with e1de773e366c62bf95f18118de0c8136e39a369f67c5d8c6fa06a22d5888c9b9 not found: ID does not exist" containerID="e1de773e366c62bf95f18118de0c8136e39a369f67c5d8c6fa06a22d5888c9b9" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.744449 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1de773e366c62bf95f18118de0c8136e39a369f67c5d8c6fa06a22d5888c9b9"} err="failed to get container status \"e1de773e366c62bf95f18118de0c8136e39a369f67c5d8c6fa06a22d5888c9b9\": rpc error: code = NotFound desc = could not find container \"e1de773e366c62bf95f18118de0c8136e39a369f67c5d8c6fa06a22d5888c9b9\": container with ID starting with e1de773e366c62bf95f18118de0c8136e39a369f67c5d8c6fa06a22d5888c9b9 not found: ID does not exist" Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.840958 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-585c9c99b7-xwz6v"] Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.849949 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-585c9c99b7-xwz6v"] Jan 22 12:19:58 crc kubenswrapper[4773]: I0122 12:19:58.924443 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.024316 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9w2pz\" (UniqueName: \"kubernetes.io/projected/b9cded2e-ee73-4606-8df8-f52bb4bb139d-kube-api-access-9w2pz\") pod \"b9cded2e-ee73-4606-8df8-f52bb4bb139d\" (UID: \"b9cded2e-ee73-4606-8df8-f52bb4bb139d\") " Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.024675 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9cded2e-ee73-4606-8df8-f52bb4bb139d-combined-ca-bundle\") pod \"b9cded2e-ee73-4606-8df8-f52bb4bb139d\" (UID: \"b9cded2e-ee73-4606-8df8-f52bb4bb139d\") " Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.024758 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9cded2e-ee73-4606-8df8-f52bb4bb139d-config-data\") pod \"b9cded2e-ee73-4606-8df8-f52bb4bb139d\" (UID: \"b9cded2e-ee73-4606-8df8-f52bb4bb139d\") " Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.028220 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9cded2e-ee73-4606-8df8-f52bb4bb139d-kube-api-access-9w2pz" (OuterVolumeSpecName: "kube-api-access-9w2pz") pod "b9cded2e-ee73-4606-8df8-f52bb4bb139d" (UID: "b9cded2e-ee73-4606-8df8-f52bb4bb139d"). InnerVolumeSpecName "kube-api-access-9w2pz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.047711 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9cded2e-ee73-4606-8df8-f52bb4bb139d-config-data" (OuterVolumeSpecName: "config-data") pod "b9cded2e-ee73-4606-8df8-f52bb4bb139d" (UID: "b9cded2e-ee73-4606-8df8-f52bb4bb139d"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.056279 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9cded2e-ee73-4606-8df8-f52bb4bb139d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9cded2e-ee73-4606-8df8-f52bb4bb139d" (UID: "b9cded2e-ee73-4606-8df8-f52bb4bb139d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.108021 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-rp6dh_757f37a0-3cc7-4d57-a956-83d236d6cebc/ovn-controller/0.log" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.108109 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-rp6dh" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.132086 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9w2pz\" (UniqueName: \"kubernetes.io/projected/b9cded2e-ee73-4606-8df8-f52bb4bb139d-kube-api-access-9w2pz\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.132120 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9cded2e-ee73-4606-8df8-f52bb4bb139d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.132148 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9cded2e-ee73-4606-8df8-f52bb4bb139d-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.232859 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwv5w\" (UniqueName: \"kubernetes.io/projected/757f37a0-3cc7-4d57-a956-83d236d6cebc-kube-api-access-mwv5w\") pod \"757f37a0-3cc7-4d57-a956-83d236d6cebc\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.232960 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/757f37a0-3cc7-4d57-a956-83d236d6cebc-combined-ca-bundle\") pod \"757f37a0-3cc7-4d57-a956-83d236d6cebc\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.233006 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/757f37a0-3cc7-4d57-a956-83d236d6cebc-var-run\") pod \"757f37a0-3cc7-4d57-a956-83d236d6cebc\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.233054 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/757f37a0-3cc7-4d57-a956-83d236d6cebc-ovn-controller-tls-certs\") pod \"757f37a0-3cc7-4d57-a956-83d236d6cebc\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.233082 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/757f37a0-3cc7-4d57-a956-83d236d6cebc-var-log-ovn\") pod \"757f37a0-3cc7-4d57-a956-83d236d6cebc\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") " Jan 22 12:19:59 crc 
kubenswrapper[4773]: I0122 12:19:59.233116 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/757f37a0-3cc7-4d57-a956-83d236d6cebc-var-run-ovn\") pod \"757f37a0-3cc7-4d57-a956-83d236d6cebc\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") "
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.233161 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/757f37a0-3cc7-4d57-a956-83d236d6cebc-scripts\") pod \"757f37a0-3cc7-4d57-a956-83d236d6cebc\" (UID: \"757f37a0-3cc7-4d57-a956-83d236d6cebc\") "
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.234791 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/757f37a0-3cc7-4d57-a956-83d236d6cebc-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "757f37a0-3cc7-4d57-a956-83d236d6cebc" (UID: "757f37a0-3cc7-4d57-a956-83d236d6cebc"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.234961 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/757f37a0-3cc7-4d57-a956-83d236d6cebc-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "757f37a0-3cc7-4d57-a956-83d236d6cebc" (UID: "757f37a0-3cc7-4d57-a956-83d236d6cebc"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.235113 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/757f37a0-3cc7-4d57-a956-83d236d6cebc-var-run" (OuterVolumeSpecName: "var-run") pod "757f37a0-3cc7-4d57-a956-83d236d6cebc" (UID: "757f37a0-3cc7-4d57-a956-83d236d6cebc"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.236802 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/757f37a0-3cc7-4d57-a956-83d236d6cebc-scripts" (OuterVolumeSpecName: "scripts") pod "757f37a0-3cc7-4d57-a956-83d236d6cebc" (UID: "757f37a0-3cc7-4d57-a956-83d236d6cebc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.238862 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/757f37a0-3cc7-4d57-a956-83d236d6cebc-kube-api-access-mwv5w" (OuterVolumeSpecName: "kube-api-access-mwv5w") pod "757f37a0-3cc7-4d57-a956-83d236d6cebc" (UID: "757f37a0-3cc7-4d57-a956-83d236d6cebc"). InnerVolumeSpecName "kube-api-access-mwv5w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.257990 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/757f37a0-3cc7-4d57-a956-83d236d6cebc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "757f37a0-3cc7-4d57-a956-83d236d6cebc" (UID: "757f37a0-3cc7-4d57-a956-83d236d6cebc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.319497 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/757f37a0-3cc7-4d57-a956-83d236d6cebc-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "757f37a0-3cc7-4d57-a956-83d236d6cebc" (UID: "757f37a0-3cc7-4d57-a956-83d236d6cebc"). InnerVolumeSpecName "ovn-controller-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.335710 4773 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/757f37a0-3cc7-4d57-a956-83d236d6cebc-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.335740 4773 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/757f37a0-3cc7-4d57-a956-83d236d6cebc-var-log-ovn\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.335753 4773 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/757f37a0-3cc7-4d57-a956-83d236d6cebc-var-run-ovn\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.335765 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/757f37a0-3cc7-4d57-a956-83d236d6cebc-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.335775 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwv5w\" (UniqueName: \"kubernetes.io/projected/757f37a0-3cc7-4d57-a956-83d236d6cebc-kube-api-access-mwv5w\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.335785 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/757f37a0-3cc7-4d57-a956-83d236d6cebc-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.335793 4773 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/757f37a0-3cc7-4d57-a956-83d236d6cebc-var-run\") on node \"crc\" DevicePath \"\""
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.345960 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7d56c9f5dc-qd9zs"
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.437784 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8cdh\" (UniqueName: \"kubernetes.io/projected/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-kube-api-access-x8cdh\") pod \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") "
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.437892 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-logs\") pod \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") "
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.437919 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-config-data-custom\") pod \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") "
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.437960 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-config-data\") pod \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") "
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.438009 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-combined-ca-bundle\") pod \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\" (UID: \"a5dc7991-ffde-4ef2-9668-e07d7c4aa614\") "
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.439178 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-logs" (OuterVolumeSpecName: "logs") pod "a5dc7991-ffde-4ef2-9668-e07d7c4aa614" (UID: "a5dc7991-ffde-4ef2-9668-e07d7c4aa614"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.442302 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-kube-api-access-x8cdh" (OuterVolumeSpecName: "kube-api-access-x8cdh") pod "a5dc7991-ffde-4ef2-9668-e07d7c4aa614" (UID: "a5dc7991-ffde-4ef2-9668-e07d7c4aa614"). InnerVolumeSpecName "kube-api-access-x8cdh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.442548 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a5dc7991-ffde-4ef2-9668-e07d7c4aa614" (UID: "a5dc7991-ffde-4ef2-9668-e07d7c4aa614"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.457458 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a5dc7991-ffde-4ef2-9668-e07d7c4aa614" (UID: "a5dc7991-ffde-4ef2-9668-e07d7c4aa614"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.483241 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-config-data" (OuterVolumeSpecName: "config-data") pod "a5dc7991-ffde-4ef2-9668-e07d7c4aa614" (UID: "a5dc7991-ffde-4ef2-9668-e07d7c4aa614"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.521515 4773 generic.go:334] "Generic (PLEG): container finished" podID="b9cded2e-ee73-4606-8df8-f52bb4bb139d" containerID="43101b119afcdef21c580975d02a69a753cc73834041f83f630e3b6a0cf4ac01" exitCode=0 Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.521596 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"b9cded2e-ee73-4606-8df8-f52bb4bb139d","Type":"ContainerDied","Data":"43101b119afcdef21c580975d02a69a753cc73834041f83f630e3b6a0cf4ac01"} Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.521623 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"b9cded2e-ee73-4606-8df8-f52bb4bb139d","Type":"ContainerDied","Data":"2e0e5eb9032308c422b9fede32596dee75d172876679f67fbf73f3085790fb72"} Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.521629 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.521661 4773 scope.go:117] "RemoveContainer" containerID="43101b119afcdef21c580975d02a69a753cc73834041f83f630e3b6a0cf4ac01" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.524713 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-rp6dh_757f37a0-3cc7-4d57-a956-83d236d6cebc/ovn-controller/0.log" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.524815 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-rp6dh" event={"ID":"757f37a0-3cc7-4d57-a956-83d236d6cebc","Type":"ContainerDied","Data":"606566cb816ad78e3a1f2afdbbf4e015fcfdfbf9f931d601283018af434bb85b"} Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.524850 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-rp6dh" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.529719 4773 generic.go:334] "Generic (PLEG): container finished" podID="a5dc7991-ffde-4ef2-9668-e07d7c4aa614" containerID="4624405b99d73b299a8828478704896ad836a6a31e3b351117d416ec3a4599ec" exitCode=0 Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.529793 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" event={"ID":"a5dc7991-ffde-4ef2-9668-e07d7c4aa614","Type":"ContainerDied","Data":"4624405b99d73b299a8828478704896ad836a6a31e3b351117d416ec3a4599ec"} Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.529822 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" event={"ID":"a5dc7991-ffde-4ef2-9668-e07d7c4aa614","Type":"ContainerDied","Data":"d4bbf41adb87935d04b3119f36e9161d98c445515e323bebefc418d38c64e3ff"} Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.529885 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-7d56c9f5dc-qd9zs" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.539378 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.539400 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.539412 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8cdh\" (UniqueName: \"kubernetes.io/projected/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-kube-api-access-x8cdh\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.539423 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-logs\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.539434 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a5dc7991-ffde-4ef2-9668-e07d7c4aa614-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.555261 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="04725b89-f1ec-45f8-a69a-5427230da499" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.169:8776/healthcheck\": dial tcp 10.217.0.169:8776: i/o timeout" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.560941 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.571613 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.578696 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-rp6dh"] Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.587607 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-rp6dh"] Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.588500 4773 scope.go:117] "RemoveContainer" containerID="43101b119afcdef21c580975d02a69a753cc73834041f83f630e3b6a0cf4ac01" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.594239 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-7d56c9f5dc-qd9zs"] Jan 22 12:19:59 crc kubenswrapper[4773]: E0122 12:19:59.594802 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43101b119afcdef21c580975d02a69a753cc73834041f83f630e3b6a0cf4ac01\": container with ID starting with 43101b119afcdef21c580975d02a69a753cc73834041f83f630e3b6a0cf4ac01 not found: ID does not exist" containerID="43101b119afcdef21c580975d02a69a753cc73834041f83f630e3b6a0cf4ac01" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.594851 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43101b119afcdef21c580975d02a69a753cc73834041f83f630e3b6a0cf4ac01"} err="failed to get container status \"43101b119afcdef21c580975d02a69a753cc73834041f83f630e3b6a0cf4ac01\": rpc error: code = NotFound 
desc = could not find container \"43101b119afcdef21c580975d02a69a753cc73834041f83f630e3b6a0cf4ac01\": container with ID starting with 43101b119afcdef21c580975d02a69a753cc73834041f83f630e3b6a0cf4ac01 not found: ID does not exist" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.594882 4773 scope.go:117] "RemoveContainer" containerID="324d59fd90ba3b7ee19caa650ea315b885dd36e757be3f412677159407ce6c1e" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.599244 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-7d56c9f5dc-qd9zs"] Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.627046 4773 scope.go:117] "RemoveContainer" containerID="4624405b99d73b299a8828478704896ad836a6a31e3b351117d416ec3a4599ec" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.659079 4773 scope.go:117] "RemoveContainer" containerID="7e617c6963bb509b84049d20e6f222d4d651113da5d06d4b6d6a4307baa6b527" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.678076 4773 scope.go:117] "RemoveContainer" containerID="4624405b99d73b299a8828478704896ad836a6a31e3b351117d416ec3a4599ec" Jan 22 12:19:59 crc kubenswrapper[4773]: E0122 12:19:59.678488 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4624405b99d73b299a8828478704896ad836a6a31e3b351117d416ec3a4599ec\": container with ID starting with 4624405b99d73b299a8828478704896ad836a6a31e3b351117d416ec3a4599ec not found: ID does not exist" containerID="4624405b99d73b299a8828478704896ad836a6a31e3b351117d416ec3a4599ec" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.678519 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4624405b99d73b299a8828478704896ad836a6a31e3b351117d416ec3a4599ec"} err="failed to get container status \"4624405b99d73b299a8828478704896ad836a6a31e3b351117d416ec3a4599ec\": rpc error: code = NotFound desc = could not find container \"4624405b99d73b299a8828478704896ad836a6a31e3b351117d416ec3a4599ec\": container with ID starting with 4624405b99d73b299a8828478704896ad836a6a31e3b351117d416ec3a4599ec not found: ID does not exist" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.678546 4773 scope.go:117] "RemoveContainer" containerID="7e617c6963bb509b84049d20e6f222d4d651113da5d06d4b6d6a4307baa6b527" Jan 22 12:19:59 crc kubenswrapper[4773]: E0122 12:19:59.678956 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e617c6963bb509b84049d20e6f222d4d651113da5d06d4b6d6a4307baa6b527\": container with ID starting with 7e617c6963bb509b84049d20e6f222d4d651113da5d06d4b6d6a4307baa6b527 not found: ID does not exist" containerID="7e617c6963bb509b84049d20e6f222d4d651113da5d06d4b6d6a4307baa6b527" Jan 22 12:19:59 crc kubenswrapper[4773]: I0122 12:19:59.679018 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e617c6963bb509b84049d20e6f222d4d651113da5d06d4b6d6a4307baa6b527"} err="failed to get container status \"7e617c6963bb509b84049d20e6f222d4d651113da5d06d4b6d6a4307baa6b527\": rpc error: code = NotFound desc = could not find container \"7e617c6963bb509b84049d20e6f222d4d651113da5d06d4b6d6a4307baa6b527\": container with ID starting with 7e617c6963bb509b84049d20e6f222d4d651113da5d06d4b6d6a4307baa6b527 not found: ID does not exist" Jan 22 12:20:00 crc kubenswrapper[4773]: I0122 12:20:00.147418 4773 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack/kube-state-metrics-0" podUID="fd4905ba-f2f9-42c0-a21b-fabf4046af68" containerName="kube-state-metrics" probeResult="failure" output="Get \"https://10.217.0.198:8081/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 22 12:20:00 crc kubenswrapper[4773]: I0122 12:20:00.492925 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Jan 22 12:20:00 crc kubenswrapper[4773]: I0122 12:20:00.493225 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="384ccba5-b841-48d7-bdb6-ad40c08d6c8f" containerName="memcached" containerID="cri-o://c6a6d375982d6ece17ea4a974ce653a46efdb3e163bbbca7a078792348a6a155" gracePeriod=30 Jan 22 12:20:00 crc kubenswrapper[4773]: I0122 12:20:00.554107 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 12:20:00 crc kubenswrapper[4773]: I0122 12:20:00.554327 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="d8dfee1b-6f2f-40c0-b676-64e94df5f64d" containerName="nova-scheduler-scheduler" containerID="cri-o://51afb15c9b5099562ab106b026285ab7ba955147395bf085d62a4e072f75e4de" gracePeriod=30 Jan 22 12:20:00 crc kubenswrapper[4773]: I0122 12:20:00.669759 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef" path="/var/lib/kubelet/pods/3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef/volumes" Jan 22 12:20:00 crc kubenswrapper[4773]: I0122 12:20:00.670522 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="757f37a0-3cc7-4d57-a956-83d236d6cebc" path="/var/lib/kubelet/pods/757f37a0-3cc7-4d57-a956-83d236d6cebc/volumes" Jan 22 12:20:00 crc kubenswrapper[4773]: I0122 12:20:00.671326 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5dc7991-ffde-4ef2-9668-e07d7c4aa614" path="/var/lib/kubelet/pods/a5dc7991-ffde-4ef2-9668-e07d7c4aa614/volumes" Jan 22 12:20:00 crc kubenswrapper[4773]: I0122 12:20:00.672389 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9cded2e-ee73-4606-8df8-f52bb4bb139d" path="/var/lib/kubelet/pods/b9cded2e-ee73-4606-8df8-f52bb4bb139d/volumes" Jan 22 12:20:01 crc kubenswrapper[4773]: E0122 12:20:01.930228 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="51afb15c9b5099562ab106b026285ab7ba955147395bf085d62a4e072f75e4de" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 22 12:20:01 crc kubenswrapper[4773]: E0122 12:20:01.935566 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="51afb15c9b5099562ab106b026285ab7ba955147395bf085d62a4e072f75e4de" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 22 12:20:01 crc kubenswrapper[4773]: E0122 12:20:01.938789 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="51afb15c9b5099562ab106b026285ab7ba955147395bf085d62a4e072f75e4de" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 22 12:20:01 crc kubenswrapper[4773]: E0122 12:20:01.938895 4773 prober.go:104] 
"Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="d8dfee1b-6f2f-40c0-b676-64e94df5f64d" containerName="nova-scheduler-scheduler" Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.247361 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.391207 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-kolla-config\") pod \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.391257 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-config-data\") pod \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.391311 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-combined-ca-bundle\") pod \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.391349 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-memcached-tls-certs\") pod \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.391406 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tszbj\" (UniqueName: \"kubernetes.io/projected/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-kube-api-access-tszbj\") pod \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\" (UID: \"384ccba5-b841-48d7-bdb6-ad40c08d6c8f\") " Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.391973 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "384ccba5-b841-48d7-bdb6-ad40c08d6c8f" (UID: "384ccba5-b841-48d7-bdb6-ad40c08d6c8f"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.391998 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-config-data" (OuterVolumeSpecName: "config-data") pod "384ccba5-b841-48d7-bdb6-ad40c08d6c8f" (UID: "384ccba5-b841-48d7-bdb6-ad40c08d6c8f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.415483 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-kube-api-access-tszbj" (OuterVolumeSpecName: "kube-api-access-tszbj") pod "384ccba5-b841-48d7-bdb6-ad40c08d6c8f" (UID: "384ccba5-b841-48d7-bdb6-ad40c08d6c8f"). InnerVolumeSpecName "kube-api-access-tszbj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.434905 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "384ccba5-b841-48d7-bdb6-ad40c08d6c8f" (UID: "384ccba5-b841-48d7-bdb6-ad40c08d6c8f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.437850 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "384ccba5-b841-48d7-bdb6-ad40c08d6c8f" (UID: "384ccba5-b841-48d7-bdb6-ad40c08d6c8f"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.493314 4773 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.493347 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.493357 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.493367 4773 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.493379 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tszbj\" (UniqueName: \"kubernetes.io/projected/384ccba5-b841-48d7-bdb6-ad40c08d6c8f-kube-api-access-tszbj\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.577516 4773 generic.go:334] "Generic (PLEG): container finished" podID="384ccba5-b841-48d7-bdb6-ad40c08d6c8f" containerID="c6a6d375982d6ece17ea4a974ce653a46efdb3e163bbbca7a078792348a6a155" exitCode=0 Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.577558 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"384ccba5-b841-48d7-bdb6-ad40c08d6c8f","Type":"ContainerDied","Data":"c6a6d375982d6ece17ea4a974ce653a46efdb3e163bbbca7a078792348a6a155"} Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.577586 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"384ccba5-b841-48d7-bdb6-ad40c08d6c8f","Type":"ContainerDied","Data":"bbd855a897f78e0df5ed5f44b7cc8cc23da99e1fc29e60782dab409a668ce5f8"} Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.577604 4773 scope.go:117] "RemoveContainer" containerID="c6a6d375982d6ece17ea4a974ce653a46efdb3e163bbbca7a078792348a6a155" Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.577598 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.611548 4773 scope.go:117] "RemoveContainer" containerID="c6a6d375982d6ece17ea4a974ce653a46efdb3e163bbbca7a078792348a6a155" Jan 22 12:20:02 crc kubenswrapper[4773]: E0122 12:20:02.614570 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6a6d375982d6ece17ea4a974ce653a46efdb3e163bbbca7a078792348a6a155\": container with ID starting with c6a6d375982d6ece17ea4a974ce653a46efdb3e163bbbca7a078792348a6a155 not found: ID does not exist" containerID="c6a6d375982d6ece17ea4a974ce653a46efdb3e163bbbca7a078792348a6a155" Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.614628 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6a6d375982d6ece17ea4a974ce653a46efdb3e163bbbca7a078792348a6a155"} err="failed to get container status \"c6a6d375982d6ece17ea4a974ce653a46efdb3e163bbbca7a078792348a6a155\": rpc error: code = NotFound desc = could not find container \"c6a6d375982d6ece17ea4a974ce653a46efdb3e163bbbca7a078792348a6a155\": container with ID starting with c6a6d375982d6ece17ea4a974ce653a46efdb3e163bbbca7a078792348a6a155 not found: ID does not exist" Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.619474 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.627762 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Jan 22 12:20:02 crc kubenswrapper[4773]: I0122 12:20:02.671842 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="384ccba5-b841-48d7-bdb6-ad40c08d6c8f" path="/var/lib/kubelet/pods/384ccba5-b841-48d7-bdb6-ad40c08d6c8f/volumes" Jan 22 12:20:03 crc kubenswrapper[4773]: E0122 12:20:03.618506 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 12:20:03 crc kubenswrapper[4773]: E0122 12:20:03.621321 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 12:20:03 crc kubenswrapper[4773]: E0122 12:20:03.621446 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 12:20:03 crc kubenswrapper[4773]: E0122 12:20:03.622081 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" 
containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 12:20:03 crc kubenswrapper[4773]: E0122 12:20:03.622140 4773 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-fj6wj" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovsdb-server" Jan 22 12:20:03 crc kubenswrapper[4773]: E0122 12:20:03.624102 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 12:20:03 crc kubenswrapper[4773]: E0122 12:20:03.625701 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 12:20:03 crc kubenswrapper[4773]: E0122 12:20:03.625733 4773 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-fj6wj" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovs-vswitchd" Jan 22 12:20:04 crc kubenswrapper[4773]: I0122 12:20:04.073870 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:20:04 crc kubenswrapper[4773]: I0122 12:20:04.074431 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:20:06 crc kubenswrapper[4773]: I0122 12:20:06.516098 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 12:20:06 crc kubenswrapper[4773]: I0122 12:20:06.618623 4773 generic.go:334] "Generic (PLEG): container finished" podID="d8dfee1b-6f2f-40c0-b676-64e94df5f64d" containerID="51afb15c9b5099562ab106b026285ab7ba955147395bf085d62a4e072f75e4de" exitCode=0 Jan 22 12:20:06 crc kubenswrapper[4773]: I0122 12:20:06.618664 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d8dfee1b-6f2f-40c0-b676-64e94df5f64d","Type":"ContainerDied","Data":"51afb15c9b5099562ab106b026285ab7ba955147395bf085d62a4e072f75e4de"} Jan 22 12:20:06 crc kubenswrapper[4773]: I0122 12:20:06.618690 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d8dfee1b-6f2f-40c0-b676-64e94df5f64d","Type":"ContainerDied","Data":"19088b3d43e1967e69bac645fe9dc78a46c2dd8bce7f72a51c2b7409eec1a16b"} Jan 22 12:20:06 crc kubenswrapper[4773]: I0122 12:20:06.618706 4773 scope.go:117] "RemoveContainer" containerID="51afb15c9b5099562ab106b026285ab7ba955147395bf085d62a4e072f75e4de" Jan 22 12:20:06 crc kubenswrapper[4773]: I0122 12:20:06.618844 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 12:20:06 crc kubenswrapper[4773]: I0122 12:20:06.637753 4773 scope.go:117] "RemoveContainer" containerID="51afb15c9b5099562ab106b026285ab7ba955147395bf085d62a4e072f75e4de" Jan 22 12:20:06 crc kubenswrapper[4773]: E0122 12:20:06.638668 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51afb15c9b5099562ab106b026285ab7ba955147395bf085d62a4e072f75e4de\": container with ID starting with 51afb15c9b5099562ab106b026285ab7ba955147395bf085d62a4e072f75e4de not found: ID does not exist" containerID="51afb15c9b5099562ab106b026285ab7ba955147395bf085d62a4e072f75e4de" Jan 22 12:20:06 crc kubenswrapper[4773]: I0122 12:20:06.638716 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51afb15c9b5099562ab106b026285ab7ba955147395bf085d62a4e072f75e4de"} err="failed to get container status \"51afb15c9b5099562ab106b026285ab7ba955147395bf085d62a4e072f75e4de\": rpc error: code = NotFound desc = could not find container \"51afb15c9b5099562ab106b026285ab7ba955147395bf085d62a4e072f75e4de\": container with ID starting with 51afb15c9b5099562ab106b026285ab7ba955147395bf085d62a4e072f75e4de not found: ID does not exist" Jan 22 12:20:06 crc kubenswrapper[4773]: I0122 12:20:06.675077 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9rs5\" (UniqueName: \"kubernetes.io/projected/d8dfee1b-6f2f-40c0-b676-64e94df5f64d-kube-api-access-z9rs5\") pod \"d8dfee1b-6f2f-40c0-b676-64e94df5f64d\" (UID: \"d8dfee1b-6f2f-40c0-b676-64e94df5f64d\") " Jan 22 12:20:06 crc kubenswrapper[4773]: I0122 12:20:06.675124 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8dfee1b-6f2f-40c0-b676-64e94df5f64d-config-data\") pod \"d8dfee1b-6f2f-40c0-b676-64e94df5f64d\" (UID: \"d8dfee1b-6f2f-40c0-b676-64e94df5f64d\") " Jan 22 12:20:06 crc kubenswrapper[4773]: I0122 12:20:06.675160 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8dfee1b-6f2f-40c0-b676-64e94df5f64d-combined-ca-bundle\") pod \"d8dfee1b-6f2f-40c0-b676-64e94df5f64d\" (UID: 
\"d8dfee1b-6f2f-40c0-b676-64e94df5f64d\") " Jan 22 12:20:06 crc kubenswrapper[4773]: I0122 12:20:06.680386 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8dfee1b-6f2f-40c0-b676-64e94df5f64d-kube-api-access-z9rs5" (OuterVolumeSpecName: "kube-api-access-z9rs5") pod "d8dfee1b-6f2f-40c0-b676-64e94df5f64d" (UID: "d8dfee1b-6f2f-40c0-b676-64e94df5f64d"). InnerVolumeSpecName "kube-api-access-z9rs5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:20:06 crc kubenswrapper[4773]: I0122 12:20:06.700562 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8dfee1b-6f2f-40c0-b676-64e94df5f64d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d8dfee1b-6f2f-40c0-b676-64e94df5f64d" (UID: "d8dfee1b-6f2f-40c0-b676-64e94df5f64d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:06 crc kubenswrapper[4773]: I0122 12:20:06.701294 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8dfee1b-6f2f-40c0-b676-64e94df5f64d-config-data" (OuterVolumeSpecName: "config-data") pod "d8dfee1b-6f2f-40c0-b676-64e94df5f64d" (UID: "d8dfee1b-6f2f-40c0-b676-64e94df5f64d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:06 crc kubenswrapper[4773]: I0122 12:20:06.777117 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9rs5\" (UniqueName: \"kubernetes.io/projected/d8dfee1b-6f2f-40c0-b676-64e94df5f64d-kube-api-access-z9rs5\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:06 crc kubenswrapper[4773]: I0122 12:20:06.777408 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8dfee1b-6f2f-40c0-b676-64e94df5f64d-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:06 crc kubenswrapper[4773]: I0122 12:20:06.777419 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8dfee1b-6f2f-40c0-b676-64e94df5f64d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:06 crc kubenswrapper[4773]: I0122 12:20:06.953413 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 12:20:06 crc kubenswrapper[4773]: I0122 12:20:06.959855 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.238793 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.386033 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-internal-tls-certs\") pod \"a393de80-9ad0-413e-a2a1-6ee14de22049\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.386089 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-config\") pod \"a393de80-9ad0-413e-a2a1-6ee14de22049\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.386159 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-public-tls-certs\") pod \"a393de80-9ad0-413e-a2a1-6ee14de22049\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.386229 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zk6d8\" (UniqueName: \"kubernetes.io/projected/a393de80-9ad0-413e-a2a1-6ee14de22049-kube-api-access-zk6d8\") pod \"a393de80-9ad0-413e-a2a1-6ee14de22049\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.386331 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-combined-ca-bundle\") pod \"a393de80-9ad0-413e-a2a1-6ee14de22049\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.386360 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-httpd-config\") pod \"a393de80-9ad0-413e-a2a1-6ee14de22049\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.386388 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-ovndb-tls-certs\") pod \"a393de80-9ad0-413e-a2a1-6ee14de22049\" (UID: \"a393de80-9ad0-413e-a2a1-6ee14de22049\") " Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.390709 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a393de80-9ad0-413e-a2a1-6ee14de22049-kube-api-access-zk6d8" (OuterVolumeSpecName: "kube-api-access-zk6d8") pod "a393de80-9ad0-413e-a2a1-6ee14de22049" (UID: "a393de80-9ad0-413e-a2a1-6ee14de22049"). InnerVolumeSpecName "kube-api-access-zk6d8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.397767 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "a393de80-9ad0-413e-a2a1-6ee14de22049" (UID: "a393de80-9ad0-413e-a2a1-6ee14de22049"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.425687 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a393de80-9ad0-413e-a2a1-6ee14de22049" (UID: "a393de80-9ad0-413e-a2a1-6ee14de22049"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.439463 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a393de80-9ad0-413e-a2a1-6ee14de22049" (UID: "a393de80-9ad0-413e-a2a1-6ee14de22049"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.442249 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a393de80-9ad0-413e-a2a1-6ee14de22049" (UID: "a393de80-9ad0-413e-a2a1-6ee14de22049"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.453841 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "a393de80-9ad0-413e-a2a1-6ee14de22049" (UID: "a393de80-9ad0-413e-a2a1-6ee14de22049"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.462972 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-config" (OuterVolumeSpecName: "config") pod "a393de80-9ad0-413e-a2a1-6ee14de22049" (UID: "a393de80-9ad0-413e-a2a1-6ee14de22049"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.488702 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.488998 4773 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.489156 4773 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.489237 4773 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.489333 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-config\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.489411 4773 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a393de80-9ad0-413e-a2a1-6ee14de22049-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.489491 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zk6d8\" (UniqueName: \"kubernetes.io/projected/a393de80-9ad0-413e-a2a1-6ee14de22049-kube-api-access-zk6d8\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.629818 4773 generic.go:334] "Generic (PLEG): container finished" podID="a393de80-9ad0-413e-a2a1-6ee14de22049" containerID="e4758759a8e6e7a9d38053a3b2d48dd8294e18fc3ff82b5caecad2b27736586b" exitCode=0 Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.630661 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c54fcb95c-24djg" event={"ID":"a393de80-9ad0-413e-a2a1-6ee14de22049","Type":"ContainerDied","Data":"e4758759a8e6e7a9d38053a3b2d48dd8294e18fc3ff82b5caecad2b27736586b"} Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.630780 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c54fcb95c-24djg" event={"ID":"a393de80-9ad0-413e-a2a1-6ee14de22049","Type":"ContainerDied","Data":"c8ec69fd30ff3f86ecbbe357cf9c7288931466d7ad02d6e9d6eb30e9b599a11b"} Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.630874 4773 scope.go:117] "RemoveContainer" containerID="b02f864bc31ff6b515389a47f6d522f1429809938c2225e47c25f5a54c54ef37" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.631063 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5c54fcb95c-24djg" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.655926 4773 scope.go:117] "RemoveContainer" containerID="e4758759a8e6e7a9d38053a3b2d48dd8294e18fc3ff82b5caecad2b27736586b" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.688637 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5c54fcb95c-24djg"] Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.688740 4773 scope.go:117] "RemoveContainer" containerID="b02f864bc31ff6b515389a47f6d522f1429809938c2225e47c25f5a54c54ef37" Jan 22 12:20:07 crc kubenswrapper[4773]: E0122 12:20:07.689074 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b02f864bc31ff6b515389a47f6d522f1429809938c2225e47c25f5a54c54ef37\": container with ID starting with b02f864bc31ff6b515389a47f6d522f1429809938c2225e47c25f5a54c54ef37 not found: ID does not exist" containerID="b02f864bc31ff6b515389a47f6d522f1429809938c2225e47c25f5a54c54ef37" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.689141 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b02f864bc31ff6b515389a47f6d522f1429809938c2225e47c25f5a54c54ef37"} err="failed to get container status \"b02f864bc31ff6b515389a47f6d522f1429809938c2225e47c25f5a54c54ef37\": rpc error: code = NotFound desc = could not find container \"b02f864bc31ff6b515389a47f6d522f1429809938c2225e47c25f5a54c54ef37\": container with ID starting with b02f864bc31ff6b515389a47f6d522f1429809938c2225e47c25f5a54c54ef37 not found: ID does not exist" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.689187 4773 scope.go:117] "RemoveContainer" containerID="e4758759a8e6e7a9d38053a3b2d48dd8294e18fc3ff82b5caecad2b27736586b" Jan 22 12:20:07 crc kubenswrapper[4773]: E0122 12:20:07.689632 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4758759a8e6e7a9d38053a3b2d48dd8294e18fc3ff82b5caecad2b27736586b\": container with ID starting with e4758759a8e6e7a9d38053a3b2d48dd8294e18fc3ff82b5caecad2b27736586b not found: ID does not exist" containerID="e4758759a8e6e7a9d38053a3b2d48dd8294e18fc3ff82b5caecad2b27736586b" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.689670 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4758759a8e6e7a9d38053a3b2d48dd8294e18fc3ff82b5caecad2b27736586b"} err="failed to get container status \"e4758759a8e6e7a9d38053a3b2d48dd8294e18fc3ff82b5caecad2b27736586b\": rpc error: code = NotFound desc = could not find container \"e4758759a8e6e7a9d38053a3b2d48dd8294e18fc3ff82b5caecad2b27736586b\": container with ID starting with e4758759a8e6e7a9d38053a3b2d48dd8294e18fc3ff82b5caecad2b27736586b not found: ID does not exist" Jan 22 12:20:07 crc kubenswrapper[4773]: I0122 12:20:07.692328 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5c54fcb95c-24djg"] Jan 22 12:20:08 crc kubenswrapper[4773]: E0122 12:20:08.619233 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 12:20:08 crc 
kubenswrapper[4773]: E0122 12:20:08.620212 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 12:20:08 crc kubenswrapper[4773]: E0122 12:20:08.620840 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 12:20:08 crc kubenswrapper[4773]: E0122 12:20:08.621220 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 12:20:08 crc kubenswrapper[4773]: E0122 12:20:08.621335 4773 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-fj6wj" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovsdb-server" Jan 22 12:20:08 crc kubenswrapper[4773]: E0122 12:20:08.622801 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 12:20:08 crc kubenswrapper[4773]: E0122 12:20:08.624472 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 12:20:08 crc kubenswrapper[4773]: E0122 12:20:08.624521 4773 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-fj6wj" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovs-vswitchd" Jan 22 12:20:08 crc kubenswrapper[4773]: I0122 12:20:08.670474 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a393de80-9ad0-413e-a2a1-6ee14de22049" path="/var/lib/kubelet/pods/a393de80-9ad0-413e-a2a1-6ee14de22049/volumes" Jan 22 12:20:08 crc kubenswrapper[4773]: I0122 12:20:08.671342 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8dfee1b-6f2f-40c0-b676-64e94df5f64d" path="/var/lib/kubelet/pods/d8dfee1b-6f2f-40c0-b676-64e94df5f64d/volumes" Jan 22 12:20:13 crc kubenswrapper[4773]: E0122 12:20:13.618487 4773 log.go:32] "ExecSync cmd 
from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 12:20:13 crc kubenswrapper[4773]: E0122 12:20:13.619800 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 12:20:13 crc kubenswrapper[4773]: E0122 12:20:13.620850 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 12:20:13 crc kubenswrapper[4773]: E0122 12:20:13.620906 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 12:20:13 crc kubenswrapper[4773]: E0122 12:20:13.620973 4773 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-fj6wj" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovsdb-server" Jan 22 12:20:13 crc kubenswrapper[4773]: E0122 12:20:13.622915 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 12:20:13 crc kubenswrapper[4773]: E0122 12:20:13.625032 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 12:20:13 crc kubenswrapper[4773]: E0122 12:20:13.625108 4773 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-fj6wj" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovs-vswitchd" Jan 22 12:20:18 crc kubenswrapper[4773]: E0122 12:20:18.618601 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or 
running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 12:20:18 crc kubenswrapper[4773]: E0122 12:20:18.620016 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 12:20:18 crc kubenswrapper[4773]: E0122 12:20:18.620065 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 12:20:18 crc kubenswrapper[4773]: E0122 12:20:18.620505 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 22 12:20:18 crc kubenswrapper[4773]: E0122 12:20:18.620550 4773 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-fj6wj" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovsdb-server" Jan 22 12:20:18 crc kubenswrapper[4773]: E0122 12:20:18.622641 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 12:20:18 crc kubenswrapper[4773]: E0122 12:20:18.624955 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 22 12:20:18 crc kubenswrapper[4773]: E0122 12:20:18.625002 4773 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-fj6wj" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovs-vswitchd" Jan 22 12:20:20 crc kubenswrapper[4773]: I0122 12:20:20.802681 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-fj6wj_7865c5e1-9bcc-467f-8a58-1adfaf30ed28/ovs-vswitchd/0.log" Jan 22 12:20:20 crc kubenswrapper[4773]: I0122 12:20:20.805236 
4773 generic.go:334] "Generic (PLEG): container finished" podID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f" exitCode=137 Jan 22 12:20:20 crc kubenswrapper[4773]: I0122 12:20:20.805349 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-fj6wj" event={"ID":"7865c5e1-9bcc-467f-8a58-1adfaf30ed28","Type":"ContainerDied","Data":"42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f"} Jan 22 12:20:20 crc kubenswrapper[4773]: I0122 12:20:20.820343 4773 generic.go:334] "Generic (PLEG): container finished" podID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerID="703a45160239152c8b7d740aeea8cc73cd6c09b0a35fa470daa71bdf7f10e577" exitCode=137 Jan 22 12:20:20 crc kubenswrapper[4773]: I0122 12:20:20.820418 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerDied","Data":"703a45160239152c8b7d740aeea8cc73cd6c09b0a35fa470daa71bdf7f10e577"} Jan 22 12:20:20 crc kubenswrapper[4773]: I0122 12:20:20.823840 4773 generic.go:334] "Generic (PLEG): container finished" podID="aed7f398-6b73-4830-aa88-db87be2b99a0" containerID="1479304cf57df7fc13f187bc0a84e2fe6c7845f988d1d96f82b74355bc8045dd" exitCode=137 Jan 22 12:20:20 crc kubenswrapper[4773]: I0122 12:20:20.823866 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"aed7f398-6b73-4830-aa88-db87be2b99a0","Type":"ContainerDied","Data":"1479304cf57df7fc13f187bc0a84e2fe6c7845f988d1d96f82b74355bc8045dd"} Jan 22 12:20:20 crc kubenswrapper[4773]: I0122 12:20:20.993263 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.024080 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.121378 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aed7f398-6b73-4830-aa88-db87be2b99a0-etc-machine-id\") pod \"aed7f398-6b73-4830-aa88-db87be2b99a0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.121420 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-combined-ca-bundle\") pod \"aed7f398-6b73-4830-aa88-db87be2b99a0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.121460 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift\") pod \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.121523 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-lock\") pod \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.121543 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/aed7f398-6b73-4830-aa88-db87be2b99a0-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "aed7f398-6b73-4830-aa88-db87be2b99a0" (UID: "aed7f398-6b73-4830-aa88-db87be2b99a0"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.122057 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-lock" (OuterVolumeSpecName: "lock") pod "6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" (UID: "6d5ae4d3-bfc2-4d06-a84e-dc56e250514c"). InnerVolumeSpecName "lock". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.122318 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwbjb\" (UniqueName: \"kubernetes.io/projected/aed7f398-6b73-4830-aa88-db87be2b99a0-kube-api-access-hwbjb\") pod \"aed7f398-6b73-4830-aa88-db87be2b99a0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.122348 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-cache\") pod \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.122386 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-scripts\") pod \"aed7f398-6b73-4830-aa88-db87be2b99a0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.122419 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jmb8\" (UniqueName: \"kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-kube-api-access-5jmb8\") pod \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.122466 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-config-data-custom\") pod \"aed7f398-6b73-4830-aa88-db87be2b99a0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.122508 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.122536 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-config-data\") pod \"aed7f398-6b73-4830-aa88-db87be2b99a0\" (UID: \"aed7f398-6b73-4830-aa88-db87be2b99a0\") " Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.122558 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-combined-ca-bundle\") pod \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\" (UID: \"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c\") " Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.122782 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-cache" (OuterVolumeSpecName: "cache") pod "6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" (UID: "6d5ae4d3-bfc2-4d06-a84e-dc56e250514c"). InnerVolumeSpecName "cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.122798 4773 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aed7f398-6b73-4830-aa88-db87be2b99a0-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.122840 4773 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-lock\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.127211 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-scripts" (OuterVolumeSpecName: "scripts") pod "aed7f398-6b73-4830-aa88-db87be2b99a0" (UID: "aed7f398-6b73-4830-aa88-db87be2b99a0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.127734 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" (UID: "6d5ae4d3-bfc2-4d06-a84e-dc56e250514c"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.128005 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-kube-api-access-5jmb8" (OuterVolumeSpecName: "kube-api-access-5jmb8") pod "6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" (UID: "6d5ae4d3-bfc2-4d06-a84e-dc56e250514c"). InnerVolumeSpecName "kube-api-access-5jmb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.128423 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "aed7f398-6b73-4830-aa88-db87be2b99a0" (UID: "aed7f398-6b73-4830-aa88-db87be2b99a0"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.128565 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aed7f398-6b73-4830-aa88-db87be2b99a0-kube-api-access-hwbjb" (OuterVolumeSpecName: "kube-api-access-hwbjb") pod "aed7f398-6b73-4830-aa88-db87be2b99a0" (UID: "aed7f398-6b73-4830-aa88-db87be2b99a0"). InnerVolumeSpecName "kube-api-access-hwbjb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.129449 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "swift") pod "6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" (UID: "6d5ae4d3-bfc2-4d06-a84e-dc56e250514c"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.170458 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aed7f398-6b73-4830-aa88-db87be2b99a0" (UID: "aed7f398-6b73-4830-aa88-db87be2b99a0"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.211747 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-config-data" (OuterVolumeSpecName: "config-data") pod "aed7f398-6b73-4830-aa88-db87be2b99a0" (UID: "aed7f398-6b73-4830-aa88-db87be2b99a0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.224328 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.224356 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jmb8\" (UniqueName: \"kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-kube-api-access-5jmb8\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.224369 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.224399 4773 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.224409 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.224420 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aed7f398-6b73-4830-aa88-db87be2b99a0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.224428 4773 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.224436 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwbjb\" (UniqueName: \"kubernetes.io/projected/aed7f398-6b73-4830-aa88-db87be2b99a0-kube-api-access-hwbjb\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.224443 4773 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-cache\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.237626 4773 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.279215 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-fj6wj_7865c5e1-9bcc-467f-8a58-1adfaf30ed28/ovs-vswitchd/0.log" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.280074 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.325215 4773 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.410160 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" (UID: "6d5ae4d3-bfc2-4d06-a84e-dc56e250514c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.426815 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-scripts\") pod \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.426919 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-var-log\") pod \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.426965 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-etc-ovs\") pod \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.426998 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-var-lib\") pod \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.427027 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pg8p\" (UniqueName: \"kubernetes.io/projected/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-kube-api-access-4pg8p\") pod \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.427051 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-var-run\") pod \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\" (UID: \"7865c5e1-9bcc-467f-8a58-1adfaf30ed28\") " Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.427110 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-var-lib" (OuterVolumeSpecName: "var-lib") pod "7865c5e1-9bcc-467f-8a58-1adfaf30ed28" (UID: "7865c5e1-9bcc-467f-8a58-1adfaf30ed28"). InnerVolumeSpecName "var-lib". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.427104 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-var-log" (OuterVolumeSpecName: "var-log") pod "7865c5e1-9bcc-467f-8a58-1adfaf30ed28" (UID: "7865c5e1-9bcc-467f-8a58-1adfaf30ed28"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.427137 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "7865c5e1-9bcc-467f-8a58-1adfaf30ed28" (UID: "7865c5e1-9bcc-467f-8a58-1adfaf30ed28"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.427227 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-var-run" (OuterVolumeSpecName: "var-run") pod "7865c5e1-9bcc-467f-8a58-1adfaf30ed28" (UID: "7865c5e1-9bcc-467f-8a58-1adfaf30ed28"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.427789 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.427819 4773 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-var-log\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.427831 4773 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-etc-ovs\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.427842 4773 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-var-lib\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.427854 4773 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-var-run\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.428108 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-scripts" (OuterVolumeSpecName: "scripts") pod "7865c5e1-9bcc-467f-8a58-1adfaf30ed28" (UID: "7865c5e1-9bcc-467f-8a58-1adfaf30ed28"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.429808 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-kube-api-access-4pg8p" (OuterVolumeSpecName: "kube-api-access-4pg8p") pod "7865c5e1-9bcc-467f-8a58-1adfaf30ed28" (UID: "7865c5e1-9bcc-467f-8a58-1adfaf30ed28"). InnerVolumeSpecName "kube-api-access-4pg8p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.529998 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.530096 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pg8p\" (UniqueName: \"kubernetes.io/projected/7865c5e1-9bcc-467f-8a58-1adfaf30ed28-kube-api-access-4pg8p\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.841963 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6d5ae4d3-bfc2-4d06-a84e-dc56e250514c","Type":"ContainerDied","Data":"3568ce9da9c8d8fe7d9f3751dfe7e97a31d37c821a01d04d9411e597b1b46999"} Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.842041 4773 scope.go:117] "RemoveContainer" containerID="703a45160239152c8b7d740aeea8cc73cd6c09b0a35fa470daa71bdf7f10e577" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.842317 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.850466 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"aed7f398-6b73-4830-aa88-db87be2b99a0","Type":"ContainerDied","Data":"e9caab10710c70bddc60aeb018b91f13429ff97d8ea1f39287b8d4e3ca62e848"} Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.850585 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.854938 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-fj6wj_7865c5e1-9bcc-467f-8a58-1adfaf30ed28/ovs-vswitchd/0.log" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.855861 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-fj6wj" event={"ID":"7865c5e1-9bcc-467f-8a58-1adfaf30ed28","Type":"ContainerDied","Data":"0757ac3ab1be556d2afa95eeac8640d5f4faef141831a893d7b3a9d6b52979b7"} Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.855941 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-fj6wj" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.875527 4773 scope.go:117] "RemoveContainer" containerID="dd185ed78ea8d0ba5be7c159d3c9510adff0ac36b806be4c8fe7fd52820b4ab1" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.906827 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.910209 4773 scope.go:117] "RemoveContainer" containerID="cf1b9f6bfafc8703b57d6396414ff00ca8d8f849a42e8a643dd6012f8b0ce046" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.920141 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.927485 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.934401 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.937002 4773 scope.go:117] "RemoveContainer" containerID="08cda7f85172a8a3837740fbc97bd5edfa4527dcc2c0715be2ebaf2bac22e6b7" Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.939852 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-fj6wj"] Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.946664 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-fj6wj"] Jan 22 12:20:21 crc kubenswrapper[4773]: I0122 12:20:21.958487 4773 scope.go:117] "RemoveContainer" containerID="88011f552abb27872acf5ebae2c74881b96ee28ef4202e35cd34c74e6bc5e417" Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.003255 4773 scope.go:117] "RemoveContainer" containerID="c18e352494406d6cec346bf8c21ed393d59ff5e3ef7616cda1e8dcff47f2665d" Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.025431 4773 scope.go:117] "RemoveContainer" containerID="049e38f5a87408bec16aa9630dee0dfcbba21d9d01cf8238d9222592a966b9a8" Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.054570 4773 scope.go:117] "RemoveContainer" containerID="d468ff37ff2469b69cb119a74db751875822b460315803fe8a44e5bcdf6c0ecf" Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.076303 4773 scope.go:117] "RemoveContainer" containerID="dd9af132571b4bed486d397dfe0215f073e7f36c6b34aa87e560ea6a3f50a32e" Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.096914 4773 scope.go:117] "RemoveContainer" containerID="72835dd244d879ebb03d43571334445f863b46f2f1f1ee2655d94a3e3907e8b5" Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.118138 4773 scope.go:117] "RemoveContainer" containerID="fce77a5acf624e2bfa4fbd706de0a7bc45675dcc2a670de3f60e284d1156e388" Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.134063 4773 scope.go:117] "RemoveContainer" containerID="186e2a68f59e8f83347891cfa964ee1ba154ccbd86503a5b5bcfcac2d36382b6" Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.153650 4773 scope.go:117] "RemoveContainer" containerID="8614587119ff68cbab75cc2f49c17c865dbaaace22e54b8c99cb0c2c1183280b" Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.176334 4773 scope.go:117] "RemoveContainer" containerID="4550c34e059dc4785794eb4b0ed847aef46b118c21bcc68b6e25d13c02b29550" Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.192535 4773 scope.go:117] "RemoveContainer" containerID="e9546b98e820ca3d0bc3c7f7f6af1821b37a056264052a38280687379bf170a4" Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 
Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.225069 4773 scope.go:117] "RemoveContainer" containerID="1479304cf57df7fc13f187bc0a84e2fe6c7845f988d1d96f82b74355bc8045dd"
Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.241117 4773 scope.go:117] "RemoveContainer" containerID="42b4a680b9ab6f01625aa6fb2adbfb8bbf5aae4c508bee93ac7d6dc0dbbe298f"
Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.265762 4773 scope.go:117] "RemoveContainer" containerID="705fdb754cdeafbe298498bb2aebe4ee5d41ce53a5d3c9728ddc8bd76be18dd9"
Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.288734 4773 scope.go:117] "RemoveContainer" containerID="57c81abee5e0a5a46aa6bf73d2046706a6dbbdb9688f83972e15931e0150b254"
Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.669486 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" path="/var/lib/kubelet/pods/6d5ae4d3-bfc2-4d06-a84e-dc56e250514c/volumes"
Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.675922 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" path="/var/lib/kubelet/pods/7865c5e1-9bcc-467f-8a58-1adfaf30ed28/volumes"
Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.676710 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aed7f398-6b73-4830-aa88-db87be2b99a0" path="/var/lib/kubelet/pods/aed7f398-6b73-4830-aa88-db87be2b99a0/volumes"
Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.872985 4773 generic.go:334] "Generic (PLEG): container finished" podID="bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15" containerID="dbc1b5a000ae79e34485e55c8fd8281fd6a21a3c49f844792e4c8e5d1d098c81" exitCode=137
Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.873054 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" event={"ID":"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15","Type":"ContainerDied","Data":"dbc1b5a000ae79e34485e55c8fd8281fd6a21a3c49f844792e4c8e5d1d098c81"}
Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.873092 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" event={"ID":"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15","Type":"ContainerDied","Data":"a56c5a91dc332b73b9b74b7e61f3cbcf16ecab195bd85059e7802e603ab81f56"}
Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.873114 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a56c5a91dc332b73b9b74b7e61f3cbcf16ecab195bd85059e7802e603ab81f56"
Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.876765 4773 generic.go:334] "Generic (PLEG): container finished" podID="81a2f40b-f1de-449b-9e0f-45171ffa318c" containerID="46a2db3b91ee50e8ade9df5dbd38ca1fb9b622a943ee6732a2160bba788b0474" exitCode=137
Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.876799 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5f675cf6c9-7wz9g" event={"ID":"81a2f40b-f1de-449b-9e0f-45171ffa318c","Type":"ContainerDied","Data":"46a2db3b91ee50e8ade9df5dbd38ca1fb9b622a943ee6732a2160bba788b0474"}
Jan 22 12:20:22 crc kubenswrapper[4773]: I0122 12:20:22.886379 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t"
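
The burst of scope.go:117 "RemoveContainer" records above is the kubelet garbage-collecting dead containers after the openstack pods were deleted: each container the PLEG reported as finished gets a removal request. A minimal Go sketch of how such a saved log could be tallied offline; the file name, regexes, and buffer size are assumptions for illustration, not kubelet code.

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

func main() {
	// Hypothetical offline analysis of a saved kubelet.log; not kubelet code.
	died := regexp.MustCompile(`"Type":"ContainerDied","Data":"([0-9a-f]{64})"`)
	removed := regexp.MustCompile(`"RemoveContainer" containerID="([0-9a-f]{64})"`)

	f, err := os.Open("kubelet.log") // assumed input path
	if err != nil {
		panic(err)
	}
	defer f.Close()

	diedIDs := map[string]bool{}
	removedIDs := map[string]bool{}
	sc := bufio.NewScanner(f)
	sc.Buffer(make([]byte, 0, 1024*1024), 16*1024*1024) // captured log lines can be very long
	for sc.Scan() {
		if m := died.FindStringSubmatch(sc.Text()); m != nil {
			diedIDs[m[1]] = true
		}
		if m := removed.FindStringSubmatch(sc.Text()); m != nil {
			removedIDs[m[1]] = true
		}
	}
	fmt.Printf("ContainerDied events: %d distinct IDs\n", len(diedIDs))
	fmt.Printf("RemoveContainer requests: %d distinct IDs\n", len(removedIDs))
}
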
Need to start a new one" pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.056489 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-config-data-custom\") pod \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.056568 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjtxp\" (UniqueName: \"kubernetes.io/projected/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-kube-api-access-zjtxp\") pod \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.056802 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-config-data\") pod \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.056865 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-combined-ca-bundle\") pod \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.056920 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-logs\") pod \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\" (UID: \"bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15\") " Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.057810 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-logs" (OuterVolumeSpecName: "logs") pod "bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15" (UID: "bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.082274 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-kube-api-access-zjtxp" (OuterVolumeSpecName: "kube-api-access-zjtxp") pod "bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15" (UID: "bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15"). InnerVolumeSpecName "kube-api-access-zjtxp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.082642 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15" (UID: "bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.089699 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15" (UID: "bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.112817 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-config-data" (OuterVolumeSpecName: "config-data") pod "bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15" (UID: "bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.158548 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.158590 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjtxp\" (UniqueName: \"kubernetes.io/projected/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-kube-api-access-zjtxp\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.158605 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.158614 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.158624 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15-logs\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.255488 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-5f675cf6c9-7wz9g" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.361214 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81a2f40b-f1de-449b-9e0f-45171ffa318c-config-data\") pod \"81a2f40b-f1de-449b-9e0f-45171ffa318c\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.361860 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/81a2f40b-f1de-449b-9e0f-45171ffa318c-config-data-custom\") pod \"81a2f40b-f1de-449b-9e0f-45171ffa318c\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.362346 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81a2f40b-f1de-449b-9e0f-45171ffa318c-combined-ca-bundle\") pod \"81a2f40b-f1de-449b-9e0f-45171ffa318c\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.362716 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81a2f40b-f1de-449b-9e0f-45171ffa318c-logs\") pod \"81a2f40b-f1de-449b-9e0f-45171ffa318c\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.362998 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxvc7\" (UniqueName: \"kubernetes.io/projected/81a2f40b-f1de-449b-9e0f-45171ffa318c-kube-api-access-qxvc7\") pod \"81a2f40b-f1de-449b-9e0f-45171ffa318c\" (UID: \"81a2f40b-f1de-449b-9e0f-45171ffa318c\") " Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.363112 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81a2f40b-f1de-449b-9e0f-45171ffa318c-logs" (OuterVolumeSpecName: "logs") pod "81a2f40b-f1de-449b-9e0f-45171ffa318c" (UID: "81a2f40b-f1de-449b-9e0f-45171ffa318c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.363916 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81a2f40b-f1de-449b-9e0f-45171ffa318c-logs\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.365060 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81a2f40b-f1de-449b-9e0f-45171ffa318c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "81a2f40b-f1de-449b-9e0f-45171ffa318c" (UID: "81a2f40b-f1de-449b-9e0f-45171ffa318c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.366719 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81a2f40b-f1de-449b-9e0f-45171ffa318c-kube-api-access-qxvc7" (OuterVolumeSpecName: "kube-api-access-qxvc7") pod "81a2f40b-f1de-449b-9e0f-45171ffa318c" (UID: "81a2f40b-f1de-449b-9e0f-45171ffa318c"). InnerVolumeSpecName "kube-api-access-qxvc7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.396586 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81a2f40b-f1de-449b-9e0f-45171ffa318c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "81a2f40b-f1de-449b-9e0f-45171ffa318c" (UID: "81a2f40b-f1de-449b-9e0f-45171ffa318c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.397638 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81a2f40b-f1de-449b-9e0f-45171ffa318c-config-data" (OuterVolumeSpecName: "config-data") pod "81a2f40b-f1de-449b-9e0f-45171ffa318c" (UID: "81a2f40b-f1de-449b-9e0f-45171ffa318c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.466526 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxvc7\" (UniqueName: \"kubernetes.io/projected/81a2f40b-f1de-449b-9e0f-45171ffa318c-kube-api-access-qxvc7\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.466918 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81a2f40b-f1de-449b-9e0f-45171ffa318c-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.467061 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/81a2f40b-f1de-449b-9e0f-45171ffa318c-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.467204 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81a2f40b-f1de-449b-9e0f-45171ffa318c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.889148 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5b5f5dd584-sql6t" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.889164 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5f675cf6c9-7wz9g" event={"ID":"81a2f40b-f1de-449b-9e0f-45171ffa318c","Type":"ContainerDied","Data":"fa13ffc11bb0cb1376f9ff0ebf9a898643326cce516ce038d1c5c490dca06b27"} Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.889699 4773 scope.go:117] "RemoveContainer" containerID="46a2db3b91ee50e8ade9df5dbd38ca1fb9b622a943ee6732a2160bba788b0474" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.889246 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-5f675cf6c9-7wz9g" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.924187 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-5b5f5dd584-sql6t"] Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.929483 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-5b5f5dd584-sql6t"] Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.934959 4773 scope.go:117] "RemoveContainer" containerID="87124795355c6f0d6915d6a49102ef60c2f5d2076a9656aff323f785f19478f7" Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.951060 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-5f675cf6c9-7wz9g"] Jan 22 12:20:23 crc kubenswrapper[4773]: I0122 12:20:23.957264 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-5f675cf6c9-7wz9g"] Jan 22 12:20:24 crc kubenswrapper[4773]: I0122 12:20:24.673567 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81a2f40b-f1de-449b-9e0f-45171ffa318c" path="/var/lib/kubelet/pods/81a2f40b-f1de-449b-9e0f-45171ffa318c/volumes" Jan 22 12:20:24 crc kubenswrapper[4773]: I0122 12:20:24.674835 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15" path="/var/lib/kubelet/pods/bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15/volumes" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.167035 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-d6p2d"] Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.171028 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="account-reaper" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.171126 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="account-reaper" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.171211 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="container-server" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.171308 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="container-server" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.171388 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="account-server" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.171467 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="account-server" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.171573 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" containerName="barbican-api-log" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.171671 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" containerName="barbican-api-log" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.171747 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5dc7991-ffde-4ef2-9668-e07d7c4aa614" containerName="barbican-worker" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.171815 4773 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a5dc7991-ffde-4ef2-9668-e07d7c4aa614" containerName="barbican-worker" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.171888 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a688a76d-2498-4542-8285-709caf211e8a" containerName="setup-container" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.171954 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a688a76d-2498-4542-8285-709caf211e8a" containerName="setup-container" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.172063 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" containerName="barbican-api" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.172157 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" containerName="barbican-api" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.172234 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5dc7991-ffde-4ef2-9668-e07d7c4aa614" containerName="barbican-worker-log" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.172385 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5dc7991-ffde-4ef2-9668-e07d7c4aa614" containerName="barbican-worker-log" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.172463 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04725b89-f1ec-45f8-a69a-5427230da499" containerName="cinder-api-log" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.172531 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="04725b89-f1ec-45f8-a69a-5427230da499" containerName="cinder-api-log" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.172619 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="account-replicator" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.172723 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="account-replicator" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.172803 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="object-updater" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.172873 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="object-updater" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.172948 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9c73637-566a-47b5-bba6-97948a973a47" containerName="glance-log" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.173018 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9c73637-566a-47b5-bba6-97948a973a47" containerName="glance-log" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.173095 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d41353c-ea0e-4005-acec-dc25faae5840" containerName="ceilometer-central-agent" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.173175 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d41353c-ea0e-4005-acec-dc25faae5840" containerName="ceilometer-central-agent" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.173251 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15" containerName="barbican-keystone-listener" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.173356 4773 
state_mem.go:107] "Deleted CPUSet assignment" podUID="bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15" containerName="barbican-keystone-listener" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.173454 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04725b89-f1ec-45f8-a69a-5427230da499" containerName="cinder-api" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.173533 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="04725b89-f1ec-45f8-a69a-5427230da499" containerName="cinder-api" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.173610 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a688a76d-2498-4542-8285-709caf211e8a" containerName="rabbitmq" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.173704 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a688a76d-2498-4542-8285-709caf211e8a" containerName="rabbitmq" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.173846 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="object-auditor" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.173945 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="object-auditor" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.174051 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81a2f40b-f1de-449b-9e0f-45171ffa318c" containerName="barbican-worker-log" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.174138 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="81a2f40b-f1de-449b-9e0f-45171ffa318c" containerName="barbican-worker-log" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.174218 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="311884d0-65e1-4737-a7ba-efc94510b90b" containerName="nova-metadata-log" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.174342 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="311884d0-65e1-4737-a7ba-efc94510b90b" containerName="nova-metadata-log" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.174434 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="311884d0-65e1-4737-a7ba-efc94510b90b" containerName="nova-metadata-metadata" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.174512 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="311884d0-65e1-4737-a7ba-efc94510b90b" containerName="nova-metadata-metadata" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.174582 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aed7f398-6b73-4830-aa88-db87be2b99a0" containerName="cinder-scheduler" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.174659 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="aed7f398-6b73-4830-aa88-db87be2b99a0" containerName="cinder-scheduler" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.174737 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81a2f40b-f1de-449b-9e0f-45171ffa318c" containerName="barbican-worker" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.174808 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="81a2f40b-f1de-449b-9e0f-45171ffa318c" containerName="barbican-worker" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.174889 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9cded2e-ee73-4606-8df8-f52bb4bb139d" containerName="nova-cell1-conductor-conductor" Jan 22 12:20:29 crc 
Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.175037 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a393de80-9ad0-413e-a2a1-6ee14de22049" containerName="neutron-httpd"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.175115 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a393de80-9ad0-413e-a2a1-6ee14de22049" containerName="neutron-httpd"
Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.175192 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovsdb-server-init"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.175262 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovsdb-server-init"
Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.175466 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovs-vswitchd"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.175542 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovs-vswitchd"
Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.175615 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="container-updater"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.175687 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="container-updater"
Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.175772 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21901911-8523-4adc-9851-336360f4c11e" containerName="ovn-northd"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.175851 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="21901911-8523-4adc-9851-336360f4c11e" containerName="ovn-northd"
Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.175935 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a393de80-9ad0-413e-a2a1-6ee14de22049" containerName="neutron-api"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.176007 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a393de80-9ad0-413e-a2a1-6ee14de22049" containerName="neutron-api"
Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.176087 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1683acba-c129-4a7c-866c-421cdb0e6505" containerName="nova-cell0-conductor-conductor"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.176160 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="1683acba-c129-4a7c-866c-421cdb0e6505" containerName="nova-cell0-conductor-conductor"
Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.176243 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f681589a-ad29-4485-9313-7e63da547635" containerName="glance-log"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.176352 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f681589a-ad29-4485-9313-7e63da547635" containerName="glance-log"
Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.176482 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="384ccba5-b841-48d7-bdb6-ad40c08d6c8f" containerName="memcached"
containerName="memcached" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.176578 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="384ccba5-b841-48d7-bdb6-ad40c08d6c8f" containerName="memcached" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.176678 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd4905ba-f2f9-42c0-a21b-fabf4046af68" containerName="kube-state-metrics" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.178497 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd4905ba-f2f9-42c0-a21b-fabf4046af68" containerName="kube-state-metrics" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.178652 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d41353c-ea0e-4005-acec-dc25faae5840" containerName="ceilometer-notification-agent" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.178773 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d41353c-ea0e-4005-acec-dc25faae5840" containerName="ceilometer-notification-agent" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.178886 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="rsync" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.179022 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="rsync" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.179136 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9643af6-36f5-46b0-9cca-b9fe67a689dd" containerName="mysql-bootstrap" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.179232 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9643af6-36f5-46b0-9cca-b9fe67a689dd" containerName="mysql-bootstrap" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.179357 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f681589a-ad29-4485-9313-7e63da547635" containerName="glance-httpd" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.179452 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f681589a-ad29-4485-9313-7e63da547635" containerName="glance-httpd" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.179553 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8dfee1b-6f2f-40c0-b676-64e94df5f64d" containerName="nova-scheduler-scheduler" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.179702 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8dfee1b-6f2f-40c0-b676-64e94df5f64d" containerName="nova-scheduler-scheduler" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.179843 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="object-expirer" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.179927 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="object-expirer" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.180009 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4c14d2f-5507-4d08-be37-55d77b5491a3" containerName="setup-container" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.180079 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4c14d2f-5507-4d08-be37-55d77b5491a3" containerName="setup-container" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.180153 4773 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovsdb-server" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.180231 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovsdb-server" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.180330 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="757f37a0-3cc7-4d57-a956-83d236d6cebc" containerName="ovn-controller" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.180406 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="757f37a0-3cc7-4d57-a956-83d236d6cebc" containerName="ovn-controller" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.180480 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="637a651c-e338-45ba-8bd3-a8f838500893" containerName="nova-api-api" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.180562 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="637a651c-e338-45ba-8bd3-a8f838500893" containerName="nova-api-api" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.180660 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d41353c-ea0e-4005-acec-dc25faae5840" containerName="proxy-httpd" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.180753 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d41353c-ea0e-4005-acec-dc25faae5840" containerName="proxy-httpd" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.180863 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15" containerName="barbican-keystone-listener-log" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.180975 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15" containerName="barbican-keystone-listener-log" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.181089 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9643af6-36f5-46b0-9cca-b9fe67a689dd" containerName="galera" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.181189 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9643af6-36f5-46b0-9cca-b9fe67a689dd" containerName="galera" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.181315 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="swift-recon-cron" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.181417 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="swift-recon-cron" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.181580 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d41353c-ea0e-4005-acec-dc25faae5840" containerName="sg-core" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.181684 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d41353c-ea0e-4005-acec-dc25faae5840" containerName="sg-core" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.181796 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4c14d2f-5507-4d08-be37-55d77b5491a3" containerName="rabbitmq" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.181897 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4c14d2f-5507-4d08-be37-55d77b5491a3" containerName="rabbitmq" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.182004 4773 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="container-replicator" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.182099 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="container-replicator" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.182213 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aed7f398-6b73-4830-aa88-db87be2b99a0" containerName="probe" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.182359 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="aed7f398-6b73-4830-aa88-db87be2b99a0" containerName="probe" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.182471 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9c73637-566a-47b5-bba6-97948a973a47" containerName="glance-httpd" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.182571 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9c73637-566a-47b5-bba6-97948a973a47" containerName="glance-httpd" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.182692 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="container-auditor" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.182792 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="container-auditor" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.182892 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="637a651c-e338-45ba-8bd3-a8f838500893" containerName="nova-api-log" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.183014 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="637a651c-e338-45ba-8bd3-a8f838500893" containerName="nova-api-log" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.183111 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21901911-8523-4adc-9851-336360f4c11e" containerName="openstack-network-exporter" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.183211 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="21901911-8523-4adc-9851-336360f4c11e" containerName="openstack-network-exporter" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.183336 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="object-server" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.183457 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="object-server" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.183561 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef" containerName="keystone-api" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.183672 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef" containerName="keystone-api" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.183775 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="account-auditor" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.183985 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="account-auditor" Jan 22 12:20:29 crc kubenswrapper[4773]: E0122 12:20:29.184105 4773 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="object-replicator" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.184222 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="object-replicator" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.184661 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9c73637-566a-47b5-bba6-97948a973a47" containerName="glance-log" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.184800 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="account-auditor" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.184907 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="aed7f398-6b73-4830-aa88-db87be2b99a0" containerName="cinder-scheduler" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.185013 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="1683acba-c129-4a7c-866c-421cdb0e6505" containerName="nova-cell0-conductor-conductor" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.185122 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d41353c-ea0e-4005-acec-dc25faae5840" containerName="proxy-httpd" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.185222 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="04725b89-f1ec-45f8-a69a-5427230da499" containerName="cinder-api" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.185358 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovsdb-server" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.185504 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="container-replicator" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.185630 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="aed7f398-6b73-4830-aa88-db87be2b99a0" containerName="probe" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.185808 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5dc7991-ffde-4ef2-9668-e07d7c4aa614" containerName="barbican-worker" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.185922 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="account-replicator" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.186038 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f681589a-ad29-4485-9313-7e63da547635" containerName="glance-log" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.186144 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="637a651c-e338-45ba-8bd3-a8f838500893" containerName="nova-api-api" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.186250 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9c73637-566a-47b5-bba6-97948a973a47" containerName="glance-httpd" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.186402 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="rsync" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.186511 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="container-auditor" Jan 22 12:20:29 crc 
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.186715 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="384ccba5-b841-48d7-bdb6-ad40c08d6c8f" containerName="memcached"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.186815 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="757f37a0-3cc7-4d57-a956-83d236d6cebc" containerName="ovn-controller"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.186920 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="swift-recon-cron"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.187039 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="a393de80-9ad0-413e-a2a1-6ee14de22049" containerName="neutron-api"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.187246 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="object-replicator"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.187415 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="311884d0-65e1-4737-a7ba-efc94510b90b" containerName="nova-metadata-metadata"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.187536 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="81a2f40b-f1de-449b-9e0f-45171ffa318c" containerName="barbican-worker"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.187644 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="container-updater"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.187752 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="21901911-8523-4adc-9851-336360f4c11e" containerName="openstack-network-exporter"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.187863 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d41353c-ea0e-4005-acec-dc25faae5840" containerName="sg-core"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.187971 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="311884d0-65e1-4737-a7ba-efc94510b90b" containerName="nova-metadata-log"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.188076 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15" containerName="barbican-keystone-listener-log"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.188313 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd2271e5-3ded-4e0b-b4d5-3e2fa5bc0c15" containerName="barbican-keystone-listener"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.188459 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7865c5e1-9bcc-467f-8a58-1adfaf30ed28" containerName="ovs-vswitchd"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.188586 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="container-server"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.188716 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4c14d2f-5507-4d08-be37-55d77b5491a3" containerName="rabbitmq"
Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.188838 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="81a2f40b-f1de-449b-9e0f-45171ffa318c" containerName="barbican-worker-log"
"RemoveStaleState removing state" podUID="81a2f40b-f1de-449b-9e0f-45171ffa318c" containerName="barbican-worker-log" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.188943 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="object-updater" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.189051 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="object-expirer" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.189147 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="object-server" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.189246 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5dc7991-ffde-4ef2-9668-e07d7c4aa614" containerName="barbican-worker-log" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.189556 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="account-reaper" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.191563 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="a393de80-9ad0-413e-a2a1-6ee14de22049" containerName="neutron-httpd" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.191634 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9643af6-36f5-46b0-9cca-b9fe67a689dd" containerName="galera" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.191664 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="04725b89-f1ec-45f8-a69a-5427230da499" containerName="cinder-api-log" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.191688 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" containerName="barbican-api" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.191714 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="object-auditor" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.191739 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d41353c-ea0e-4005-acec-dc25faae5840" containerName="ceilometer-notification-agent" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.191766 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="a688a76d-2498-4542-8285-709caf211e8a" containerName="rabbitmq" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.191798 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="21901911-8523-4adc-9851-336360f4c11e" containerName="ovn-northd" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.191823 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b99e28b-4c7c-4ec2-83b3-05658c9dd0ef" containerName="keystone-api" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.191847 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8dfee1b-6f2f-40c0-b676-64e94df5f64d" containerName="nova-scheduler-scheduler" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.191876 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d5ae4d3-bfc2-4d06-a84e-dc56e250514c" containerName="account-server" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.191897 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="62b80aa9-cd54-4a8a-aea4-0fcbe7a4d94e" 
containerName="barbican-api-log" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.191924 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd4905ba-f2f9-42c0-a21b-fabf4046af68" containerName="kube-state-metrics" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.191957 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d41353c-ea0e-4005-acec-dc25faae5840" containerName="ceilometer-central-agent" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.191975 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="637a651c-e338-45ba-8bd3-a8f838500893" containerName="nova-api-log" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.191996 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9cded2e-ee73-4606-8df8-f52bb4bb139d" containerName="nova-cell1-conductor-conductor" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.194525 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-d6p2d"] Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.194735 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-d6p2d" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.255967 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29e87a36-9ebd-4fc8-b3b3-a284f557dac5-catalog-content\") pod \"certified-operators-d6p2d\" (UID: \"29e87a36-9ebd-4fc8-b3b3-a284f557dac5\") " pod="openshift-marketplace/certified-operators-d6p2d" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.256053 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6x7jp\" (UniqueName: \"kubernetes.io/projected/29e87a36-9ebd-4fc8-b3b3-a284f557dac5-kube-api-access-6x7jp\") pod \"certified-operators-d6p2d\" (UID: \"29e87a36-9ebd-4fc8-b3b3-a284f557dac5\") " pod="openshift-marketplace/certified-operators-d6p2d" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.256099 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29e87a36-9ebd-4fc8-b3b3-a284f557dac5-utilities\") pod \"certified-operators-d6p2d\" (UID: \"29e87a36-9ebd-4fc8-b3b3-a284f557dac5\") " pod="openshift-marketplace/certified-operators-d6p2d" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.357853 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6x7jp\" (UniqueName: \"kubernetes.io/projected/29e87a36-9ebd-4fc8-b3b3-a284f557dac5-kube-api-access-6x7jp\") pod \"certified-operators-d6p2d\" (UID: \"29e87a36-9ebd-4fc8-b3b3-a284f557dac5\") " pod="openshift-marketplace/certified-operators-d6p2d" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.357923 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29e87a36-9ebd-4fc8-b3b3-a284f557dac5-utilities\") pod \"certified-operators-d6p2d\" (UID: \"29e87a36-9ebd-4fc8-b3b3-a284f557dac5\") " pod="openshift-marketplace/certified-operators-d6p2d" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.358014 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29e87a36-9ebd-4fc8-b3b3-a284f557dac5-catalog-content\") pod 
\"certified-operators-d6p2d\" (UID: \"29e87a36-9ebd-4fc8-b3b3-a284f557dac5\") " pod="openshift-marketplace/certified-operators-d6p2d" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.358539 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29e87a36-9ebd-4fc8-b3b3-a284f557dac5-catalog-content\") pod \"certified-operators-d6p2d\" (UID: \"29e87a36-9ebd-4fc8-b3b3-a284f557dac5\") " pod="openshift-marketplace/certified-operators-d6p2d" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.358693 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29e87a36-9ebd-4fc8-b3b3-a284f557dac5-utilities\") pod \"certified-operators-d6p2d\" (UID: \"29e87a36-9ebd-4fc8-b3b3-a284f557dac5\") " pod="openshift-marketplace/certified-operators-d6p2d" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.385395 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6x7jp\" (UniqueName: \"kubernetes.io/projected/29e87a36-9ebd-4fc8-b3b3-a284f557dac5-kube-api-access-6x7jp\") pod \"certified-operators-d6p2d\" (UID: \"29e87a36-9ebd-4fc8-b3b3-a284f557dac5\") " pod="openshift-marketplace/certified-operators-d6p2d" Jan 22 12:20:29 crc kubenswrapper[4773]: I0122 12:20:29.525178 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-d6p2d" Jan 22 12:20:30 crc kubenswrapper[4773]: I0122 12:20:30.012341 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-d6p2d"] Jan 22 12:20:30 crc kubenswrapper[4773]: W0122 12:20:30.019004 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod29e87a36_9ebd_4fc8_b3b3_a284f557dac5.slice/crio-ca7e66855e384e57148fcc727fb35378479d4afba30604989d16365175aa6291 WatchSource:0}: Error finding container ca7e66855e384e57148fcc727fb35378479d4afba30604989d16365175aa6291: Status 404 returned error can't find the container with id ca7e66855e384e57148fcc727fb35378479d4afba30604989d16365175aa6291 Jan 22 12:20:30 crc kubenswrapper[4773]: I0122 12:20:30.971991 4773 generic.go:334] "Generic (PLEG): container finished" podID="29e87a36-9ebd-4fc8-b3b3-a284f557dac5" containerID="be761498c718fc2707255f483915093d4f69a27a79fbc4908ed7b29904c644c1" exitCode=0 Jan 22 12:20:30 crc kubenswrapper[4773]: I0122 12:20:30.972039 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d6p2d" event={"ID":"29e87a36-9ebd-4fc8-b3b3-a284f557dac5","Type":"ContainerDied","Data":"be761498c718fc2707255f483915093d4f69a27a79fbc4908ed7b29904c644c1"} Jan 22 12:20:30 crc kubenswrapper[4773]: I0122 12:20:30.972411 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d6p2d" event={"ID":"29e87a36-9ebd-4fc8-b3b3-a284f557dac5","Type":"ContainerStarted","Data":"ca7e66855e384e57148fcc727fb35378479d4afba30604989d16365175aa6291"} Jan 22 12:20:34 crc kubenswrapper[4773]: I0122 12:20:34.074910 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:20:34 crc kubenswrapper[4773]: I0122 12:20:34.075394 4773 prober.go:107] "Probe 
failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:20:39 crc kubenswrapper[4773]: I0122 12:20:39.064040 4773 generic.go:334] "Generic (PLEG): container finished" podID="29e87a36-9ebd-4fc8-b3b3-a284f557dac5" containerID="9e14ce310a7c0dd5c3b9f7ea117a28e478bbb52cb011360ab62fc5dc40831ea9" exitCode=0 Jan 22 12:20:39 crc kubenswrapper[4773]: I0122 12:20:39.064177 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d6p2d" event={"ID":"29e87a36-9ebd-4fc8-b3b3-a284f557dac5","Type":"ContainerDied","Data":"9e14ce310a7c0dd5c3b9f7ea117a28e478bbb52cb011360ab62fc5dc40831ea9"} Jan 22 12:20:40 crc kubenswrapper[4773]: I0122 12:20:40.075980 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d6p2d" event={"ID":"29e87a36-9ebd-4fc8-b3b3-a284f557dac5","Type":"ContainerStarted","Data":"dde3cec686beacf62f6ed155cdbdd4e2fdb812b1a3ab27e824ca2513575753e6"} Jan 22 12:20:40 crc kubenswrapper[4773]: I0122 12:20:40.099872 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-d6p2d" podStartSLOduration=2.513501866 podStartE2EDuration="11.099847704s" podCreationTimestamp="2026-01-22 12:20:29 +0000 UTC" firstStartedPulling="2026-01-22 12:20:30.973808908 +0000 UTC m=+1538.551924733" lastFinishedPulling="2026-01-22 12:20:39.560154736 +0000 UTC m=+1547.138270571" observedRunningTime="2026-01-22 12:20:40.091100629 +0000 UTC m=+1547.669216454" watchObservedRunningTime="2026-01-22 12:20:40.099847704 +0000 UTC m=+1547.677963569" Jan 22 12:20:49 crc kubenswrapper[4773]: I0122 12:20:49.525527 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-d6p2d" Jan 22 12:20:49 crc kubenswrapper[4773]: I0122 12:20:49.526457 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-d6p2d" Jan 22 12:20:49 crc kubenswrapper[4773]: I0122 12:20:49.615146 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-d6p2d" Jan 22 12:20:50 crc kubenswrapper[4773]: I0122 12:20:50.249502 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-d6p2d" Jan 22 12:20:50 crc kubenswrapper[4773]: I0122 12:20:50.310511 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-d6p2d"] Jan 22 12:20:52 crc kubenswrapper[4773]: I0122 12:20:52.200462 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-d6p2d" podUID="29e87a36-9ebd-4fc8-b3b3-a284f557dac5" containerName="registry-server" containerID="cri-o://dde3cec686beacf62f6ed155cdbdd4e2fdb812b1a3ab27e824ca2513575753e6" gracePeriod=2 Jan 22 12:20:52 crc kubenswrapper[4773]: I0122 12:20:52.635139 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-d6p2d" Jan 22 12:20:52 crc kubenswrapper[4773]: I0122 12:20:52.741271 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6x7jp\" (UniqueName: \"kubernetes.io/projected/29e87a36-9ebd-4fc8-b3b3-a284f557dac5-kube-api-access-6x7jp\") pod \"29e87a36-9ebd-4fc8-b3b3-a284f557dac5\" (UID: \"29e87a36-9ebd-4fc8-b3b3-a284f557dac5\") " Jan 22 12:20:52 crc kubenswrapper[4773]: I0122 12:20:52.741658 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29e87a36-9ebd-4fc8-b3b3-a284f557dac5-catalog-content\") pod \"29e87a36-9ebd-4fc8-b3b3-a284f557dac5\" (UID: \"29e87a36-9ebd-4fc8-b3b3-a284f557dac5\") " Jan 22 12:20:52 crc kubenswrapper[4773]: I0122 12:20:52.741695 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29e87a36-9ebd-4fc8-b3b3-a284f557dac5-utilities\") pod \"29e87a36-9ebd-4fc8-b3b3-a284f557dac5\" (UID: \"29e87a36-9ebd-4fc8-b3b3-a284f557dac5\") " Jan 22 12:20:52 crc kubenswrapper[4773]: I0122 12:20:52.743913 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29e87a36-9ebd-4fc8-b3b3-a284f557dac5-utilities" (OuterVolumeSpecName: "utilities") pod "29e87a36-9ebd-4fc8-b3b3-a284f557dac5" (UID: "29e87a36-9ebd-4fc8-b3b3-a284f557dac5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:20:52 crc kubenswrapper[4773]: I0122 12:20:52.752625 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29e87a36-9ebd-4fc8-b3b3-a284f557dac5-kube-api-access-6x7jp" (OuterVolumeSpecName: "kube-api-access-6x7jp") pod "29e87a36-9ebd-4fc8-b3b3-a284f557dac5" (UID: "29e87a36-9ebd-4fc8-b3b3-a284f557dac5"). InnerVolumeSpecName "kube-api-access-6x7jp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:20:52 crc kubenswrapper[4773]: I0122 12:20:52.829563 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29e87a36-9ebd-4fc8-b3b3-a284f557dac5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "29e87a36-9ebd-4fc8-b3b3-a284f557dac5" (UID: "29e87a36-9ebd-4fc8-b3b3-a284f557dac5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:20:52 crc kubenswrapper[4773]: I0122 12:20:52.843797 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6x7jp\" (UniqueName: \"kubernetes.io/projected/29e87a36-9ebd-4fc8-b3b3-a284f557dac5-kube-api-access-6x7jp\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:52 crc kubenswrapper[4773]: I0122 12:20:52.843841 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29e87a36-9ebd-4fc8-b3b3-a284f557dac5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:52 crc kubenswrapper[4773]: I0122 12:20:52.843859 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29e87a36-9ebd-4fc8-b3b3-a284f557dac5-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 12:20:53 crc kubenswrapper[4773]: I0122 12:20:53.220670 4773 generic.go:334] "Generic (PLEG): container finished" podID="29e87a36-9ebd-4fc8-b3b3-a284f557dac5" containerID="dde3cec686beacf62f6ed155cdbdd4e2fdb812b1a3ab27e824ca2513575753e6" exitCode=0 Jan 22 12:20:53 crc kubenswrapper[4773]: I0122 12:20:53.220734 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d6p2d" event={"ID":"29e87a36-9ebd-4fc8-b3b3-a284f557dac5","Type":"ContainerDied","Data":"dde3cec686beacf62f6ed155cdbdd4e2fdb812b1a3ab27e824ca2513575753e6"} Jan 22 12:20:53 crc kubenswrapper[4773]: I0122 12:20:53.220785 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d6p2d" event={"ID":"29e87a36-9ebd-4fc8-b3b3-a284f557dac5","Type":"ContainerDied","Data":"ca7e66855e384e57148fcc727fb35378479d4afba30604989d16365175aa6291"} Jan 22 12:20:53 crc kubenswrapper[4773]: I0122 12:20:53.220787 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-d6p2d" Jan 22 12:20:53 crc kubenswrapper[4773]: I0122 12:20:53.220814 4773 scope.go:117] "RemoveContainer" containerID="dde3cec686beacf62f6ed155cdbdd4e2fdb812b1a3ab27e824ca2513575753e6" Jan 22 12:20:53 crc kubenswrapper[4773]: I0122 12:20:53.245069 4773 scope.go:117] "RemoveContainer" containerID="9e14ce310a7c0dd5c3b9f7ea117a28e478bbb52cb011360ab62fc5dc40831ea9" Jan 22 12:20:53 crc kubenswrapper[4773]: I0122 12:20:53.259390 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-d6p2d"] Jan 22 12:20:53 crc kubenswrapper[4773]: I0122 12:20:53.266117 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-d6p2d"] Jan 22 12:20:53 crc kubenswrapper[4773]: I0122 12:20:53.280562 4773 scope.go:117] "RemoveContainer" containerID="be761498c718fc2707255f483915093d4f69a27a79fbc4908ed7b29904c644c1" Jan 22 12:20:53 crc kubenswrapper[4773]: I0122 12:20:53.311921 4773 scope.go:117] "RemoveContainer" containerID="dde3cec686beacf62f6ed155cdbdd4e2fdb812b1a3ab27e824ca2513575753e6" Jan 22 12:20:53 crc kubenswrapper[4773]: E0122 12:20:53.312444 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dde3cec686beacf62f6ed155cdbdd4e2fdb812b1a3ab27e824ca2513575753e6\": container with ID starting with dde3cec686beacf62f6ed155cdbdd4e2fdb812b1a3ab27e824ca2513575753e6 not found: ID does not exist" containerID="dde3cec686beacf62f6ed155cdbdd4e2fdb812b1a3ab27e824ca2513575753e6" Jan 22 12:20:53 crc kubenswrapper[4773]: I0122 12:20:53.312477 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dde3cec686beacf62f6ed155cdbdd4e2fdb812b1a3ab27e824ca2513575753e6"} err="failed to get container status \"dde3cec686beacf62f6ed155cdbdd4e2fdb812b1a3ab27e824ca2513575753e6\": rpc error: code = NotFound desc = could not find container \"dde3cec686beacf62f6ed155cdbdd4e2fdb812b1a3ab27e824ca2513575753e6\": container with ID starting with dde3cec686beacf62f6ed155cdbdd4e2fdb812b1a3ab27e824ca2513575753e6 not found: ID does not exist" Jan 22 12:20:53 crc kubenswrapper[4773]: I0122 12:20:53.312499 4773 scope.go:117] "RemoveContainer" containerID="9e14ce310a7c0dd5c3b9f7ea117a28e478bbb52cb011360ab62fc5dc40831ea9" Jan 22 12:20:53 crc kubenswrapper[4773]: E0122 12:20:53.313039 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e14ce310a7c0dd5c3b9f7ea117a28e478bbb52cb011360ab62fc5dc40831ea9\": container with ID starting with 9e14ce310a7c0dd5c3b9f7ea117a28e478bbb52cb011360ab62fc5dc40831ea9 not found: ID does not exist" containerID="9e14ce310a7c0dd5c3b9f7ea117a28e478bbb52cb011360ab62fc5dc40831ea9" Jan 22 12:20:53 crc kubenswrapper[4773]: I0122 12:20:53.313097 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e14ce310a7c0dd5c3b9f7ea117a28e478bbb52cb011360ab62fc5dc40831ea9"} err="failed to get container status \"9e14ce310a7c0dd5c3b9f7ea117a28e478bbb52cb011360ab62fc5dc40831ea9\": rpc error: code = NotFound desc = could not find container \"9e14ce310a7c0dd5c3b9f7ea117a28e478bbb52cb011360ab62fc5dc40831ea9\": container with ID starting with 9e14ce310a7c0dd5c3b9f7ea117a28e478bbb52cb011360ab62fc5dc40831ea9 not found: ID does not exist" Jan 22 12:20:53 crc kubenswrapper[4773]: I0122 12:20:53.313135 4773 scope.go:117] "RemoveContainer" 
containerID="be761498c718fc2707255f483915093d4f69a27a79fbc4908ed7b29904c644c1" Jan 22 12:20:53 crc kubenswrapper[4773]: E0122 12:20:53.313542 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be761498c718fc2707255f483915093d4f69a27a79fbc4908ed7b29904c644c1\": container with ID starting with be761498c718fc2707255f483915093d4f69a27a79fbc4908ed7b29904c644c1 not found: ID does not exist" containerID="be761498c718fc2707255f483915093d4f69a27a79fbc4908ed7b29904c644c1" Jan 22 12:20:53 crc kubenswrapper[4773]: I0122 12:20:53.313609 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be761498c718fc2707255f483915093d4f69a27a79fbc4908ed7b29904c644c1"} err="failed to get container status \"be761498c718fc2707255f483915093d4f69a27a79fbc4908ed7b29904c644c1\": rpc error: code = NotFound desc = could not find container \"be761498c718fc2707255f483915093d4f69a27a79fbc4908ed7b29904c644c1\": container with ID starting with be761498c718fc2707255f483915093d4f69a27a79fbc4908ed7b29904c644c1 not found: ID does not exist" Jan 22 12:20:54 crc kubenswrapper[4773]: I0122 12:20:54.676719 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29e87a36-9ebd-4fc8-b3b3-a284f557dac5" path="/var/lib/kubelet/pods/29e87a36-9ebd-4fc8-b3b3-a284f557dac5/volumes" Jan 22 12:20:59 crc kubenswrapper[4773]: I0122 12:20:59.727190 4773 scope.go:117] "RemoveContainer" containerID="0c89a12963a143e2b4ba0fe70de80385a721e9ebda036a4115b6dc5e805ca36d" Jan 22 12:20:59 crc kubenswrapper[4773]: I0122 12:20:59.766740 4773 scope.go:117] "RemoveContainer" containerID="e24ddfd76fa1df92fbb4c9d0a52f1d039f2299b447dbc760897120433c3fd08e" Jan 22 12:20:59 crc kubenswrapper[4773]: I0122 12:20:59.799593 4773 scope.go:117] "RemoveContainer" containerID="8f6fb3df8ffd981f7e9f28222c8087c9b5d58f85290053a881c73ef8ed2e0dcb" Jan 22 12:20:59 crc kubenswrapper[4773]: I0122 12:20:59.849526 4773 scope.go:117] "RemoveContainer" containerID="8d2a6050e2267a0f976543f79d40ed2283eece122c1f12726a2431e1a337b88a" Jan 22 12:20:59 crc kubenswrapper[4773]: I0122 12:20:59.876183 4773 scope.go:117] "RemoveContainer" containerID="8b7efe8e4bbf533e7dc8989603b738a0ad4692d7d386a7b7224f95d3a3d096aa" Jan 22 12:20:59 crc kubenswrapper[4773]: I0122 12:20:59.902844 4773 scope.go:117] "RemoveContainer" containerID="dba0ccb61ced9efa67fe09f169cc91f5cfd0e2d646aa717d90a06e04a0c5a34d" Jan 22 12:20:59 crc kubenswrapper[4773]: I0122 12:20:59.925419 4773 scope.go:117] "RemoveContainer" containerID="6bdf627901783056ccf143a1bf488b67db1d389700b6b85cfe24b43321a70af4" Jan 22 12:20:59 crc kubenswrapper[4773]: I0122 12:20:59.949965 4773 scope.go:117] "RemoveContainer" containerID="7abeeb25a28ee1cf287a8f8112c5b0ed49bd1977fc5bfc686b83f3ac4dd09463" Jan 22 12:20:59 crc kubenswrapper[4773]: I0122 12:20:59.973952 4773 scope.go:117] "RemoveContainer" containerID="5fbb45accf2025632e101b486f97d89eaf10a6d83beaef12851e44a769abaae8" Jan 22 12:20:59 crc kubenswrapper[4773]: I0122 12:20:59.999503 4773 scope.go:117] "RemoveContainer" containerID="2d90c9b5a0623109c593eb949a72965be23283a3b00ef0a41a7c83caaf0e9e07" Jan 22 12:21:00 crc kubenswrapper[4773]: I0122 12:21:00.030886 4773 scope.go:117] "RemoveContainer" containerID="5115d35ee6bb43c24c3d875f3e5f3d449a9b5e2ac3662b5da588b01bf65d687a" Jan 22 12:21:00 crc kubenswrapper[4773]: I0122 12:21:00.058500 4773 scope.go:117] "RemoveContainer" containerID="10f9a6cfd3b67be41810a7b70e014044939bb3dabad0bead65708e24bd630e37" Jan 22 12:21:00 crc 
kubenswrapper[4773]: I0122 12:21:00.089390 4773 scope.go:117] "RemoveContainer" containerID="67316730ec155c07e7e601cb8c3c3be87325a29333647bce87c3036b77e66dbf" Jan 22 12:21:00 crc kubenswrapper[4773]: I0122 12:21:00.110359 4773 scope.go:117] "RemoveContainer" containerID="15e25dd63bf3901cd9fd02136357daec98bd55eadc8869b0674ea5be9ff443d9" Jan 22 12:21:04 crc kubenswrapper[4773]: I0122 12:21:04.074803 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:21:04 crc kubenswrapper[4773]: I0122 12:21:04.075200 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:21:04 crc kubenswrapper[4773]: I0122 12:21:04.075276 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 12:21:04 crc kubenswrapper[4773]: I0122 12:21:04.076450 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 12:21:04 crc kubenswrapper[4773]: I0122 12:21:04.076569 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" gracePeriod=600 Jan 22 12:21:04 crc kubenswrapper[4773]: E0122 12:21:04.209205 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:21:04 crc kubenswrapper[4773]: I0122 12:21:04.348325 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad"} Jan 22 12:21:04 crc kubenswrapper[4773]: I0122 12:21:04.348274 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" exitCode=0 Jan 22 12:21:04 crc kubenswrapper[4773]: I0122 12:21:04.348376 4773 scope.go:117] "RemoveContainer" containerID="e8c9c494f3234f8ef97d1206589812616f50e7a5ed49d844fa29e97c95590447" Jan 22 12:21:04 crc kubenswrapper[4773]: I0122 12:21:04.349319 4773 scope.go:117] "RemoveContainer" 
containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:21:04 crc kubenswrapper[4773]: E0122 12:21:04.349690 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:21:15 crc kubenswrapper[4773]: I0122 12:21:15.658617 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:21:15 crc kubenswrapper[4773]: E0122 12:21:15.659594 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:21:27 crc kubenswrapper[4773]: I0122 12:21:27.658179 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:21:27 crc kubenswrapper[4773]: E0122 12:21:27.658980 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:21:41 crc kubenswrapper[4773]: I0122 12:21:41.658568 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:21:41 crc kubenswrapper[4773]: E0122 12:21:41.659468 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:21:55 crc kubenswrapper[4773]: I0122 12:21:55.659043 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:21:55 crc kubenswrapper[4773]: E0122 12:21:55.660400 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:22:00 crc kubenswrapper[4773]: I0122 12:22:00.504703 4773 scope.go:117] "RemoveContainer" containerID="83c0def5b56b44cb9f1f01205a511238aa72173c21dd369de734bca93e5a1030" Jan 22 12:22:00 crc kubenswrapper[4773]: I0122 12:22:00.578383 4773 scope.go:117] "RemoveContainer" 
containerID="77463d8c190922bd049e9a6f4d1f099161dade201cddd23bc77101b86ec4feb6" Jan 22 12:22:00 crc kubenswrapper[4773]: I0122 12:22:00.596600 4773 scope.go:117] "RemoveContainer" containerID="85dfcb89d3acb7001ec4f59fefda1afa3e6872c76587b0b797769f9330c1fc35" Jan 22 12:22:00 crc kubenswrapper[4773]: I0122 12:22:00.643555 4773 scope.go:117] "RemoveContainer" containerID="3ffb7fb743609268c9120667fae8f80a0ebb42bf2a809b8cde494ed6f0bc19ab" Jan 22 12:22:00 crc kubenswrapper[4773]: I0122 12:22:00.669768 4773 scope.go:117] "RemoveContainer" containerID="2de153ceb8260e4aaf9c5eb8db18fd21a627cc2a8243dd5b0c08d3df7a9725b0" Jan 22 12:22:00 crc kubenswrapper[4773]: I0122 12:22:00.692153 4773 scope.go:117] "RemoveContainer" containerID="41f034e2200a964f491e83e9757b4cd91acd4b67ef1b1be157cb223ac98e6685" Jan 22 12:22:00 crc kubenswrapper[4773]: I0122 12:22:00.717121 4773 scope.go:117] "RemoveContainer" containerID="022567fecc826144588132a7fe6f8b3c18be6d50e4f697d1e29bf495c3931bd5" Jan 22 12:22:00 crc kubenswrapper[4773]: I0122 12:22:00.751496 4773 scope.go:117] "RemoveContainer" containerID="8d1e9af75c0e1f61a4f35634e8cf84897907c8ed147661c4cf3eed4f653b5990" Jan 22 12:22:00 crc kubenswrapper[4773]: I0122 12:22:00.794475 4773 scope.go:117] "RemoveContainer" containerID="15c4231fb715135dd1011052cf51192cc080b32f3bddb8c8b960f54b1c47020b" Jan 22 12:22:00 crc kubenswrapper[4773]: I0122 12:22:00.823856 4773 scope.go:117] "RemoveContainer" containerID="858fe7c568845f4c55a8c03dec3382290ef6c422cd08d419d67d8b3aee2a814d" Jan 22 12:22:00 crc kubenswrapper[4773]: I0122 12:22:00.864240 4773 scope.go:117] "RemoveContainer" containerID="87c1a40a3866ca565feb721acdc98dd566ca7d2caac09a1e364b6fe3fe20adae" Jan 22 12:22:00 crc kubenswrapper[4773]: I0122 12:22:00.899353 4773 scope.go:117] "RemoveContainer" containerID="26548a6cc894a5bb88b74830b61f9e63616ad47105eb4dc8ab517b549d825079" Jan 22 12:22:00 crc kubenswrapper[4773]: I0122 12:22:00.916621 4773 scope.go:117] "RemoveContainer" containerID="5b0b9f019f40afbdc470bca3f7127866655fa613de315b150d30f0d4850cd7b0" Jan 22 12:22:07 crc kubenswrapper[4773]: I0122 12:22:07.658807 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:22:07 crc kubenswrapper[4773]: E0122 12:22:07.659416 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:22:21 crc kubenswrapper[4773]: I0122 12:22:21.657790 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:22:21 crc kubenswrapper[4773]: E0122 12:22:21.658817 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:22:32 crc kubenswrapper[4773]: I0122 12:22:32.661631 4773 scope.go:117] "RemoveContainer" 
containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:22:32 crc kubenswrapper[4773]: E0122 12:22:32.662373 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:22:44 crc kubenswrapper[4773]: I0122 12:22:44.658893 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:22:44 crc kubenswrapper[4773]: E0122 12:22:44.660664 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:22:55 crc kubenswrapper[4773]: I0122 12:22:55.659162 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:22:55 crc kubenswrapper[4773]: E0122 12:22:55.660018 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:23:01 crc kubenswrapper[4773]: I0122 12:23:01.103189 4773 scope.go:117] "RemoveContainer" containerID="92e115034ee29042f3b09c0140b70b158d673295708a85c55b272e8fff093d15" Jan 22 12:23:01 crc kubenswrapper[4773]: I0122 12:23:01.176828 4773 scope.go:117] "RemoveContainer" containerID="c3e7bf2d8e9482874620abd54b6917871f15ea49d5e6938c1ed6c6f4ead38926" Jan 22 12:23:01 crc kubenswrapper[4773]: I0122 12:23:01.203764 4773 scope.go:117] "RemoveContainer" containerID="b3bc996c5fce4a346e5c10f4bff70bbba9478b888eca409f8c86b9e8ed609774" Jan 22 12:23:01 crc kubenswrapper[4773]: I0122 12:23:01.256415 4773 scope.go:117] "RemoveContainer" containerID="00074010fbe5c8eb219e7350840b0f486f659b0e42b97f83273dec4f5a642383" Jan 22 12:23:08 crc kubenswrapper[4773]: I0122 12:23:08.663891 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:23:08 crc kubenswrapper[4773]: E0122 12:23:08.664707 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:23:21 crc kubenswrapper[4773]: I0122 12:23:21.825817 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:23:21 crc kubenswrapper[4773]: E0122 12:23:21.827201 4773 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:23:32 crc kubenswrapper[4773]: I0122 12:23:32.832269 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:23:32 crc kubenswrapper[4773]: E0122 12:23:32.833077 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:23:45 crc kubenswrapper[4773]: I0122 12:23:45.658234 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:23:45 crc kubenswrapper[4773]: E0122 12:23:45.658958 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:24:00 crc kubenswrapper[4773]: I0122 12:24:00.657920 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:24:00 crc kubenswrapper[4773]: E0122 12:24:00.658704 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:24:01 crc kubenswrapper[4773]: I0122 12:24:01.344517 4773 scope.go:117] "RemoveContainer" containerID="b23e36a154e8bd280a582d1bff462b1409937c638edaa9eeeed72ca40585f7bc" Jan 22 12:24:01 crc kubenswrapper[4773]: I0122 12:24:01.378134 4773 scope.go:117] "RemoveContainer" containerID="ec104a46c303372809d61350bbc1b44c16590837f54cef8d940afde155b334e5" Jan 22 12:24:01 crc kubenswrapper[4773]: I0122 12:24:01.397915 4773 scope.go:117] "RemoveContainer" containerID="029d6b50f01ce8fa59024b0d1eaad9c1f17eb7a636625bf9bcd2dacaa13828f7" Jan 22 12:24:01 crc kubenswrapper[4773]: I0122 12:24:01.437643 4773 scope.go:117] "RemoveContainer" containerID="74b83a04fdad0dadc32143fbca6d09d286f812011493ed35ba77825a03b9f5c1" Jan 22 12:24:01 crc kubenswrapper[4773]: I0122 12:24:01.481628 4773 scope.go:117] "RemoveContainer" containerID="aab3f6dfc1780bf96a3c69e8b0c5eb559ab8d37f13dfa4bbf2394a578877f335" Jan 22 12:24:01 crc kubenswrapper[4773]: I0122 12:24:01.522008 4773 scope.go:117] "RemoveContainer" containerID="b7af3d314ac833c980770e1aea6c18199d9bb9e943d9d465a4255d04163f0312" Jan 22 12:24:01 crc 
kubenswrapper[4773]: I0122 12:24:01.557850 4773 scope.go:117] "RemoveContainer" containerID="14024674f87e3ede95f2eb32c3831caf7330b555c63eb56d5c802f11161870a3" Jan 22 12:24:01 crc kubenswrapper[4773]: I0122 12:24:01.589427 4773 scope.go:117] "RemoveContainer" containerID="27c3e0c464e09904962671874cb61cd552b7a21244651c606261999643bca822" Jan 22 12:24:01 crc kubenswrapper[4773]: I0122 12:24:01.630916 4773 scope.go:117] "RemoveContainer" containerID="d4ccce0b1dc1cb12e6fbde6555905a3054bf04d220364144813fb3c66dbd8f9a" Jan 22 12:24:11 crc kubenswrapper[4773]: I0122 12:24:11.657916 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:24:11 crc kubenswrapper[4773]: E0122 12:24:11.658894 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:24:25 crc kubenswrapper[4773]: I0122 12:24:25.658672 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:24:25 crc kubenswrapper[4773]: E0122 12:24:25.659746 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:24:40 crc kubenswrapper[4773]: I0122 12:24:40.658790 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:24:40 crc kubenswrapper[4773]: E0122 12:24:40.661321 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:24:52 crc kubenswrapper[4773]: I0122 12:24:52.666391 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:24:52 crc kubenswrapper[4773]: E0122 12:24:52.667168 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:25:01 crc kubenswrapper[4773]: I0122 12:25:01.806497 4773 scope.go:117] "RemoveContainer" containerID="9d84b246a68941e41889b1877df1080c54dc73d0b7df6ecbeb2af3a992b9f448" Jan 22 12:25:01 crc kubenswrapper[4773]: I0122 12:25:01.880145 4773 scope.go:117] "RemoveContainer" 
containerID="9d37e09ac80e6c67a8c9a0e03394d9e19ff82acb6e4f4e52dde64dc3c023fdfe" Jan 22 12:25:01 crc kubenswrapper[4773]: I0122 12:25:01.903987 4773 scope.go:117] "RemoveContainer" containerID="9d65f24b9bce874786b86a52e97303e7993818c6f384ef400144e67856e5049f" Jan 22 12:25:01 crc kubenswrapper[4773]: I0122 12:25:01.953120 4773 scope.go:117] "RemoveContainer" containerID="c74b80d49878948d531eb721e047f59142c7de6845ab3140c2d9f029db898d0a" Jan 22 12:25:02 crc kubenswrapper[4773]: I0122 12:25:02.005658 4773 scope.go:117] "RemoveContainer" containerID="55704dc681b7f0b63c484b59d3778f9c323873b4e2e6cccd2b1dc30aecbc8f1e" Jan 22 12:25:02 crc kubenswrapper[4773]: I0122 12:25:02.031690 4773 scope.go:117] "RemoveContainer" containerID="d36e3d210753b939d6e9d583e439f8568814359d8e8edd1bc3bde97ea773b8af" Jan 22 12:25:05 crc kubenswrapper[4773]: I0122 12:25:05.658785 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:25:05 crc kubenswrapper[4773]: E0122 12:25:05.659592 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:25:19 crc kubenswrapper[4773]: I0122 12:25:19.657991 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:25:19 crc kubenswrapper[4773]: E0122 12:25:19.658827 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:25:30 crc kubenswrapper[4773]: I0122 12:25:30.658757 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:25:30 crc kubenswrapper[4773]: E0122 12:25:30.659746 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:25:43 crc kubenswrapper[4773]: I0122 12:25:43.657557 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:25:43 crc kubenswrapper[4773]: E0122 12:25:43.658343 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:25:57 crc kubenswrapper[4773]: I0122 12:25:57.658328 4773 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zn4gw"] Jan 22 12:25:57 crc kubenswrapper[4773]: E0122 12:25:57.659564 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29e87a36-9ebd-4fc8-b3b3-a284f557dac5" containerName="extract-utilities" Jan 22 12:25:57 crc kubenswrapper[4773]: I0122 12:25:57.659585 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="29e87a36-9ebd-4fc8-b3b3-a284f557dac5" containerName="extract-utilities" Jan 22 12:25:57 crc kubenswrapper[4773]: E0122 12:25:57.659601 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29e87a36-9ebd-4fc8-b3b3-a284f557dac5" containerName="extract-content" Jan 22 12:25:57 crc kubenswrapper[4773]: I0122 12:25:57.659609 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="29e87a36-9ebd-4fc8-b3b3-a284f557dac5" containerName="extract-content" Jan 22 12:25:57 crc kubenswrapper[4773]: E0122 12:25:57.659623 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29e87a36-9ebd-4fc8-b3b3-a284f557dac5" containerName="registry-server" Jan 22 12:25:57 crc kubenswrapper[4773]: I0122 12:25:57.659680 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="29e87a36-9ebd-4fc8-b3b3-a284f557dac5" containerName="registry-server" Jan 22 12:25:57 crc kubenswrapper[4773]: I0122 12:25:57.659882 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="29e87a36-9ebd-4fc8-b3b3-a284f557dac5" containerName="registry-server" Jan 22 12:25:57 crc kubenswrapper[4773]: I0122 12:25:57.661360 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zn4gw" Jan 22 12:25:57 crc kubenswrapper[4773]: I0122 12:25:57.667333 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zn4gw"] Jan 22 12:25:57 crc kubenswrapper[4773]: I0122 12:25:57.842687 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31cb4b42-1830-451e-9928-814c5ed54abd-catalog-content\") pod \"redhat-marketplace-zn4gw\" (UID: \"31cb4b42-1830-451e-9928-814c5ed54abd\") " pod="openshift-marketplace/redhat-marketplace-zn4gw" Jan 22 12:25:57 crc kubenswrapper[4773]: I0122 12:25:57.842772 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sshw2\" (UniqueName: \"kubernetes.io/projected/31cb4b42-1830-451e-9928-814c5ed54abd-kube-api-access-sshw2\") pod \"redhat-marketplace-zn4gw\" (UID: \"31cb4b42-1830-451e-9928-814c5ed54abd\") " pod="openshift-marketplace/redhat-marketplace-zn4gw" Jan 22 12:25:57 crc kubenswrapper[4773]: I0122 12:25:57.843381 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31cb4b42-1830-451e-9928-814c5ed54abd-utilities\") pod \"redhat-marketplace-zn4gw\" (UID: \"31cb4b42-1830-451e-9928-814c5ed54abd\") " pod="openshift-marketplace/redhat-marketplace-zn4gw" Jan 22 12:25:57 crc kubenswrapper[4773]: I0122 12:25:57.944806 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31cb4b42-1830-451e-9928-814c5ed54abd-catalog-content\") pod \"redhat-marketplace-zn4gw\" (UID: \"31cb4b42-1830-451e-9928-814c5ed54abd\") " pod="openshift-marketplace/redhat-marketplace-zn4gw" Jan 22 12:25:57 crc kubenswrapper[4773]: I0122 
12:25:57.944856 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sshw2\" (UniqueName: \"kubernetes.io/projected/31cb4b42-1830-451e-9928-814c5ed54abd-kube-api-access-sshw2\") pod \"redhat-marketplace-zn4gw\" (UID: \"31cb4b42-1830-451e-9928-814c5ed54abd\") " pod="openshift-marketplace/redhat-marketplace-zn4gw" Jan 22 12:25:57 crc kubenswrapper[4773]: I0122 12:25:57.944878 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31cb4b42-1830-451e-9928-814c5ed54abd-utilities\") pod \"redhat-marketplace-zn4gw\" (UID: \"31cb4b42-1830-451e-9928-814c5ed54abd\") " pod="openshift-marketplace/redhat-marketplace-zn4gw" Jan 22 12:25:57 crc kubenswrapper[4773]: I0122 12:25:57.945536 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31cb4b42-1830-451e-9928-814c5ed54abd-catalog-content\") pod \"redhat-marketplace-zn4gw\" (UID: \"31cb4b42-1830-451e-9928-814c5ed54abd\") " pod="openshift-marketplace/redhat-marketplace-zn4gw" Jan 22 12:25:57 crc kubenswrapper[4773]: I0122 12:25:57.945655 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31cb4b42-1830-451e-9928-814c5ed54abd-utilities\") pod \"redhat-marketplace-zn4gw\" (UID: \"31cb4b42-1830-451e-9928-814c5ed54abd\") " pod="openshift-marketplace/redhat-marketplace-zn4gw" Jan 22 12:25:57 crc kubenswrapper[4773]: I0122 12:25:57.971891 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sshw2\" (UniqueName: \"kubernetes.io/projected/31cb4b42-1830-451e-9928-814c5ed54abd-kube-api-access-sshw2\") pod \"redhat-marketplace-zn4gw\" (UID: \"31cb4b42-1830-451e-9928-814c5ed54abd\") " pod="openshift-marketplace/redhat-marketplace-zn4gw" Jan 22 12:25:57 crc kubenswrapper[4773]: I0122 12:25:57.987514 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zn4gw" Jan 22 12:25:58 crc kubenswrapper[4773]: I0122 12:25:58.436969 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zn4gw"] Jan 22 12:25:58 crc kubenswrapper[4773]: I0122 12:25:58.648722 4773 generic.go:334] "Generic (PLEG): container finished" podID="31cb4b42-1830-451e-9928-814c5ed54abd" containerID="15cbb10df9b350d29731f878a58f94611d13f83f0552463bdf3485df4e83ef95" exitCode=0 Jan 22 12:25:58 crc kubenswrapper[4773]: I0122 12:25:58.648801 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zn4gw" event={"ID":"31cb4b42-1830-451e-9928-814c5ed54abd","Type":"ContainerDied","Data":"15cbb10df9b350d29731f878a58f94611d13f83f0552463bdf3485df4e83ef95"} Jan 22 12:25:58 crc kubenswrapper[4773]: I0122 12:25:58.648882 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zn4gw" event={"ID":"31cb4b42-1830-451e-9928-814c5ed54abd","Type":"ContainerStarted","Data":"71fd79eb28389867702ffe685e6ebc04000e944710ea2c896a7814b13e4db450"} Jan 22 12:25:58 crc kubenswrapper[4773]: I0122 12:25:58.651376 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 12:25:58 crc kubenswrapper[4773]: I0122 12:25:58.657782 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:25:58 crc kubenswrapper[4773]: E0122 12:25:58.658011 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:25:59 crc kubenswrapper[4773]: I0122 12:25:59.662015 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zn4gw" event={"ID":"31cb4b42-1830-451e-9928-814c5ed54abd","Type":"ContainerStarted","Data":"0766101f1c4a9e33cb098ed837e683ff689f65247709e961b88e1e2c90d927a1"} Jan 22 12:26:00 crc kubenswrapper[4773]: I0122 12:26:00.050667 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jp8r9"] Jan 22 12:26:00 crc kubenswrapper[4773]: I0122 12:26:00.052353 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jp8r9" Jan 22 12:26:00 crc kubenswrapper[4773]: I0122 12:26:00.073150 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jp8r9"] Jan 22 12:26:00 crc kubenswrapper[4773]: I0122 12:26:00.178598 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/934c8845-5845-4aac-a6f5-7a3b53cbfbc8-catalog-content\") pod \"community-operators-jp8r9\" (UID: \"934c8845-5845-4aac-a6f5-7a3b53cbfbc8\") " pod="openshift-marketplace/community-operators-jp8r9" Jan 22 12:26:00 crc kubenswrapper[4773]: I0122 12:26:00.178717 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/934c8845-5845-4aac-a6f5-7a3b53cbfbc8-utilities\") pod \"community-operators-jp8r9\" (UID: \"934c8845-5845-4aac-a6f5-7a3b53cbfbc8\") " pod="openshift-marketplace/community-operators-jp8r9" Jan 22 12:26:00 crc kubenswrapper[4773]: I0122 12:26:00.178874 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znwsq\" (UniqueName: \"kubernetes.io/projected/934c8845-5845-4aac-a6f5-7a3b53cbfbc8-kube-api-access-znwsq\") pod \"community-operators-jp8r9\" (UID: \"934c8845-5845-4aac-a6f5-7a3b53cbfbc8\") " pod="openshift-marketplace/community-operators-jp8r9" Jan 22 12:26:00 crc kubenswrapper[4773]: I0122 12:26:00.280001 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/934c8845-5845-4aac-a6f5-7a3b53cbfbc8-catalog-content\") pod \"community-operators-jp8r9\" (UID: \"934c8845-5845-4aac-a6f5-7a3b53cbfbc8\") " pod="openshift-marketplace/community-operators-jp8r9" Jan 22 12:26:00 crc kubenswrapper[4773]: I0122 12:26:00.280111 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/934c8845-5845-4aac-a6f5-7a3b53cbfbc8-utilities\") pod \"community-operators-jp8r9\" (UID: \"934c8845-5845-4aac-a6f5-7a3b53cbfbc8\") " pod="openshift-marketplace/community-operators-jp8r9" Jan 22 12:26:00 crc kubenswrapper[4773]: I0122 12:26:00.280142 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znwsq\" (UniqueName: \"kubernetes.io/projected/934c8845-5845-4aac-a6f5-7a3b53cbfbc8-kube-api-access-znwsq\") pod \"community-operators-jp8r9\" (UID: \"934c8845-5845-4aac-a6f5-7a3b53cbfbc8\") " pod="openshift-marketplace/community-operators-jp8r9" Jan 22 12:26:00 crc kubenswrapper[4773]: I0122 12:26:00.280740 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/934c8845-5845-4aac-a6f5-7a3b53cbfbc8-catalog-content\") pod \"community-operators-jp8r9\" (UID: \"934c8845-5845-4aac-a6f5-7a3b53cbfbc8\") " pod="openshift-marketplace/community-operators-jp8r9" Jan 22 12:26:00 crc kubenswrapper[4773]: I0122 12:26:00.280769 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/934c8845-5845-4aac-a6f5-7a3b53cbfbc8-utilities\") pod \"community-operators-jp8r9\" (UID: \"934c8845-5845-4aac-a6f5-7a3b53cbfbc8\") " pod="openshift-marketplace/community-operators-jp8r9" Jan 22 12:26:00 crc kubenswrapper[4773]: I0122 12:26:00.313108 4773 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-znwsq\" (UniqueName: \"kubernetes.io/projected/934c8845-5845-4aac-a6f5-7a3b53cbfbc8-kube-api-access-znwsq\") pod \"community-operators-jp8r9\" (UID: \"934c8845-5845-4aac-a6f5-7a3b53cbfbc8\") " pod="openshift-marketplace/community-operators-jp8r9" Jan 22 12:26:00 crc kubenswrapper[4773]: I0122 12:26:00.386679 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jp8r9" Jan 22 12:26:00 crc kubenswrapper[4773]: I0122 12:26:00.685904 4773 generic.go:334] "Generic (PLEG): container finished" podID="31cb4b42-1830-451e-9928-814c5ed54abd" containerID="0766101f1c4a9e33cb098ed837e683ff689f65247709e961b88e1e2c90d927a1" exitCode=0 Jan 22 12:26:00 crc kubenswrapper[4773]: I0122 12:26:00.686152 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zn4gw" event={"ID":"31cb4b42-1830-451e-9928-814c5ed54abd","Type":"ContainerDied","Data":"0766101f1c4a9e33cb098ed837e683ff689f65247709e961b88e1e2c90d927a1"} Jan 22 12:26:00 crc kubenswrapper[4773]: I0122 12:26:00.848344 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jp8r9"] Jan 22 12:26:01 crc kubenswrapper[4773]: I0122 12:26:01.698049 4773 generic.go:334] "Generic (PLEG): container finished" podID="934c8845-5845-4aac-a6f5-7a3b53cbfbc8" containerID="2e0e47041b3ce8ed7ca92c61ff3a54bf39c434caabe04a2018fb68eda0b3f78e" exitCode=0 Jan 22 12:26:01 crc kubenswrapper[4773]: I0122 12:26:01.698373 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jp8r9" event={"ID":"934c8845-5845-4aac-a6f5-7a3b53cbfbc8","Type":"ContainerDied","Data":"2e0e47041b3ce8ed7ca92c61ff3a54bf39c434caabe04a2018fb68eda0b3f78e"} Jan 22 12:26:01 crc kubenswrapper[4773]: I0122 12:26:01.698497 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jp8r9" event={"ID":"934c8845-5845-4aac-a6f5-7a3b53cbfbc8","Type":"ContainerStarted","Data":"de8bbd2708d2816bc2db489c256c4781ff69e77c71e89644d17181cd2975d179"} Jan 22 12:26:01 crc kubenswrapper[4773]: I0122 12:26:01.703809 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zn4gw" event={"ID":"31cb4b42-1830-451e-9928-814c5ed54abd","Type":"ContainerStarted","Data":"edd4ebaeaa23f9662ac5325ad6323e3542094219bb337328e040c7fbf72e9c86"} Jan 22 12:26:01 crc kubenswrapper[4773]: I0122 12:26:01.740430 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zn4gw" podStartSLOduration=2.313522347 podStartE2EDuration="4.740397261s" podCreationTimestamp="2026-01-22 12:25:57 +0000 UTC" firstStartedPulling="2026-01-22 12:25:58.651081152 +0000 UTC m=+1866.229196977" lastFinishedPulling="2026-01-22 12:26:01.077956066 +0000 UTC m=+1868.656071891" observedRunningTime="2026-01-22 12:26:01.738862527 +0000 UTC m=+1869.316978362" watchObservedRunningTime="2026-01-22 12:26:01.740397261 +0000 UTC m=+1869.318513096" Jan 22 12:26:02 crc kubenswrapper[4773]: I0122 12:26:02.170496 4773 scope.go:117] "RemoveContainer" containerID="dbc1b5a000ae79e34485e55c8fd8281fd6a21a3c49f844792e4c8e5d1d098c81" Jan 22 12:26:02 crc kubenswrapper[4773]: I0122 12:26:02.198269 4773 scope.go:117] "RemoveContainer" containerID="84743181e9bcf205d60cacb07eb6ab1bf0f5fac0214bec2c405bd822f753600e" Jan 22 12:26:02 crc kubenswrapper[4773]: I0122 12:26:02.222430 4773 scope.go:117] 
"RemoveContainer" containerID="889e6af5b48631940b0b05bbbc915b42895876b4aa8e1b9378483400c385f7a5" Jan 22 12:26:02 crc kubenswrapper[4773]: I0122 12:26:02.252013 4773 scope.go:117] "RemoveContainer" containerID="2aaa460bcd03f0a9d75d04fec79f711d27405af19bd05d0be33e14861ac29dee" Jan 22 12:26:02 crc kubenswrapper[4773]: I0122 12:26:02.711959 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jp8r9" event={"ID":"934c8845-5845-4aac-a6f5-7a3b53cbfbc8","Type":"ContainerStarted","Data":"d75bc39937f721841c80e01dd5cbc5a471f118fffbfb50f4f06d94350aed9ff5"} Jan 22 12:26:03 crc kubenswrapper[4773]: I0122 12:26:03.723403 4773 generic.go:334] "Generic (PLEG): container finished" podID="934c8845-5845-4aac-a6f5-7a3b53cbfbc8" containerID="d75bc39937f721841c80e01dd5cbc5a471f118fffbfb50f4f06d94350aed9ff5" exitCode=0 Jan 22 12:26:03 crc kubenswrapper[4773]: I0122 12:26:03.723476 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jp8r9" event={"ID":"934c8845-5845-4aac-a6f5-7a3b53cbfbc8","Type":"ContainerDied","Data":"d75bc39937f721841c80e01dd5cbc5a471f118fffbfb50f4f06d94350aed9ff5"} Jan 22 12:26:04 crc kubenswrapper[4773]: I0122 12:26:04.736473 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jp8r9" event={"ID":"934c8845-5845-4aac-a6f5-7a3b53cbfbc8","Type":"ContainerStarted","Data":"37ddf9ddb5027f0652e4ad12bd920ed61df85b1eb566b27ba2d1e88c70796d34"} Jan 22 12:26:07 crc kubenswrapper[4773]: I0122 12:26:07.987914 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zn4gw" Jan 22 12:26:07 crc kubenswrapper[4773]: I0122 12:26:07.988378 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zn4gw" Jan 22 12:26:08 crc kubenswrapper[4773]: I0122 12:26:08.039460 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zn4gw" Jan 22 12:26:08 crc kubenswrapper[4773]: I0122 12:26:08.063051 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jp8r9" podStartSLOduration=5.64076084 podStartE2EDuration="8.062994522s" podCreationTimestamp="2026-01-22 12:26:00 +0000 UTC" firstStartedPulling="2026-01-22 12:26:01.700261272 +0000 UTC m=+1869.278377137" lastFinishedPulling="2026-01-22 12:26:04.122494994 +0000 UTC m=+1871.700610819" observedRunningTime="2026-01-22 12:26:04.75784277 +0000 UTC m=+1872.335958655" watchObservedRunningTime="2026-01-22 12:26:08.062994522 +0000 UTC m=+1875.641110347" Jan 22 12:26:08 crc kubenswrapper[4773]: I0122 12:26:08.827967 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zn4gw" Jan 22 12:26:10 crc kubenswrapper[4773]: I0122 12:26:10.387568 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jp8r9" Jan 22 12:26:10 crc kubenswrapper[4773]: I0122 12:26:10.387692 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jp8r9" Jan 22 12:26:10 crc kubenswrapper[4773]: I0122 12:26:10.464553 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jp8r9" Jan 22 12:26:10 crc kubenswrapper[4773]: I0122 12:26:10.658659 4773 scope.go:117] 
"RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:26:10 crc kubenswrapper[4773]: I0122 12:26:10.861253 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jp8r9" Jan 22 12:26:11 crc kubenswrapper[4773]: I0122 12:26:11.038334 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zn4gw"] Jan 22 12:26:11 crc kubenswrapper[4773]: I0122 12:26:11.803046 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"eb3716c89db4b8ac7200ec86afec026406c4026858a60bd199ed8fb4bba2ea89"} Jan 22 12:26:11 crc kubenswrapper[4773]: I0122 12:26:11.803217 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zn4gw" podUID="31cb4b42-1830-451e-9928-814c5ed54abd" containerName="registry-server" containerID="cri-o://edd4ebaeaa23f9662ac5325ad6323e3542094219bb337328e040c7fbf72e9c86" gracePeriod=2 Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.206150 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zn4gw" Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.378367 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31cb4b42-1830-451e-9928-814c5ed54abd-utilities\") pod \"31cb4b42-1830-451e-9928-814c5ed54abd\" (UID: \"31cb4b42-1830-451e-9928-814c5ed54abd\") " Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.378485 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31cb4b42-1830-451e-9928-814c5ed54abd-catalog-content\") pod \"31cb4b42-1830-451e-9928-814c5ed54abd\" (UID: \"31cb4b42-1830-451e-9928-814c5ed54abd\") " Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.378535 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sshw2\" (UniqueName: \"kubernetes.io/projected/31cb4b42-1830-451e-9928-814c5ed54abd-kube-api-access-sshw2\") pod \"31cb4b42-1830-451e-9928-814c5ed54abd\" (UID: \"31cb4b42-1830-451e-9928-814c5ed54abd\") " Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.379895 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31cb4b42-1830-451e-9928-814c5ed54abd-utilities" (OuterVolumeSpecName: "utilities") pod "31cb4b42-1830-451e-9928-814c5ed54abd" (UID: "31cb4b42-1830-451e-9928-814c5ed54abd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.387738 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31cb4b42-1830-451e-9928-814c5ed54abd-kube-api-access-sshw2" (OuterVolumeSpecName: "kube-api-access-sshw2") pod "31cb4b42-1830-451e-9928-814c5ed54abd" (UID: "31cb4b42-1830-451e-9928-814c5ed54abd"). InnerVolumeSpecName "kube-api-access-sshw2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.416314 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31cb4b42-1830-451e-9928-814c5ed54abd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "31cb4b42-1830-451e-9928-814c5ed54abd" (UID: "31cb4b42-1830-451e-9928-814c5ed54abd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.479854 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31cb4b42-1830-451e-9928-814c5ed54abd-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.479900 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31cb4b42-1830-451e-9928-814c5ed54abd-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.479927 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sshw2\" (UniqueName: \"kubernetes.io/projected/31cb4b42-1830-451e-9928-814c5ed54abd-kube-api-access-sshw2\") on node \"crc\" DevicePath \"\"" Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.813723 4773 generic.go:334] "Generic (PLEG): container finished" podID="31cb4b42-1830-451e-9928-814c5ed54abd" containerID="edd4ebaeaa23f9662ac5325ad6323e3542094219bb337328e040c7fbf72e9c86" exitCode=0 Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.813796 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zn4gw" event={"ID":"31cb4b42-1830-451e-9928-814c5ed54abd","Type":"ContainerDied","Data":"edd4ebaeaa23f9662ac5325ad6323e3542094219bb337328e040c7fbf72e9c86"} Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.813849 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zn4gw" Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.813871 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zn4gw" event={"ID":"31cb4b42-1830-451e-9928-814c5ed54abd","Type":"ContainerDied","Data":"71fd79eb28389867702ffe685e6ebc04000e944710ea2c896a7814b13e4db450"} Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.813898 4773 scope.go:117] "RemoveContainer" containerID="edd4ebaeaa23f9662ac5325ad6323e3542094219bb337328e040c7fbf72e9c86" Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.841244 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zn4gw"] Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.848375 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jp8r9"] Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.849552 4773 scope.go:117] "RemoveContainer" containerID="0766101f1c4a9e33cb098ed837e683ff689f65247709e961b88e1e2c90d927a1" Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.854765 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zn4gw"] Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.880380 4773 scope.go:117] "RemoveContainer" containerID="15cbb10df9b350d29731f878a58f94611d13f83f0552463bdf3485df4e83ef95" Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.896177 4773 scope.go:117] "RemoveContainer" containerID="edd4ebaeaa23f9662ac5325ad6323e3542094219bb337328e040c7fbf72e9c86" Jan 22 12:26:12 crc kubenswrapper[4773]: E0122 12:26:12.896672 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edd4ebaeaa23f9662ac5325ad6323e3542094219bb337328e040c7fbf72e9c86\": container with ID starting with edd4ebaeaa23f9662ac5325ad6323e3542094219bb337328e040c7fbf72e9c86 not found: ID does not exist" containerID="edd4ebaeaa23f9662ac5325ad6323e3542094219bb337328e040c7fbf72e9c86" Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.896712 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edd4ebaeaa23f9662ac5325ad6323e3542094219bb337328e040c7fbf72e9c86"} err="failed to get container status \"edd4ebaeaa23f9662ac5325ad6323e3542094219bb337328e040c7fbf72e9c86\": rpc error: code = NotFound desc = could not find container \"edd4ebaeaa23f9662ac5325ad6323e3542094219bb337328e040c7fbf72e9c86\": container with ID starting with edd4ebaeaa23f9662ac5325ad6323e3542094219bb337328e040c7fbf72e9c86 not found: ID does not exist" Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.896734 4773 scope.go:117] "RemoveContainer" containerID="0766101f1c4a9e33cb098ed837e683ff689f65247709e961b88e1e2c90d927a1" Jan 22 12:26:12 crc kubenswrapper[4773]: E0122 12:26:12.897041 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0766101f1c4a9e33cb098ed837e683ff689f65247709e961b88e1e2c90d927a1\": container with ID starting with 0766101f1c4a9e33cb098ed837e683ff689f65247709e961b88e1e2c90d927a1 not found: ID does not exist" containerID="0766101f1c4a9e33cb098ed837e683ff689f65247709e961b88e1e2c90d927a1" Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.897065 4773 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"0766101f1c4a9e33cb098ed837e683ff689f65247709e961b88e1e2c90d927a1"} err="failed to get container status \"0766101f1c4a9e33cb098ed837e683ff689f65247709e961b88e1e2c90d927a1\": rpc error: code = NotFound desc = could not find container \"0766101f1c4a9e33cb098ed837e683ff689f65247709e961b88e1e2c90d927a1\": container with ID starting with 0766101f1c4a9e33cb098ed837e683ff689f65247709e961b88e1e2c90d927a1 not found: ID does not exist" Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.897079 4773 scope.go:117] "RemoveContainer" containerID="15cbb10df9b350d29731f878a58f94611d13f83f0552463bdf3485df4e83ef95" Jan 22 12:26:12 crc kubenswrapper[4773]: E0122 12:26:12.897391 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15cbb10df9b350d29731f878a58f94611d13f83f0552463bdf3485df4e83ef95\": container with ID starting with 15cbb10df9b350d29731f878a58f94611d13f83f0552463bdf3485df4e83ef95 not found: ID does not exist" containerID="15cbb10df9b350d29731f878a58f94611d13f83f0552463bdf3485df4e83ef95" Jan 22 12:26:12 crc kubenswrapper[4773]: I0122 12:26:12.897409 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15cbb10df9b350d29731f878a58f94611d13f83f0552463bdf3485df4e83ef95"} err="failed to get container status \"15cbb10df9b350d29731f878a58f94611d13f83f0552463bdf3485df4e83ef95\": rpc error: code = NotFound desc = could not find container \"15cbb10df9b350d29731f878a58f94611d13f83f0552463bdf3485df4e83ef95\": container with ID starting with 15cbb10df9b350d29731f878a58f94611d13f83f0552463bdf3485df4e83ef95 not found: ID does not exist" Jan 22 12:26:13 crc kubenswrapper[4773]: I0122 12:26:13.830069 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jp8r9" podUID="934c8845-5845-4aac-a6f5-7a3b53cbfbc8" containerName="registry-server" containerID="cri-o://37ddf9ddb5027f0652e4ad12bd920ed61df85b1eb566b27ba2d1e88c70796d34" gracePeriod=2 Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.377584 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jp8r9" Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.512411 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/934c8845-5845-4aac-a6f5-7a3b53cbfbc8-utilities\") pod \"934c8845-5845-4aac-a6f5-7a3b53cbfbc8\" (UID: \"934c8845-5845-4aac-a6f5-7a3b53cbfbc8\") " Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.512480 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/934c8845-5845-4aac-a6f5-7a3b53cbfbc8-catalog-content\") pod \"934c8845-5845-4aac-a6f5-7a3b53cbfbc8\" (UID: \"934c8845-5845-4aac-a6f5-7a3b53cbfbc8\") " Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.512806 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-znwsq\" (UniqueName: \"kubernetes.io/projected/934c8845-5845-4aac-a6f5-7a3b53cbfbc8-kube-api-access-znwsq\") pod \"934c8845-5845-4aac-a6f5-7a3b53cbfbc8\" (UID: \"934c8845-5845-4aac-a6f5-7a3b53cbfbc8\") " Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.513242 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/934c8845-5845-4aac-a6f5-7a3b53cbfbc8-utilities" (OuterVolumeSpecName: "utilities") pod "934c8845-5845-4aac-a6f5-7a3b53cbfbc8" (UID: "934c8845-5845-4aac-a6f5-7a3b53cbfbc8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.513631 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/934c8845-5845-4aac-a6f5-7a3b53cbfbc8-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.518587 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/934c8845-5845-4aac-a6f5-7a3b53cbfbc8-kube-api-access-znwsq" (OuterVolumeSpecName: "kube-api-access-znwsq") pod "934c8845-5845-4aac-a6f5-7a3b53cbfbc8" (UID: "934c8845-5845-4aac-a6f5-7a3b53cbfbc8"). InnerVolumeSpecName "kube-api-access-znwsq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.604248 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/934c8845-5845-4aac-a6f5-7a3b53cbfbc8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "934c8845-5845-4aac-a6f5-7a3b53cbfbc8" (UID: "934c8845-5845-4aac-a6f5-7a3b53cbfbc8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.614839 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-znwsq\" (UniqueName: \"kubernetes.io/projected/934c8845-5845-4aac-a6f5-7a3b53cbfbc8-kube-api-access-znwsq\") on node \"crc\" DevicePath \"\"" Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.614888 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/934c8845-5845-4aac-a6f5-7a3b53cbfbc8-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.672191 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31cb4b42-1830-451e-9928-814c5ed54abd" path="/var/lib/kubelet/pods/31cb4b42-1830-451e-9928-814c5ed54abd/volumes" Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.839354 4773 generic.go:334] "Generic (PLEG): container finished" podID="934c8845-5845-4aac-a6f5-7a3b53cbfbc8" containerID="37ddf9ddb5027f0652e4ad12bd920ed61df85b1eb566b27ba2d1e88c70796d34" exitCode=0 Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.839396 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jp8r9" event={"ID":"934c8845-5845-4aac-a6f5-7a3b53cbfbc8","Type":"ContainerDied","Data":"37ddf9ddb5027f0652e4ad12bd920ed61df85b1eb566b27ba2d1e88c70796d34"} Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.839428 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jp8r9" event={"ID":"934c8845-5845-4aac-a6f5-7a3b53cbfbc8","Type":"ContainerDied","Data":"de8bbd2708d2816bc2db489c256c4781ff69e77c71e89644d17181cd2975d179"} Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.839430 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jp8r9" Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.839444 4773 scope.go:117] "RemoveContainer" containerID="37ddf9ddb5027f0652e4ad12bd920ed61df85b1eb566b27ba2d1e88c70796d34" Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.867415 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jp8r9"] Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.874328 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jp8r9"] Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.876629 4773 scope.go:117] "RemoveContainer" containerID="d75bc39937f721841c80e01dd5cbc5a471f118fffbfb50f4f06d94350aed9ff5" Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.898143 4773 scope.go:117] "RemoveContainer" containerID="2e0e47041b3ce8ed7ca92c61ff3a54bf39c434caabe04a2018fb68eda0b3f78e" Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.920268 4773 scope.go:117] "RemoveContainer" containerID="37ddf9ddb5027f0652e4ad12bd920ed61df85b1eb566b27ba2d1e88c70796d34" Jan 22 12:26:14 crc kubenswrapper[4773]: E0122 12:26:14.920878 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37ddf9ddb5027f0652e4ad12bd920ed61df85b1eb566b27ba2d1e88c70796d34\": container with ID starting with 37ddf9ddb5027f0652e4ad12bd920ed61df85b1eb566b27ba2d1e88c70796d34 not found: ID does not exist" containerID="37ddf9ddb5027f0652e4ad12bd920ed61df85b1eb566b27ba2d1e88c70796d34" Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.920914 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37ddf9ddb5027f0652e4ad12bd920ed61df85b1eb566b27ba2d1e88c70796d34"} err="failed to get container status \"37ddf9ddb5027f0652e4ad12bd920ed61df85b1eb566b27ba2d1e88c70796d34\": rpc error: code = NotFound desc = could not find container \"37ddf9ddb5027f0652e4ad12bd920ed61df85b1eb566b27ba2d1e88c70796d34\": container with ID starting with 37ddf9ddb5027f0652e4ad12bd920ed61df85b1eb566b27ba2d1e88c70796d34 not found: ID does not exist" Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.920944 4773 scope.go:117] "RemoveContainer" containerID="d75bc39937f721841c80e01dd5cbc5a471f118fffbfb50f4f06d94350aed9ff5" Jan 22 12:26:14 crc kubenswrapper[4773]: E0122 12:26:14.921407 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d75bc39937f721841c80e01dd5cbc5a471f118fffbfb50f4f06d94350aed9ff5\": container with ID starting with d75bc39937f721841c80e01dd5cbc5a471f118fffbfb50f4f06d94350aed9ff5 not found: ID does not exist" containerID="d75bc39937f721841c80e01dd5cbc5a471f118fffbfb50f4f06d94350aed9ff5" Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.921428 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d75bc39937f721841c80e01dd5cbc5a471f118fffbfb50f4f06d94350aed9ff5"} err="failed to get container status \"d75bc39937f721841c80e01dd5cbc5a471f118fffbfb50f4f06d94350aed9ff5\": rpc error: code = NotFound desc = could not find container \"d75bc39937f721841c80e01dd5cbc5a471f118fffbfb50f4f06d94350aed9ff5\": container with ID starting with d75bc39937f721841c80e01dd5cbc5a471f118fffbfb50f4f06d94350aed9ff5 not found: ID does not exist" Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.921443 4773 scope.go:117] "RemoveContainer" 
containerID="2e0e47041b3ce8ed7ca92c61ff3a54bf39c434caabe04a2018fb68eda0b3f78e" Jan 22 12:26:14 crc kubenswrapper[4773]: E0122 12:26:14.921959 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e0e47041b3ce8ed7ca92c61ff3a54bf39c434caabe04a2018fb68eda0b3f78e\": container with ID starting with 2e0e47041b3ce8ed7ca92c61ff3a54bf39c434caabe04a2018fb68eda0b3f78e not found: ID does not exist" containerID="2e0e47041b3ce8ed7ca92c61ff3a54bf39c434caabe04a2018fb68eda0b3f78e" Jan 22 12:26:14 crc kubenswrapper[4773]: I0122 12:26:14.921987 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e0e47041b3ce8ed7ca92c61ff3a54bf39c434caabe04a2018fb68eda0b3f78e"} err="failed to get container status \"2e0e47041b3ce8ed7ca92c61ff3a54bf39c434caabe04a2018fb68eda0b3f78e\": rpc error: code = NotFound desc = could not find container \"2e0e47041b3ce8ed7ca92c61ff3a54bf39c434caabe04a2018fb68eda0b3f78e\": container with ID starting with 2e0e47041b3ce8ed7ca92c61ff3a54bf39c434caabe04a2018fb68eda0b3f78e not found: ID does not exist" Jan 22 12:26:16 crc kubenswrapper[4773]: I0122 12:26:16.674084 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="934c8845-5845-4aac-a6f5-7a3b53cbfbc8" path="/var/lib/kubelet/pods/934c8845-5845-4aac-a6f5-7a3b53cbfbc8/volumes" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.220444 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-krm66"] Jan 22 12:27:51 crc kubenswrapper[4773]: E0122 12:27:51.221624 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31cb4b42-1830-451e-9928-814c5ed54abd" containerName="registry-server" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.221648 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="31cb4b42-1830-451e-9928-814c5ed54abd" containerName="registry-server" Jan 22 12:27:51 crc kubenswrapper[4773]: E0122 12:27:51.221685 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31cb4b42-1830-451e-9928-814c5ed54abd" containerName="extract-utilities" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.221696 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="31cb4b42-1830-451e-9928-814c5ed54abd" containerName="extract-utilities" Jan 22 12:27:51 crc kubenswrapper[4773]: E0122 12:27:51.221720 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="934c8845-5845-4aac-a6f5-7a3b53cbfbc8" containerName="registry-server" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.221731 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="934c8845-5845-4aac-a6f5-7a3b53cbfbc8" containerName="registry-server" Jan 22 12:27:51 crc kubenswrapper[4773]: E0122 12:27:51.221751 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="934c8845-5845-4aac-a6f5-7a3b53cbfbc8" containerName="extract-utilities" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.221763 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="934c8845-5845-4aac-a6f5-7a3b53cbfbc8" containerName="extract-utilities" Jan 22 12:27:51 crc kubenswrapper[4773]: E0122 12:27:51.221776 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31cb4b42-1830-451e-9928-814c5ed54abd" containerName="extract-content" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.221786 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="31cb4b42-1830-451e-9928-814c5ed54abd" containerName="extract-content" Jan 22 
12:27:51 crc kubenswrapper[4773]: E0122 12:27:51.221810 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="934c8845-5845-4aac-a6f5-7a3b53cbfbc8" containerName="extract-content" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.221821 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="934c8845-5845-4aac-a6f5-7a3b53cbfbc8" containerName="extract-content" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.222081 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="934c8845-5845-4aac-a6f5-7a3b53cbfbc8" containerName="registry-server" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.222123 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="31cb4b42-1830-451e-9928-814c5ed54abd" containerName="registry-server" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.224011 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-krm66" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.243575 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-krm66"] Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.380175 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/620c09bf-38a8-404f-97c8-b7fffb9c754d-catalog-content\") pod \"redhat-operators-krm66\" (UID: \"620c09bf-38a8-404f-97c8-b7fffb9c754d\") " pod="openshift-marketplace/redhat-operators-krm66" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.380249 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/620c09bf-38a8-404f-97c8-b7fffb9c754d-utilities\") pod \"redhat-operators-krm66\" (UID: \"620c09bf-38a8-404f-97c8-b7fffb9c754d\") " pod="openshift-marketplace/redhat-operators-krm66" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.380562 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6fjs\" (UniqueName: \"kubernetes.io/projected/620c09bf-38a8-404f-97c8-b7fffb9c754d-kube-api-access-b6fjs\") pod \"redhat-operators-krm66\" (UID: \"620c09bf-38a8-404f-97c8-b7fffb9c754d\") " pod="openshift-marketplace/redhat-operators-krm66" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.481745 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/620c09bf-38a8-404f-97c8-b7fffb9c754d-utilities\") pod \"redhat-operators-krm66\" (UID: \"620c09bf-38a8-404f-97c8-b7fffb9c754d\") " pod="openshift-marketplace/redhat-operators-krm66" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.481854 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6fjs\" (UniqueName: \"kubernetes.io/projected/620c09bf-38a8-404f-97c8-b7fffb9c754d-kube-api-access-b6fjs\") pod \"redhat-operators-krm66\" (UID: \"620c09bf-38a8-404f-97c8-b7fffb9c754d\") " pod="openshift-marketplace/redhat-operators-krm66" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.481978 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/620c09bf-38a8-404f-97c8-b7fffb9c754d-catalog-content\") pod \"redhat-operators-krm66\" (UID: \"620c09bf-38a8-404f-97c8-b7fffb9c754d\") " 
pod="openshift-marketplace/redhat-operators-krm66" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.482554 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/620c09bf-38a8-404f-97c8-b7fffb9c754d-utilities\") pod \"redhat-operators-krm66\" (UID: \"620c09bf-38a8-404f-97c8-b7fffb9c754d\") " pod="openshift-marketplace/redhat-operators-krm66" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.482583 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/620c09bf-38a8-404f-97c8-b7fffb9c754d-catalog-content\") pod \"redhat-operators-krm66\" (UID: \"620c09bf-38a8-404f-97c8-b7fffb9c754d\") " pod="openshift-marketplace/redhat-operators-krm66" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.503784 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6fjs\" (UniqueName: \"kubernetes.io/projected/620c09bf-38a8-404f-97c8-b7fffb9c754d-kube-api-access-b6fjs\") pod \"redhat-operators-krm66\" (UID: \"620c09bf-38a8-404f-97c8-b7fffb9c754d\") " pod="openshift-marketplace/redhat-operators-krm66" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.561681 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-krm66" Jan 22 12:27:51 crc kubenswrapper[4773]: I0122 12:27:51.881063 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-krm66"] Jan 22 12:27:53 crc kubenswrapper[4773]: I0122 12:27:53.110369 4773 generic.go:334] "Generic (PLEG): container finished" podID="620c09bf-38a8-404f-97c8-b7fffb9c754d" containerID="2ae654728552aba543ef7baffa42789238b56e8818aaccedb0dcb5ca98068919" exitCode=0 Jan 22 12:27:53 crc kubenswrapper[4773]: I0122 12:27:53.110566 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-krm66" event={"ID":"620c09bf-38a8-404f-97c8-b7fffb9c754d","Type":"ContainerDied","Data":"2ae654728552aba543ef7baffa42789238b56e8818aaccedb0dcb5ca98068919"} Jan 22 12:27:53 crc kubenswrapper[4773]: I0122 12:27:53.110623 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-krm66" event={"ID":"620c09bf-38a8-404f-97c8-b7fffb9c754d","Type":"ContainerStarted","Data":"3081430805b33964c2eb7134ef8102dc0cb80e72bb0010aa364c046385348a23"} Jan 22 12:27:55 crc kubenswrapper[4773]: I0122 12:27:55.130459 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-krm66" event={"ID":"620c09bf-38a8-404f-97c8-b7fffb9c754d","Type":"ContainerStarted","Data":"309305b3b81e19546ee21ee61acdde8f367ba6d815800fb9f6ebc558bbe04d99"} Jan 22 12:27:56 crc kubenswrapper[4773]: I0122 12:27:56.141754 4773 generic.go:334] "Generic (PLEG): container finished" podID="620c09bf-38a8-404f-97c8-b7fffb9c754d" containerID="309305b3b81e19546ee21ee61acdde8f367ba6d815800fb9f6ebc558bbe04d99" exitCode=0 Jan 22 12:27:56 crc kubenswrapper[4773]: I0122 12:27:56.141852 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-krm66" event={"ID":"620c09bf-38a8-404f-97c8-b7fffb9c754d","Type":"ContainerDied","Data":"309305b3b81e19546ee21ee61acdde8f367ba6d815800fb9f6ebc558bbe04d99"} Jan 22 12:27:57 crc kubenswrapper[4773]: I0122 12:27:57.157325 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-krm66" 
event={"ID":"620c09bf-38a8-404f-97c8-b7fffb9c754d","Type":"ContainerStarted","Data":"b3fa4c535584f79e8c6446c5cbba57c1c9f80c693dcd128fae6220f2f56ec412"} Jan 22 12:27:57 crc kubenswrapper[4773]: I0122 12:27:57.201912 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-krm66" podStartSLOduration=2.782190526 podStartE2EDuration="6.20184637s" podCreationTimestamp="2026-01-22 12:27:51 +0000 UTC" firstStartedPulling="2026-01-22 12:27:53.117665441 +0000 UTC m=+1980.695781256" lastFinishedPulling="2026-01-22 12:27:56.537321255 +0000 UTC m=+1984.115437100" observedRunningTime="2026-01-22 12:27:57.188963396 +0000 UTC m=+1984.767079221" watchObservedRunningTime="2026-01-22 12:27:57.20184637 +0000 UTC m=+1984.779962225" Jan 22 12:28:01 crc kubenswrapper[4773]: I0122 12:28:01.562007 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-krm66" Jan 22 12:28:01 crc kubenswrapper[4773]: I0122 12:28:01.562579 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-krm66" Jan 22 12:28:02 crc kubenswrapper[4773]: I0122 12:28:02.609583 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-krm66" podUID="620c09bf-38a8-404f-97c8-b7fffb9c754d" containerName="registry-server" probeResult="failure" output=< Jan 22 12:28:02 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 12:28:02 crc kubenswrapper[4773]: > Jan 22 12:28:11 crc kubenswrapper[4773]: I0122 12:28:11.648540 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-krm66" Jan 22 12:28:11 crc kubenswrapper[4773]: I0122 12:28:11.713482 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-krm66" Jan 22 12:28:11 crc kubenswrapper[4773]: I0122 12:28:11.903160 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-krm66"] Jan 22 12:28:13 crc kubenswrapper[4773]: I0122 12:28:13.454202 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-krm66" podUID="620c09bf-38a8-404f-97c8-b7fffb9c754d" containerName="registry-server" containerID="cri-o://b3fa4c535584f79e8c6446c5cbba57c1c9f80c693dcd128fae6220f2f56ec412" gracePeriod=2 Jan 22 12:28:13 crc kubenswrapper[4773]: I0122 12:28:13.826808 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-krm66" Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.026458 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/620c09bf-38a8-404f-97c8-b7fffb9c754d-utilities\") pod \"620c09bf-38a8-404f-97c8-b7fffb9c754d\" (UID: \"620c09bf-38a8-404f-97c8-b7fffb9c754d\") " Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.026749 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6fjs\" (UniqueName: \"kubernetes.io/projected/620c09bf-38a8-404f-97c8-b7fffb9c754d-kube-api-access-b6fjs\") pod \"620c09bf-38a8-404f-97c8-b7fffb9c754d\" (UID: \"620c09bf-38a8-404f-97c8-b7fffb9c754d\") " Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.026843 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/620c09bf-38a8-404f-97c8-b7fffb9c754d-catalog-content\") pod \"620c09bf-38a8-404f-97c8-b7fffb9c754d\" (UID: \"620c09bf-38a8-404f-97c8-b7fffb9c754d\") " Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.028539 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/620c09bf-38a8-404f-97c8-b7fffb9c754d-utilities" (OuterVolumeSpecName: "utilities") pod "620c09bf-38a8-404f-97c8-b7fffb9c754d" (UID: "620c09bf-38a8-404f-97c8-b7fffb9c754d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.033040 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/620c09bf-38a8-404f-97c8-b7fffb9c754d-kube-api-access-b6fjs" (OuterVolumeSpecName: "kube-api-access-b6fjs") pod "620c09bf-38a8-404f-97c8-b7fffb9c754d" (UID: "620c09bf-38a8-404f-97c8-b7fffb9c754d"). InnerVolumeSpecName "kube-api-access-b6fjs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.128318 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/620c09bf-38a8-404f-97c8-b7fffb9c754d-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.128605 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6fjs\" (UniqueName: \"kubernetes.io/projected/620c09bf-38a8-404f-97c8-b7fffb9c754d-kube-api-access-b6fjs\") on node \"crc\" DevicePath \"\"" Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.175354 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/620c09bf-38a8-404f-97c8-b7fffb9c754d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "620c09bf-38a8-404f-97c8-b7fffb9c754d" (UID: "620c09bf-38a8-404f-97c8-b7fffb9c754d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.230207 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/620c09bf-38a8-404f-97c8-b7fffb9c754d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.464529 4773 generic.go:334] "Generic (PLEG): container finished" podID="620c09bf-38a8-404f-97c8-b7fffb9c754d" containerID="b3fa4c535584f79e8c6446c5cbba57c1c9f80c693dcd128fae6220f2f56ec412" exitCode=0 Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.464589 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-krm66" Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.464580 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-krm66" event={"ID":"620c09bf-38a8-404f-97c8-b7fffb9c754d","Type":"ContainerDied","Data":"b3fa4c535584f79e8c6446c5cbba57c1c9f80c693dcd128fae6220f2f56ec412"} Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.466609 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-krm66" event={"ID":"620c09bf-38a8-404f-97c8-b7fffb9c754d","Type":"ContainerDied","Data":"3081430805b33964c2eb7134ef8102dc0cb80e72bb0010aa364c046385348a23"} Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.466705 4773 scope.go:117] "RemoveContainer" containerID="b3fa4c535584f79e8c6446c5cbba57c1c9f80c693dcd128fae6220f2f56ec412" Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.489408 4773 scope.go:117] "RemoveContainer" containerID="309305b3b81e19546ee21ee61acdde8f367ba6d815800fb9f6ebc558bbe04d99" Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.506600 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-krm66"] Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.513093 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-krm66"] Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.520059 4773 scope.go:117] "RemoveContainer" containerID="2ae654728552aba543ef7baffa42789238b56e8818aaccedb0dcb5ca98068919" Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.549129 4773 scope.go:117] "RemoveContainer" containerID="b3fa4c535584f79e8c6446c5cbba57c1c9f80c693dcd128fae6220f2f56ec412" Jan 22 12:28:14 crc kubenswrapper[4773]: E0122 12:28:14.549961 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3fa4c535584f79e8c6446c5cbba57c1c9f80c693dcd128fae6220f2f56ec412\": container with ID starting with b3fa4c535584f79e8c6446c5cbba57c1c9f80c693dcd128fae6220f2f56ec412 not found: ID does not exist" containerID="b3fa4c535584f79e8c6446c5cbba57c1c9f80c693dcd128fae6220f2f56ec412" Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.550018 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3fa4c535584f79e8c6446c5cbba57c1c9f80c693dcd128fae6220f2f56ec412"} err="failed to get container status \"b3fa4c535584f79e8c6446c5cbba57c1c9f80c693dcd128fae6220f2f56ec412\": rpc error: code = NotFound desc = could not find container \"b3fa4c535584f79e8c6446c5cbba57c1c9f80c693dcd128fae6220f2f56ec412\": container with ID starting with b3fa4c535584f79e8c6446c5cbba57c1c9f80c693dcd128fae6220f2f56ec412 not found: ID does not exist" Jan 22 12:28:14 crc 
kubenswrapper[4773]: I0122 12:28:14.550048 4773 scope.go:117] "RemoveContainer" containerID="309305b3b81e19546ee21ee61acdde8f367ba6d815800fb9f6ebc558bbe04d99" Jan 22 12:28:14 crc kubenswrapper[4773]: E0122 12:28:14.550654 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"309305b3b81e19546ee21ee61acdde8f367ba6d815800fb9f6ebc558bbe04d99\": container with ID starting with 309305b3b81e19546ee21ee61acdde8f367ba6d815800fb9f6ebc558bbe04d99 not found: ID does not exist" containerID="309305b3b81e19546ee21ee61acdde8f367ba6d815800fb9f6ebc558bbe04d99" Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.550693 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"309305b3b81e19546ee21ee61acdde8f367ba6d815800fb9f6ebc558bbe04d99"} err="failed to get container status \"309305b3b81e19546ee21ee61acdde8f367ba6d815800fb9f6ebc558bbe04d99\": rpc error: code = NotFound desc = could not find container \"309305b3b81e19546ee21ee61acdde8f367ba6d815800fb9f6ebc558bbe04d99\": container with ID starting with 309305b3b81e19546ee21ee61acdde8f367ba6d815800fb9f6ebc558bbe04d99 not found: ID does not exist" Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.550707 4773 scope.go:117] "RemoveContainer" containerID="2ae654728552aba543ef7baffa42789238b56e8818aaccedb0dcb5ca98068919" Jan 22 12:28:14 crc kubenswrapper[4773]: E0122 12:28:14.551084 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ae654728552aba543ef7baffa42789238b56e8818aaccedb0dcb5ca98068919\": container with ID starting with 2ae654728552aba543ef7baffa42789238b56e8818aaccedb0dcb5ca98068919 not found: ID does not exist" containerID="2ae654728552aba543ef7baffa42789238b56e8818aaccedb0dcb5ca98068919" Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.551124 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ae654728552aba543ef7baffa42789238b56e8818aaccedb0dcb5ca98068919"} err="failed to get container status \"2ae654728552aba543ef7baffa42789238b56e8818aaccedb0dcb5ca98068919\": rpc error: code = NotFound desc = could not find container \"2ae654728552aba543ef7baffa42789238b56e8818aaccedb0dcb5ca98068919\": container with ID starting with 2ae654728552aba543ef7baffa42789238b56e8818aaccedb0dcb5ca98068919 not found: ID does not exist" Jan 22 12:28:14 crc kubenswrapper[4773]: I0122 12:28:14.673957 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="620c09bf-38a8-404f-97c8-b7fffb9c754d" path="/var/lib/kubelet/pods/620c09bf-38a8-404f-97c8-b7fffb9c754d/volumes" Jan 22 12:28:34 crc kubenswrapper[4773]: I0122 12:28:34.074656 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:28:34 crc kubenswrapper[4773]: I0122 12:28:34.075276 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:29:04 crc kubenswrapper[4773]: I0122 12:29:04.074477 4773 patch_prober.go:28] interesting 
pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:29:04 crc kubenswrapper[4773]: I0122 12:29:04.075057 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:29:34 crc kubenswrapper[4773]: I0122 12:29:34.073950 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:29:34 crc kubenswrapper[4773]: I0122 12:29:34.077113 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:29:34 crc kubenswrapper[4773]: I0122 12:29:34.077436 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 12:29:34 crc kubenswrapper[4773]: I0122 12:29:34.078456 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"eb3716c89db4b8ac7200ec86afec026406c4026858a60bd199ed8fb4bba2ea89"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 12:29:34 crc kubenswrapper[4773]: I0122 12:29:34.078734 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://eb3716c89db4b8ac7200ec86afec026406c4026858a60bd199ed8fb4bba2ea89" gracePeriod=600 Jan 22 12:29:35 crc kubenswrapper[4773]: I0122 12:29:35.117895 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="eb3716c89db4b8ac7200ec86afec026406c4026858a60bd199ed8fb4bba2ea89" exitCode=0 Jan 22 12:29:35 crc kubenswrapper[4773]: I0122 12:29:35.118043 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"eb3716c89db4b8ac7200ec86afec026406c4026858a60bd199ed8fb4bba2ea89"} Jan 22 12:29:35 crc kubenswrapper[4773]: I0122 12:29:35.119564 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac"} Jan 22 12:29:35 crc kubenswrapper[4773]: I0122 12:29:35.119611 4773 scope.go:117] "RemoveContainer" containerID="39f4957698c5099647db4bfd0903985dcdd22d75e0d811793b6cccf3020a2dad" Jan 22 12:30:00 
crc kubenswrapper[4773]: I0122 12:30:00.149034 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc"] Jan 22 12:30:00 crc kubenswrapper[4773]: E0122 12:30:00.149946 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="620c09bf-38a8-404f-97c8-b7fffb9c754d" containerName="registry-server" Jan 22 12:30:00 crc kubenswrapper[4773]: I0122 12:30:00.149962 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="620c09bf-38a8-404f-97c8-b7fffb9c754d" containerName="registry-server" Jan 22 12:30:00 crc kubenswrapper[4773]: E0122 12:30:00.149975 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="620c09bf-38a8-404f-97c8-b7fffb9c754d" containerName="extract-utilities" Jan 22 12:30:00 crc kubenswrapper[4773]: I0122 12:30:00.149982 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="620c09bf-38a8-404f-97c8-b7fffb9c754d" containerName="extract-utilities" Jan 22 12:30:00 crc kubenswrapper[4773]: E0122 12:30:00.149994 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="620c09bf-38a8-404f-97c8-b7fffb9c754d" containerName="extract-content" Jan 22 12:30:00 crc kubenswrapper[4773]: I0122 12:30:00.150001 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="620c09bf-38a8-404f-97c8-b7fffb9c754d" containerName="extract-content" Jan 22 12:30:00 crc kubenswrapper[4773]: I0122 12:30:00.150184 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="620c09bf-38a8-404f-97c8-b7fffb9c754d" containerName="registry-server" Jan 22 12:30:00 crc kubenswrapper[4773]: I0122 12:30:00.150788 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc" Jan 22 12:30:00 crc kubenswrapper[4773]: I0122 12:30:00.153276 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 12:30:00 crc kubenswrapper[4773]: I0122 12:30:00.153963 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 12:30:00 crc kubenswrapper[4773]: I0122 12:30:00.160550 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc"] Jan 22 12:30:00 crc kubenswrapper[4773]: I0122 12:30:00.276149 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b997907-7bc8-4d67-96a0-eacb8edc2bbd-config-volume\") pod \"collect-profiles-29484750-trskc\" (UID: \"5b997907-7bc8-4d67-96a0-eacb8edc2bbd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc" Jan 22 12:30:00 crc kubenswrapper[4773]: I0122 12:30:00.276227 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6v5w\" (UniqueName: \"kubernetes.io/projected/5b997907-7bc8-4d67-96a0-eacb8edc2bbd-kube-api-access-z6v5w\") pod \"collect-profiles-29484750-trskc\" (UID: \"5b997907-7bc8-4d67-96a0-eacb8edc2bbd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc" Jan 22 12:30:00 crc kubenswrapper[4773]: I0122 12:30:00.276328 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b997907-7bc8-4d67-96a0-eacb8edc2bbd-secret-volume\") pod 
\"collect-profiles-29484750-trskc\" (UID: \"5b997907-7bc8-4d67-96a0-eacb8edc2bbd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc" Jan 22 12:30:00 crc kubenswrapper[4773]: I0122 12:30:00.393494 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6v5w\" (UniqueName: \"kubernetes.io/projected/5b997907-7bc8-4d67-96a0-eacb8edc2bbd-kube-api-access-z6v5w\") pod \"collect-profiles-29484750-trskc\" (UID: \"5b997907-7bc8-4d67-96a0-eacb8edc2bbd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc" Jan 22 12:30:00 crc kubenswrapper[4773]: I0122 12:30:00.393690 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b997907-7bc8-4d67-96a0-eacb8edc2bbd-secret-volume\") pod \"collect-profiles-29484750-trskc\" (UID: \"5b997907-7bc8-4d67-96a0-eacb8edc2bbd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc" Jan 22 12:30:00 crc kubenswrapper[4773]: I0122 12:30:00.393755 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b997907-7bc8-4d67-96a0-eacb8edc2bbd-config-volume\") pod \"collect-profiles-29484750-trskc\" (UID: \"5b997907-7bc8-4d67-96a0-eacb8edc2bbd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc" Jan 22 12:30:00 crc kubenswrapper[4773]: I0122 12:30:00.396955 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b997907-7bc8-4d67-96a0-eacb8edc2bbd-config-volume\") pod \"collect-profiles-29484750-trskc\" (UID: \"5b997907-7bc8-4d67-96a0-eacb8edc2bbd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc" Jan 22 12:30:00 crc kubenswrapper[4773]: I0122 12:30:00.404879 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b997907-7bc8-4d67-96a0-eacb8edc2bbd-secret-volume\") pod \"collect-profiles-29484750-trskc\" (UID: \"5b997907-7bc8-4d67-96a0-eacb8edc2bbd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc" Jan 22 12:30:00 crc kubenswrapper[4773]: I0122 12:30:00.427203 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6v5w\" (UniqueName: \"kubernetes.io/projected/5b997907-7bc8-4d67-96a0-eacb8edc2bbd-kube-api-access-z6v5w\") pod \"collect-profiles-29484750-trskc\" (UID: \"5b997907-7bc8-4d67-96a0-eacb8edc2bbd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc" Jan 22 12:30:00 crc kubenswrapper[4773]: I0122 12:30:00.476036 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc" Jan 22 12:30:00 crc kubenswrapper[4773]: I0122 12:30:00.700763 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc"] Jan 22 12:30:01 crc kubenswrapper[4773]: I0122 12:30:01.358118 4773 generic.go:334] "Generic (PLEG): container finished" podID="5b997907-7bc8-4d67-96a0-eacb8edc2bbd" containerID="f9a9743f0629d424353b0e303711e967d587c657f37d582b22e16b065aedc0ed" exitCode=0 Jan 22 12:30:01 crc kubenswrapper[4773]: I0122 12:30:01.358177 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc" event={"ID":"5b997907-7bc8-4d67-96a0-eacb8edc2bbd","Type":"ContainerDied","Data":"f9a9743f0629d424353b0e303711e967d587c657f37d582b22e16b065aedc0ed"} Jan 22 12:30:01 crc kubenswrapper[4773]: I0122 12:30:01.358561 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc" event={"ID":"5b997907-7bc8-4d67-96a0-eacb8edc2bbd","Type":"ContainerStarted","Data":"e67ded4c1750e698564770ab6917992a9976d02efd9a5f8bd358dca8f6b67b6a"} Jan 22 12:30:02 crc kubenswrapper[4773]: I0122 12:30:02.742027 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc" Jan 22 12:30:02 crc kubenswrapper[4773]: I0122 12:30:02.933848 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b997907-7bc8-4d67-96a0-eacb8edc2bbd-config-volume\") pod \"5b997907-7bc8-4d67-96a0-eacb8edc2bbd\" (UID: \"5b997907-7bc8-4d67-96a0-eacb8edc2bbd\") " Jan 22 12:30:02 crc kubenswrapper[4773]: I0122 12:30:02.934325 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6v5w\" (UniqueName: \"kubernetes.io/projected/5b997907-7bc8-4d67-96a0-eacb8edc2bbd-kube-api-access-z6v5w\") pod \"5b997907-7bc8-4d67-96a0-eacb8edc2bbd\" (UID: \"5b997907-7bc8-4d67-96a0-eacb8edc2bbd\") " Jan 22 12:30:02 crc kubenswrapper[4773]: I0122 12:30:02.934386 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b997907-7bc8-4d67-96a0-eacb8edc2bbd-secret-volume\") pod \"5b997907-7bc8-4d67-96a0-eacb8edc2bbd\" (UID: \"5b997907-7bc8-4d67-96a0-eacb8edc2bbd\") " Jan 22 12:30:02 crc kubenswrapper[4773]: I0122 12:30:02.936402 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b997907-7bc8-4d67-96a0-eacb8edc2bbd-config-volume" (OuterVolumeSpecName: "config-volume") pod "5b997907-7bc8-4d67-96a0-eacb8edc2bbd" (UID: "5b997907-7bc8-4d67-96a0-eacb8edc2bbd"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 12:30:02 crc kubenswrapper[4773]: I0122 12:30:02.943524 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b997907-7bc8-4d67-96a0-eacb8edc2bbd-kube-api-access-z6v5w" (OuterVolumeSpecName: "kube-api-access-z6v5w") pod "5b997907-7bc8-4d67-96a0-eacb8edc2bbd" (UID: "5b997907-7bc8-4d67-96a0-eacb8edc2bbd"). InnerVolumeSpecName "kube-api-access-z6v5w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:30:02 crc kubenswrapper[4773]: I0122 12:30:02.944473 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b997907-7bc8-4d67-96a0-eacb8edc2bbd-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5b997907-7bc8-4d67-96a0-eacb8edc2bbd" (UID: "5b997907-7bc8-4d67-96a0-eacb8edc2bbd"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 12:30:03 crc kubenswrapper[4773]: I0122 12:30:03.036352 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6v5w\" (UniqueName: \"kubernetes.io/projected/5b997907-7bc8-4d67-96a0-eacb8edc2bbd-kube-api-access-z6v5w\") on node \"crc\" DevicePath \"\"" Jan 22 12:30:03 crc kubenswrapper[4773]: I0122 12:30:03.036387 4773 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b997907-7bc8-4d67-96a0-eacb8edc2bbd-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 12:30:03 crc kubenswrapper[4773]: I0122 12:30:03.036397 4773 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b997907-7bc8-4d67-96a0-eacb8edc2bbd-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 12:30:03 crc kubenswrapper[4773]: I0122 12:30:03.384927 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc" event={"ID":"5b997907-7bc8-4d67-96a0-eacb8edc2bbd","Type":"ContainerDied","Data":"e67ded4c1750e698564770ab6917992a9976d02efd9a5f8bd358dca8f6b67b6a"} Jan 22 12:30:03 crc kubenswrapper[4773]: I0122 12:30:03.384973 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e67ded4c1750e698564770ab6917992a9976d02efd9a5f8bd358dca8f6b67b6a" Jan 22 12:30:03 crc kubenswrapper[4773]: I0122 12:30:03.385027 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc" Jan 22 12:30:03 crc kubenswrapper[4773]: I0122 12:30:03.838895 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc"] Jan 22 12:30:03 crc kubenswrapper[4773]: I0122 12:30:03.853020 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484705-kmcsc"] Jan 22 12:30:04 crc kubenswrapper[4773]: I0122 12:30:04.667614 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8dcf163-8c98-4db0-95e5-1d96e45a619c" path="/var/lib/kubelet/pods/a8dcf163-8c98-4db0-95e5-1d96e45a619c/volumes" Jan 22 12:30:43 crc kubenswrapper[4773]: I0122 12:30:43.510623 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-b284l"] Jan 22 12:30:43 crc kubenswrapper[4773]: E0122 12:30:43.511971 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b997907-7bc8-4d67-96a0-eacb8edc2bbd" containerName="collect-profiles" Jan 22 12:30:43 crc kubenswrapper[4773]: I0122 12:30:43.511997 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b997907-7bc8-4d67-96a0-eacb8edc2bbd" containerName="collect-profiles" Jan 22 12:30:43 crc kubenswrapper[4773]: I0122 12:30:43.512282 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b997907-7bc8-4d67-96a0-eacb8edc2bbd" containerName="collect-profiles" Jan 22 12:30:43 crc kubenswrapper[4773]: I0122 12:30:43.513985 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-b284l" Jan 22 12:30:43 crc kubenswrapper[4773]: I0122 12:30:43.523674 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-b284l"] Jan 22 12:30:43 crc kubenswrapper[4773]: I0122 12:30:43.528278 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3cc48769-ee7f-4eb5-8324-69144780fce5-catalog-content\") pod \"certified-operators-b284l\" (UID: \"3cc48769-ee7f-4eb5-8324-69144780fce5\") " pod="openshift-marketplace/certified-operators-b284l" Jan 22 12:30:43 crc kubenswrapper[4773]: I0122 12:30:43.528429 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqttw\" (UniqueName: \"kubernetes.io/projected/3cc48769-ee7f-4eb5-8324-69144780fce5-kube-api-access-kqttw\") pod \"certified-operators-b284l\" (UID: \"3cc48769-ee7f-4eb5-8324-69144780fce5\") " pod="openshift-marketplace/certified-operators-b284l" Jan 22 12:30:43 crc kubenswrapper[4773]: I0122 12:30:43.528502 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3cc48769-ee7f-4eb5-8324-69144780fce5-utilities\") pod \"certified-operators-b284l\" (UID: \"3cc48769-ee7f-4eb5-8324-69144780fce5\") " pod="openshift-marketplace/certified-operators-b284l" Jan 22 12:30:43 crc kubenswrapper[4773]: I0122 12:30:43.629382 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3cc48769-ee7f-4eb5-8324-69144780fce5-utilities\") pod \"certified-operators-b284l\" (UID: \"3cc48769-ee7f-4eb5-8324-69144780fce5\") " pod="openshift-marketplace/certified-operators-b284l" Jan 22 12:30:43 crc kubenswrapper[4773]: 
I0122 12:30:43.629444 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3cc48769-ee7f-4eb5-8324-69144780fce5-catalog-content\") pod \"certified-operators-b284l\" (UID: \"3cc48769-ee7f-4eb5-8324-69144780fce5\") " pod="openshift-marketplace/certified-operators-b284l" Jan 22 12:30:43 crc kubenswrapper[4773]: I0122 12:30:43.629523 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqttw\" (UniqueName: \"kubernetes.io/projected/3cc48769-ee7f-4eb5-8324-69144780fce5-kube-api-access-kqttw\") pod \"certified-operators-b284l\" (UID: \"3cc48769-ee7f-4eb5-8324-69144780fce5\") " pod="openshift-marketplace/certified-operators-b284l" Jan 22 12:30:43 crc kubenswrapper[4773]: I0122 12:30:43.630482 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3cc48769-ee7f-4eb5-8324-69144780fce5-utilities\") pod \"certified-operators-b284l\" (UID: \"3cc48769-ee7f-4eb5-8324-69144780fce5\") " pod="openshift-marketplace/certified-operators-b284l" Jan 22 12:30:43 crc kubenswrapper[4773]: I0122 12:30:43.630566 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3cc48769-ee7f-4eb5-8324-69144780fce5-catalog-content\") pod \"certified-operators-b284l\" (UID: \"3cc48769-ee7f-4eb5-8324-69144780fce5\") " pod="openshift-marketplace/certified-operators-b284l" Jan 22 12:30:43 crc kubenswrapper[4773]: I0122 12:30:43.652401 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqttw\" (UniqueName: \"kubernetes.io/projected/3cc48769-ee7f-4eb5-8324-69144780fce5-kube-api-access-kqttw\") pod \"certified-operators-b284l\" (UID: \"3cc48769-ee7f-4eb5-8324-69144780fce5\") " pod="openshift-marketplace/certified-operators-b284l" Jan 22 12:30:43 crc kubenswrapper[4773]: I0122 12:30:43.863404 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-b284l" Jan 22 12:30:44 crc kubenswrapper[4773]: I0122 12:30:44.409742 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-b284l"] Jan 22 12:30:44 crc kubenswrapper[4773]: I0122 12:30:44.733819 4773 generic.go:334] "Generic (PLEG): container finished" podID="3cc48769-ee7f-4eb5-8324-69144780fce5" containerID="6dc88d73b7fa6002c2e56c2ee1eb733cbf7f7a1b60aee2814f360cb8537d5cf7" exitCode=0 Jan 22 12:30:44 crc kubenswrapper[4773]: I0122 12:30:44.734079 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b284l" event={"ID":"3cc48769-ee7f-4eb5-8324-69144780fce5","Type":"ContainerDied","Data":"6dc88d73b7fa6002c2e56c2ee1eb733cbf7f7a1b60aee2814f360cb8537d5cf7"} Jan 22 12:30:44 crc kubenswrapper[4773]: I0122 12:30:44.734152 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b284l" event={"ID":"3cc48769-ee7f-4eb5-8324-69144780fce5","Type":"ContainerStarted","Data":"6a7f63a3899f36ff8d4da23844b99b33aa92183d81f09c4dc0de87e1a585efc7"} Jan 22 12:30:46 crc kubenswrapper[4773]: I0122 12:30:46.755501 4773 generic.go:334] "Generic (PLEG): container finished" podID="3cc48769-ee7f-4eb5-8324-69144780fce5" containerID="461bfd86de1c283e8272616e934774484e1789ccd2373ef078cab4a179599daf" exitCode=0 Jan 22 12:30:46 crc kubenswrapper[4773]: I0122 12:30:46.755636 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b284l" event={"ID":"3cc48769-ee7f-4eb5-8324-69144780fce5","Type":"ContainerDied","Data":"461bfd86de1c283e8272616e934774484e1789ccd2373ef078cab4a179599daf"} Jan 22 12:30:47 crc kubenswrapper[4773]: I0122 12:30:47.769759 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b284l" event={"ID":"3cc48769-ee7f-4eb5-8324-69144780fce5","Type":"ContainerStarted","Data":"bf611de7d8dd41db642c395477f67122764336ec2a2376e5803a2294e249c19b"} Jan 22 12:30:47 crc kubenswrapper[4773]: I0122 12:30:47.794299 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-b284l" podStartSLOduration=2.355026984 podStartE2EDuration="4.794259942s" podCreationTimestamp="2026-01-22 12:30:43 +0000 UTC" firstStartedPulling="2026-01-22 12:30:44.739315198 +0000 UTC m=+2152.317431063" lastFinishedPulling="2026-01-22 12:30:47.178548156 +0000 UTC m=+2154.756664021" observedRunningTime="2026-01-22 12:30:47.791754301 +0000 UTC m=+2155.369870186" watchObservedRunningTime="2026-01-22 12:30:47.794259942 +0000 UTC m=+2155.372375767" Jan 22 12:30:53 crc kubenswrapper[4773]: I0122 12:30:53.864476 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-b284l" Jan 22 12:30:53 crc kubenswrapper[4773]: I0122 12:30:53.865110 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-b284l" Jan 22 12:30:53 crc kubenswrapper[4773]: I0122 12:30:53.940933 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-b284l" Jan 22 12:30:54 crc kubenswrapper[4773]: I0122 12:30:54.907926 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-b284l" Jan 22 12:30:54 crc kubenswrapper[4773]: I0122 12:30:54.960801 4773 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/certified-operators-b284l"] Jan 22 12:30:56 crc kubenswrapper[4773]: I0122 12:30:56.844488 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-b284l" podUID="3cc48769-ee7f-4eb5-8324-69144780fce5" containerName="registry-server" containerID="cri-o://bf611de7d8dd41db642c395477f67122764336ec2a2376e5803a2294e249c19b" gracePeriod=2 Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.736421 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-b284l" Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.858361 4773 generic.go:334] "Generic (PLEG): container finished" podID="3cc48769-ee7f-4eb5-8324-69144780fce5" containerID="bf611de7d8dd41db642c395477f67122764336ec2a2376e5803a2294e249c19b" exitCode=0 Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.858407 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b284l" event={"ID":"3cc48769-ee7f-4eb5-8324-69144780fce5","Type":"ContainerDied","Data":"bf611de7d8dd41db642c395477f67122764336ec2a2376e5803a2294e249c19b"} Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.858437 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b284l" event={"ID":"3cc48769-ee7f-4eb5-8324-69144780fce5","Type":"ContainerDied","Data":"6a7f63a3899f36ff8d4da23844b99b33aa92183d81f09c4dc0de87e1a585efc7"} Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.858460 4773 scope.go:117] "RemoveContainer" containerID="bf611de7d8dd41db642c395477f67122764336ec2a2376e5803a2294e249c19b" Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.858630 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-b284l" Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.864897 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqttw\" (UniqueName: \"kubernetes.io/projected/3cc48769-ee7f-4eb5-8324-69144780fce5-kube-api-access-kqttw\") pod \"3cc48769-ee7f-4eb5-8324-69144780fce5\" (UID: \"3cc48769-ee7f-4eb5-8324-69144780fce5\") " Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.864986 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3cc48769-ee7f-4eb5-8324-69144780fce5-catalog-content\") pod \"3cc48769-ee7f-4eb5-8324-69144780fce5\" (UID: \"3cc48769-ee7f-4eb5-8324-69144780fce5\") " Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.865026 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3cc48769-ee7f-4eb5-8324-69144780fce5-utilities\") pod \"3cc48769-ee7f-4eb5-8324-69144780fce5\" (UID: \"3cc48769-ee7f-4eb5-8324-69144780fce5\") " Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.868500 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3cc48769-ee7f-4eb5-8324-69144780fce5-utilities" (OuterVolumeSpecName: "utilities") pod "3cc48769-ee7f-4eb5-8324-69144780fce5" (UID: "3cc48769-ee7f-4eb5-8324-69144780fce5"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.872302 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cc48769-ee7f-4eb5-8324-69144780fce5-kube-api-access-kqttw" (OuterVolumeSpecName: "kube-api-access-kqttw") pod "3cc48769-ee7f-4eb5-8324-69144780fce5" (UID: "3cc48769-ee7f-4eb5-8324-69144780fce5"). InnerVolumeSpecName "kube-api-access-kqttw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.912921 4773 scope.go:117] "RemoveContainer" containerID="461bfd86de1c283e8272616e934774484e1789ccd2373ef078cab4a179599daf" Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.928630 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3cc48769-ee7f-4eb5-8324-69144780fce5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3cc48769-ee7f-4eb5-8324-69144780fce5" (UID: "3cc48769-ee7f-4eb5-8324-69144780fce5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.934766 4773 scope.go:117] "RemoveContainer" containerID="6dc88d73b7fa6002c2e56c2ee1eb733cbf7f7a1b60aee2814f360cb8537d5cf7" Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.959378 4773 scope.go:117] "RemoveContainer" containerID="bf611de7d8dd41db642c395477f67122764336ec2a2376e5803a2294e249c19b" Jan 22 12:30:57 crc kubenswrapper[4773]: E0122 12:30:57.960116 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf611de7d8dd41db642c395477f67122764336ec2a2376e5803a2294e249c19b\": container with ID starting with bf611de7d8dd41db642c395477f67122764336ec2a2376e5803a2294e249c19b not found: ID does not exist" containerID="bf611de7d8dd41db642c395477f67122764336ec2a2376e5803a2294e249c19b" Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.960159 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf611de7d8dd41db642c395477f67122764336ec2a2376e5803a2294e249c19b"} err="failed to get container status \"bf611de7d8dd41db642c395477f67122764336ec2a2376e5803a2294e249c19b\": rpc error: code = NotFound desc = could not find container \"bf611de7d8dd41db642c395477f67122764336ec2a2376e5803a2294e249c19b\": container with ID starting with bf611de7d8dd41db642c395477f67122764336ec2a2376e5803a2294e249c19b not found: ID does not exist" Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.960186 4773 scope.go:117] "RemoveContainer" containerID="461bfd86de1c283e8272616e934774484e1789ccd2373ef078cab4a179599daf" Jan 22 12:30:57 crc kubenswrapper[4773]: E0122 12:30:57.960514 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"461bfd86de1c283e8272616e934774484e1789ccd2373ef078cab4a179599daf\": container with ID starting with 461bfd86de1c283e8272616e934774484e1789ccd2373ef078cab4a179599daf not found: ID does not exist" containerID="461bfd86de1c283e8272616e934774484e1789ccd2373ef078cab4a179599daf" Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.960545 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"461bfd86de1c283e8272616e934774484e1789ccd2373ef078cab4a179599daf"} err="failed to get container status \"461bfd86de1c283e8272616e934774484e1789ccd2373ef078cab4a179599daf\": rpc error: 
code = NotFound desc = could not find container \"461bfd86de1c283e8272616e934774484e1789ccd2373ef078cab4a179599daf\": container with ID starting with 461bfd86de1c283e8272616e934774484e1789ccd2373ef078cab4a179599daf not found: ID does not exist" Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.960562 4773 scope.go:117] "RemoveContainer" containerID="6dc88d73b7fa6002c2e56c2ee1eb733cbf7f7a1b60aee2814f360cb8537d5cf7" Jan 22 12:30:57 crc kubenswrapper[4773]: E0122 12:30:57.960799 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6dc88d73b7fa6002c2e56c2ee1eb733cbf7f7a1b60aee2814f360cb8537d5cf7\": container with ID starting with 6dc88d73b7fa6002c2e56c2ee1eb733cbf7f7a1b60aee2814f360cb8537d5cf7 not found: ID does not exist" containerID="6dc88d73b7fa6002c2e56c2ee1eb733cbf7f7a1b60aee2814f360cb8537d5cf7" Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.960821 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6dc88d73b7fa6002c2e56c2ee1eb733cbf7f7a1b60aee2814f360cb8537d5cf7"} err="failed to get container status \"6dc88d73b7fa6002c2e56c2ee1eb733cbf7f7a1b60aee2814f360cb8537d5cf7\": rpc error: code = NotFound desc = could not find container \"6dc88d73b7fa6002c2e56c2ee1eb733cbf7f7a1b60aee2814f360cb8537d5cf7\": container with ID starting with 6dc88d73b7fa6002c2e56c2ee1eb733cbf7f7a1b60aee2814f360cb8537d5cf7 not found: ID does not exist" Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.966956 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqttw\" (UniqueName: \"kubernetes.io/projected/3cc48769-ee7f-4eb5-8324-69144780fce5-kube-api-access-kqttw\") on node \"crc\" DevicePath \"\"" Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.966985 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3cc48769-ee7f-4eb5-8324-69144780fce5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 12:30:57 crc kubenswrapper[4773]: I0122 12:30:57.966995 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3cc48769-ee7f-4eb5-8324-69144780fce5-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 12:30:58 crc kubenswrapper[4773]: I0122 12:30:58.191356 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-b284l"] Jan 22 12:30:58 crc kubenswrapper[4773]: I0122 12:30:58.197991 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-b284l"] Jan 22 12:30:58 crc kubenswrapper[4773]: I0122 12:30:58.667059 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cc48769-ee7f-4eb5-8324-69144780fce5" path="/var/lib/kubelet/pods/3cc48769-ee7f-4eb5-8324-69144780fce5/volumes" Jan 22 12:31:02 crc kubenswrapper[4773]: I0122 12:31:02.417204 4773 scope.go:117] "RemoveContainer" containerID="ce0f364ec23d46638c2b8943d476778894a8a9b7faa8359e7f4764cc1c6c3d67" Jan 22 12:31:34 crc kubenswrapper[4773]: I0122 12:31:34.074661 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:31:34 crc kubenswrapper[4773]: I0122 12:31:34.079147 4773 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:32:04 crc kubenswrapper[4773]: I0122 12:32:04.074902 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:32:04 crc kubenswrapper[4773]: I0122 12:32:04.075515 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:32:34 crc kubenswrapper[4773]: I0122 12:32:34.074759 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:32:34 crc kubenswrapper[4773]: I0122 12:32:34.075417 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:32:34 crc kubenswrapper[4773]: I0122 12:32:34.075510 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 12:32:34 crc kubenswrapper[4773]: I0122 12:32:34.077553 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 12:32:34 crc kubenswrapper[4773]: I0122 12:32:34.077842 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" gracePeriod=600 Jan 22 12:32:34 crc kubenswrapper[4773]: E0122 12:32:34.200334 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:32:34 crc kubenswrapper[4773]: I0122 12:32:34.731818 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" exitCode=0 Jan 
22 12:32:34 crc kubenswrapper[4773]: I0122 12:32:34.731913 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac"} Jan 22 12:32:34 crc kubenswrapper[4773]: I0122 12:32:34.732030 4773 scope.go:117] "RemoveContainer" containerID="eb3716c89db4b8ac7200ec86afec026406c4026858a60bd199ed8fb4bba2ea89" Jan 22 12:32:34 crc kubenswrapper[4773]: I0122 12:32:34.733112 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:32:34 crc kubenswrapper[4773]: E0122 12:32:34.733638 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:32:47 crc kubenswrapper[4773]: I0122 12:32:47.743774 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:32:47 crc kubenswrapper[4773]: E0122 12:32:47.744491 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:32:58 crc kubenswrapper[4773]: I0122 12:32:58.658646 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:32:58 crc kubenswrapper[4773]: E0122 12:32:58.659376 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:33:10 crc kubenswrapper[4773]: I0122 12:33:10.659088 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:33:10 crc kubenswrapper[4773]: E0122 12:33:10.659975 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:33:24 crc kubenswrapper[4773]: I0122 12:33:24.658175 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:33:24 crc kubenswrapper[4773]: E0122 12:33:24.659019 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:33:37 crc kubenswrapper[4773]: I0122 12:33:37.658486 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:33:37 crc kubenswrapper[4773]: E0122 12:33:37.659606 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:33:49 crc kubenswrapper[4773]: I0122 12:33:49.658607 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:33:49 crc kubenswrapper[4773]: E0122 12:33:49.659691 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:34:01 crc kubenswrapper[4773]: I0122 12:34:01.658078 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:34:01 crc kubenswrapper[4773]: E0122 12:34:01.659032 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:34:12 crc kubenswrapper[4773]: I0122 12:34:12.662931 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:34:12 crc kubenswrapper[4773]: E0122 12:34:12.663879 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:34:25 crc kubenswrapper[4773]: I0122 12:34:25.658919 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:34:25 crc kubenswrapper[4773]: E0122 12:34:25.660369 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:34:39 crc kubenswrapper[4773]: I0122 12:34:39.658073 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:34:39 crc kubenswrapper[4773]: E0122 12:34:39.658771 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:34:53 crc kubenswrapper[4773]: I0122 12:34:53.658496 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:34:53 crc kubenswrapper[4773]: E0122 12:34:53.659354 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:35:06 crc kubenswrapper[4773]: I0122 12:35:06.659443 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:35:06 crc kubenswrapper[4773]: E0122 12:35:06.660060 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:35:21 crc kubenswrapper[4773]: I0122 12:35:21.658370 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:35:21 crc kubenswrapper[4773]: E0122 12:35:21.659756 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:35:34 crc kubenswrapper[4773]: I0122 12:35:34.658823 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:35:34 crc kubenswrapper[4773]: E0122 12:35:34.659589 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" 
podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:35:45 crc kubenswrapper[4773]: I0122 12:35:45.657590 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:35:45 crc kubenswrapper[4773]: E0122 12:35:45.658242 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:35:59 crc kubenswrapper[4773]: I0122 12:35:59.658735 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:35:59 crc kubenswrapper[4773]: E0122 12:35:59.659720 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:36:09 crc kubenswrapper[4773]: I0122 12:36:09.489919 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hwqrp"] Jan 22 12:36:09 crc kubenswrapper[4773]: E0122 12:36:09.490836 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cc48769-ee7f-4eb5-8324-69144780fce5" containerName="extract-content" Jan 22 12:36:09 crc kubenswrapper[4773]: I0122 12:36:09.490856 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cc48769-ee7f-4eb5-8324-69144780fce5" containerName="extract-content" Jan 22 12:36:09 crc kubenswrapper[4773]: E0122 12:36:09.490871 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cc48769-ee7f-4eb5-8324-69144780fce5" containerName="extract-utilities" Jan 22 12:36:09 crc kubenswrapper[4773]: I0122 12:36:09.490880 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cc48769-ee7f-4eb5-8324-69144780fce5" containerName="extract-utilities" Jan 22 12:36:09 crc kubenswrapper[4773]: E0122 12:36:09.490890 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cc48769-ee7f-4eb5-8324-69144780fce5" containerName="registry-server" Jan 22 12:36:09 crc kubenswrapper[4773]: I0122 12:36:09.490899 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cc48769-ee7f-4eb5-8324-69144780fce5" containerName="registry-server" Jan 22 12:36:09 crc kubenswrapper[4773]: I0122 12:36:09.491131 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cc48769-ee7f-4eb5-8324-69144780fce5" containerName="registry-server" Jan 22 12:36:09 crc kubenswrapper[4773]: I0122 12:36:09.492941 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hwqrp" Jan 22 12:36:09 crc kubenswrapper[4773]: I0122 12:36:09.501545 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hwqrp"] Jan 22 12:36:09 crc kubenswrapper[4773]: I0122 12:36:09.662795 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a69ba0fe-7996-4d24-8b39-655de404027f-utilities\") pod \"community-operators-hwqrp\" (UID: \"a69ba0fe-7996-4d24-8b39-655de404027f\") " pod="openshift-marketplace/community-operators-hwqrp" Jan 22 12:36:09 crc kubenswrapper[4773]: I0122 12:36:09.663149 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a69ba0fe-7996-4d24-8b39-655de404027f-catalog-content\") pod \"community-operators-hwqrp\" (UID: \"a69ba0fe-7996-4d24-8b39-655de404027f\") " pod="openshift-marketplace/community-operators-hwqrp" Jan 22 12:36:09 crc kubenswrapper[4773]: I0122 12:36:09.663197 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7v8hd\" (UniqueName: \"kubernetes.io/projected/a69ba0fe-7996-4d24-8b39-655de404027f-kube-api-access-7v8hd\") pod \"community-operators-hwqrp\" (UID: \"a69ba0fe-7996-4d24-8b39-655de404027f\") " pod="openshift-marketplace/community-operators-hwqrp" Jan 22 12:36:09 crc kubenswrapper[4773]: I0122 12:36:09.764523 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a69ba0fe-7996-4d24-8b39-655de404027f-catalog-content\") pod \"community-operators-hwqrp\" (UID: \"a69ba0fe-7996-4d24-8b39-655de404027f\") " pod="openshift-marketplace/community-operators-hwqrp" Jan 22 12:36:09 crc kubenswrapper[4773]: I0122 12:36:09.764646 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7v8hd\" (UniqueName: \"kubernetes.io/projected/a69ba0fe-7996-4d24-8b39-655de404027f-kube-api-access-7v8hd\") pod \"community-operators-hwqrp\" (UID: \"a69ba0fe-7996-4d24-8b39-655de404027f\") " pod="openshift-marketplace/community-operators-hwqrp" Jan 22 12:36:09 crc kubenswrapper[4773]: I0122 12:36:09.764797 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a69ba0fe-7996-4d24-8b39-655de404027f-utilities\") pod \"community-operators-hwqrp\" (UID: \"a69ba0fe-7996-4d24-8b39-655de404027f\") " pod="openshift-marketplace/community-operators-hwqrp" Jan 22 12:36:09 crc kubenswrapper[4773]: I0122 12:36:09.765395 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a69ba0fe-7996-4d24-8b39-655de404027f-utilities\") pod \"community-operators-hwqrp\" (UID: \"a69ba0fe-7996-4d24-8b39-655de404027f\") " pod="openshift-marketplace/community-operators-hwqrp" Jan 22 12:36:09 crc kubenswrapper[4773]: I0122 12:36:09.765529 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a69ba0fe-7996-4d24-8b39-655de404027f-catalog-content\") pod \"community-operators-hwqrp\" (UID: \"a69ba0fe-7996-4d24-8b39-655de404027f\") " pod="openshift-marketplace/community-operators-hwqrp" Jan 22 12:36:09 crc kubenswrapper[4773]: I0122 12:36:09.786042 4773 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7v8hd\" (UniqueName: \"kubernetes.io/projected/a69ba0fe-7996-4d24-8b39-655de404027f-kube-api-access-7v8hd\") pod \"community-operators-hwqrp\" (UID: \"a69ba0fe-7996-4d24-8b39-655de404027f\") " pod="openshift-marketplace/community-operators-hwqrp" Jan 22 12:36:09 crc kubenswrapper[4773]: I0122 12:36:09.825237 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hwqrp" Jan 22 12:36:10 crc kubenswrapper[4773]: I0122 12:36:10.517805 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hwqrp"] Jan 22 12:36:11 crc kubenswrapper[4773]: I0122 12:36:11.226938 4773 generic.go:334] "Generic (PLEG): container finished" podID="a69ba0fe-7996-4d24-8b39-655de404027f" containerID="0fe7e2318832737d86fe600c1393556474f77a6f16477cce10a55943aca51351" exitCode=0 Jan 22 12:36:11 crc kubenswrapper[4773]: I0122 12:36:11.227069 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hwqrp" event={"ID":"a69ba0fe-7996-4d24-8b39-655de404027f","Type":"ContainerDied","Data":"0fe7e2318832737d86fe600c1393556474f77a6f16477cce10a55943aca51351"} Jan 22 12:36:11 crc kubenswrapper[4773]: I0122 12:36:11.227505 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hwqrp" event={"ID":"a69ba0fe-7996-4d24-8b39-655de404027f","Type":"ContainerStarted","Data":"17f2862b45f932a5d36dffb97d099518ea4c4436d0fad0fda805d41af12d565f"} Jan 22 12:36:11 crc kubenswrapper[4773]: I0122 12:36:11.229409 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 12:36:12 crc kubenswrapper[4773]: I0122 12:36:12.237757 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hwqrp" event={"ID":"a69ba0fe-7996-4d24-8b39-655de404027f","Type":"ContainerStarted","Data":"a3fb385ad3726b9bb8abc90ae7b4e03a721c764be3a5af54413fa82e56b6ff23"} Jan 22 12:36:13 crc kubenswrapper[4773]: I0122 12:36:13.249505 4773 generic.go:334] "Generic (PLEG): container finished" podID="a69ba0fe-7996-4d24-8b39-655de404027f" containerID="a3fb385ad3726b9bb8abc90ae7b4e03a721c764be3a5af54413fa82e56b6ff23" exitCode=0 Jan 22 12:36:13 crc kubenswrapper[4773]: I0122 12:36:13.249598 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hwqrp" event={"ID":"a69ba0fe-7996-4d24-8b39-655de404027f","Type":"ContainerDied","Data":"a3fb385ad3726b9bb8abc90ae7b4e03a721c764be3a5af54413fa82e56b6ff23"} Jan 22 12:36:14 crc kubenswrapper[4773]: I0122 12:36:14.260732 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hwqrp" event={"ID":"a69ba0fe-7996-4d24-8b39-655de404027f","Type":"ContainerStarted","Data":"c426b391503107adf1fb26c4f9d046d0a66e65903b251ccf1b2e308232b9e373"} Jan 22 12:36:14 crc kubenswrapper[4773]: I0122 12:36:14.286468 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hwqrp" podStartSLOduration=2.859859587 podStartE2EDuration="5.286436269s" podCreationTimestamp="2026-01-22 12:36:09 +0000 UTC" firstStartedPulling="2026-01-22 12:36:11.229055763 +0000 UTC m=+2478.807171598" lastFinishedPulling="2026-01-22 12:36:13.655632435 +0000 UTC m=+2481.233748280" observedRunningTime="2026-01-22 12:36:14.283126456 +0000 UTC m=+2481.861242291" watchObservedRunningTime="2026-01-22 
12:36:14.286436269 +0000 UTC m=+2481.864552094" Jan 22 12:36:14 crc kubenswrapper[4773]: I0122 12:36:14.658264 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:36:14 crc kubenswrapper[4773]: E0122 12:36:14.658475 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:36:19 crc kubenswrapper[4773]: I0122 12:36:19.825731 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hwqrp" Jan 22 12:36:19 crc kubenswrapper[4773]: I0122 12:36:19.826299 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hwqrp" Jan 22 12:36:19 crc kubenswrapper[4773]: I0122 12:36:19.869937 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hwqrp" Jan 22 12:36:20 crc kubenswrapper[4773]: I0122 12:36:20.371697 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hwqrp" Jan 22 12:36:20 crc kubenswrapper[4773]: I0122 12:36:20.436878 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hwqrp"] Jan 22 12:36:22 crc kubenswrapper[4773]: I0122 12:36:22.324159 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hwqrp" podUID="a69ba0fe-7996-4d24-8b39-655de404027f" containerName="registry-server" containerID="cri-o://c426b391503107adf1fb26c4f9d046d0a66e65903b251ccf1b2e308232b9e373" gracePeriod=2 Jan 22 12:36:23 crc kubenswrapper[4773]: I0122 12:36:23.335709 4773 generic.go:334] "Generic (PLEG): container finished" podID="a69ba0fe-7996-4d24-8b39-655de404027f" containerID="c426b391503107adf1fb26c4f9d046d0a66e65903b251ccf1b2e308232b9e373" exitCode=0 Jan 22 12:36:23 crc kubenswrapper[4773]: I0122 12:36:23.335925 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hwqrp" event={"ID":"a69ba0fe-7996-4d24-8b39-655de404027f","Type":"ContainerDied","Data":"c426b391503107adf1fb26c4f9d046d0a66e65903b251ccf1b2e308232b9e373"} Jan 22 12:36:23 crc kubenswrapper[4773]: I0122 12:36:23.336082 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hwqrp" event={"ID":"a69ba0fe-7996-4d24-8b39-655de404027f","Type":"ContainerDied","Data":"17f2862b45f932a5d36dffb97d099518ea4c4436d0fad0fda805d41af12d565f"} Jan 22 12:36:23 crc kubenswrapper[4773]: I0122 12:36:23.336121 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="17f2862b45f932a5d36dffb97d099518ea4c4436d0fad0fda805d41af12d565f" Jan 22 12:36:23 crc kubenswrapper[4773]: I0122 12:36:23.351107 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hwqrp" Jan 22 12:36:23 crc kubenswrapper[4773]: I0122 12:36:23.497925 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a69ba0fe-7996-4d24-8b39-655de404027f-catalog-content\") pod \"a69ba0fe-7996-4d24-8b39-655de404027f\" (UID: \"a69ba0fe-7996-4d24-8b39-655de404027f\") " Jan 22 12:36:23 crc kubenswrapper[4773]: I0122 12:36:23.498164 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7v8hd\" (UniqueName: \"kubernetes.io/projected/a69ba0fe-7996-4d24-8b39-655de404027f-kube-api-access-7v8hd\") pod \"a69ba0fe-7996-4d24-8b39-655de404027f\" (UID: \"a69ba0fe-7996-4d24-8b39-655de404027f\") " Jan 22 12:36:23 crc kubenswrapper[4773]: I0122 12:36:23.498251 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a69ba0fe-7996-4d24-8b39-655de404027f-utilities\") pod \"a69ba0fe-7996-4d24-8b39-655de404027f\" (UID: \"a69ba0fe-7996-4d24-8b39-655de404027f\") " Jan 22 12:36:23 crc kubenswrapper[4773]: I0122 12:36:23.499888 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a69ba0fe-7996-4d24-8b39-655de404027f-utilities" (OuterVolumeSpecName: "utilities") pod "a69ba0fe-7996-4d24-8b39-655de404027f" (UID: "a69ba0fe-7996-4d24-8b39-655de404027f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:36:23 crc kubenswrapper[4773]: I0122 12:36:23.505504 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a69ba0fe-7996-4d24-8b39-655de404027f-kube-api-access-7v8hd" (OuterVolumeSpecName: "kube-api-access-7v8hd") pod "a69ba0fe-7996-4d24-8b39-655de404027f" (UID: "a69ba0fe-7996-4d24-8b39-655de404027f"). InnerVolumeSpecName "kube-api-access-7v8hd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:36:23 crc kubenswrapper[4773]: I0122 12:36:23.550733 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a69ba0fe-7996-4d24-8b39-655de404027f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a69ba0fe-7996-4d24-8b39-655de404027f" (UID: "a69ba0fe-7996-4d24-8b39-655de404027f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:36:23 crc kubenswrapper[4773]: I0122 12:36:23.600939 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7v8hd\" (UniqueName: \"kubernetes.io/projected/a69ba0fe-7996-4d24-8b39-655de404027f-kube-api-access-7v8hd\") on node \"crc\" DevicePath \"\"" Jan 22 12:36:23 crc kubenswrapper[4773]: I0122 12:36:23.601270 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a69ba0fe-7996-4d24-8b39-655de404027f-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 12:36:23 crc kubenswrapper[4773]: I0122 12:36:23.601449 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a69ba0fe-7996-4d24-8b39-655de404027f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 12:36:24 crc kubenswrapper[4773]: I0122 12:36:24.346627 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hwqrp" Jan 22 12:36:24 crc kubenswrapper[4773]: I0122 12:36:24.402979 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hwqrp"] Jan 22 12:36:24 crc kubenswrapper[4773]: I0122 12:36:24.412314 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hwqrp"] Jan 22 12:36:24 crc kubenswrapper[4773]: I0122 12:36:24.674812 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a69ba0fe-7996-4d24-8b39-655de404027f" path="/var/lib/kubelet/pods/a69ba0fe-7996-4d24-8b39-655de404027f/volumes" Jan 22 12:36:28 crc kubenswrapper[4773]: I0122 12:36:28.658139 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:36:28 crc kubenswrapper[4773]: E0122 12:36:28.658961 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:36:41 crc kubenswrapper[4773]: I0122 12:36:41.657984 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:36:41 crc kubenswrapper[4773]: E0122 12:36:41.659451 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:36:50 crc kubenswrapper[4773]: I0122 12:36:50.877427 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hxkjq"] Jan 22 12:36:50 crc kubenswrapper[4773]: E0122 12:36:50.878426 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a69ba0fe-7996-4d24-8b39-655de404027f" containerName="extract-utilities" Jan 22 12:36:50 crc kubenswrapper[4773]: I0122 12:36:50.878450 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a69ba0fe-7996-4d24-8b39-655de404027f" containerName="extract-utilities" Jan 22 12:36:50 crc kubenswrapper[4773]: E0122 12:36:50.878480 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a69ba0fe-7996-4d24-8b39-655de404027f" containerName="registry-server" Jan 22 12:36:50 crc kubenswrapper[4773]: I0122 12:36:50.878494 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a69ba0fe-7996-4d24-8b39-655de404027f" containerName="registry-server" Jan 22 12:36:50 crc kubenswrapper[4773]: E0122 12:36:50.878525 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a69ba0fe-7996-4d24-8b39-655de404027f" containerName="extract-content" Jan 22 12:36:50 crc kubenswrapper[4773]: I0122 12:36:50.878535 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a69ba0fe-7996-4d24-8b39-655de404027f" containerName="extract-content" Jan 22 12:36:50 crc kubenswrapper[4773]: I0122 12:36:50.878742 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="a69ba0fe-7996-4d24-8b39-655de404027f" 
containerName="registry-server" Jan 22 12:36:50 crc kubenswrapper[4773]: I0122 12:36:50.880498 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hxkjq" Jan 22 12:36:50 crc kubenswrapper[4773]: I0122 12:36:50.894854 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hxkjq"] Jan 22 12:36:50 crc kubenswrapper[4773]: I0122 12:36:50.954501 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bab565d9-eba0-4cb2-8192-da92fb4f12d7-catalog-content\") pod \"redhat-marketplace-hxkjq\" (UID: \"bab565d9-eba0-4cb2-8192-da92fb4f12d7\") " pod="openshift-marketplace/redhat-marketplace-hxkjq" Jan 22 12:36:50 crc kubenswrapper[4773]: I0122 12:36:50.954571 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qw4xb\" (UniqueName: \"kubernetes.io/projected/bab565d9-eba0-4cb2-8192-da92fb4f12d7-kube-api-access-qw4xb\") pod \"redhat-marketplace-hxkjq\" (UID: \"bab565d9-eba0-4cb2-8192-da92fb4f12d7\") " pod="openshift-marketplace/redhat-marketplace-hxkjq" Jan 22 12:36:50 crc kubenswrapper[4773]: I0122 12:36:50.954618 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bab565d9-eba0-4cb2-8192-da92fb4f12d7-utilities\") pod \"redhat-marketplace-hxkjq\" (UID: \"bab565d9-eba0-4cb2-8192-da92fb4f12d7\") " pod="openshift-marketplace/redhat-marketplace-hxkjq" Jan 22 12:36:51 crc kubenswrapper[4773]: I0122 12:36:51.056333 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bab565d9-eba0-4cb2-8192-da92fb4f12d7-catalog-content\") pod \"redhat-marketplace-hxkjq\" (UID: \"bab565d9-eba0-4cb2-8192-da92fb4f12d7\") " pod="openshift-marketplace/redhat-marketplace-hxkjq" Jan 22 12:36:51 crc kubenswrapper[4773]: I0122 12:36:51.056455 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qw4xb\" (UniqueName: \"kubernetes.io/projected/bab565d9-eba0-4cb2-8192-da92fb4f12d7-kube-api-access-qw4xb\") pod \"redhat-marketplace-hxkjq\" (UID: \"bab565d9-eba0-4cb2-8192-da92fb4f12d7\") " pod="openshift-marketplace/redhat-marketplace-hxkjq" Jan 22 12:36:51 crc kubenswrapper[4773]: I0122 12:36:51.056524 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bab565d9-eba0-4cb2-8192-da92fb4f12d7-utilities\") pod \"redhat-marketplace-hxkjq\" (UID: \"bab565d9-eba0-4cb2-8192-da92fb4f12d7\") " pod="openshift-marketplace/redhat-marketplace-hxkjq" Jan 22 12:36:51 crc kubenswrapper[4773]: I0122 12:36:51.056909 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bab565d9-eba0-4cb2-8192-da92fb4f12d7-catalog-content\") pod \"redhat-marketplace-hxkjq\" (UID: \"bab565d9-eba0-4cb2-8192-da92fb4f12d7\") " pod="openshift-marketplace/redhat-marketplace-hxkjq" Jan 22 12:36:51 crc kubenswrapper[4773]: I0122 12:36:51.057136 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bab565d9-eba0-4cb2-8192-da92fb4f12d7-utilities\") pod \"redhat-marketplace-hxkjq\" (UID: \"bab565d9-eba0-4cb2-8192-da92fb4f12d7\") " 
pod="openshift-marketplace/redhat-marketplace-hxkjq" Jan 22 12:36:51 crc kubenswrapper[4773]: I0122 12:36:51.079870 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qw4xb\" (UniqueName: \"kubernetes.io/projected/bab565d9-eba0-4cb2-8192-da92fb4f12d7-kube-api-access-qw4xb\") pod \"redhat-marketplace-hxkjq\" (UID: \"bab565d9-eba0-4cb2-8192-da92fb4f12d7\") " pod="openshift-marketplace/redhat-marketplace-hxkjq" Jan 22 12:36:51 crc kubenswrapper[4773]: I0122 12:36:51.238223 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hxkjq" Jan 22 12:36:51 crc kubenswrapper[4773]: I0122 12:36:51.692111 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hxkjq"] Jan 22 12:36:52 crc kubenswrapper[4773]: I0122 12:36:52.621538 4773 generic.go:334] "Generic (PLEG): container finished" podID="bab565d9-eba0-4cb2-8192-da92fb4f12d7" containerID="fd4b808224802dbc65a6204e2d3b1069c527e36b337c7f8a04b5748a65059e12" exitCode=0 Jan 22 12:36:52 crc kubenswrapper[4773]: I0122 12:36:52.621794 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hxkjq" event={"ID":"bab565d9-eba0-4cb2-8192-da92fb4f12d7","Type":"ContainerDied","Data":"fd4b808224802dbc65a6204e2d3b1069c527e36b337c7f8a04b5748a65059e12"} Jan 22 12:36:52 crc kubenswrapper[4773]: I0122 12:36:52.621824 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hxkjq" event={"ID":"bab565d9-eba0-4cb2-8192-da92fb4f12d7","Type":"ContainerStarted","Data":"abfb4743452d73fc99069dc5025d3e9ad21ead7132a606cad105060b059fc948"} Jan 22 12:36:53 crc kubenswrapper[4773]: I0122 12:36:53.629974 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hxkjq" event={"ID":"bab565d9-eba0-4cb2-8192-da92fb4f12d7","Type":"ContainerStarted","Data":"c21f2b7097d4a2293e351284785744ff86ab123906eff4083cdbf0f7610c017f"} Jan 22 12:36:53 crc kubenswrapper[4773]: I0122 12:36:53.658216 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:36:53 crc kubenswrapper[4773]: E0122 12:36:53.658472 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:36:54 crc kubenswrapper[4773]: I0122 12:36:54.638415 4773 generic.go:334] "Generic (PLEG): container finished" podID="bab565d9-eba0-4cb2-8192-da92fb4f12d7" containerID="c21f2b7097d4a2293e351284785744ff86ab123906eff4083cdbf0f7610c017f" exitCode=0 Jan 22 12:36:54 crc kubenswrapper[4773]: I0122 12:36:54.638474 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hxkjq" event={"ID":"bab565d9-eba0-4cb2-8192-da92fb4f12d7","Type":"ContainerDied","Data":"c21f2b7097d4a2293e351284785744ff86ab123906eff4083cdbf0f7610c017f"} Jan 22 12:36:55 crc kubenswrapper[4773]: I0122 12:36:55.651323 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hxkjq" 
event={"ID":"bab565d9-eba0-4cb2-8192-da92fb4f12d7","Type":"ContainerStarted","Data":"a8ea14c43fe968f35fb34547b0bbaefd55acf6d9aaf79271c054e69c8980b0f5"} Jan 22 12:36:55 crc kubenswrapper[4773]: I0122 12:36:55.679990 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hxkjq" podStartSLOduration=3.294449125 podStartE2EDuration="5.679960736s" podCreationTimestamp="2026-01-22 12:36:50 +0000 UTC" firstStartedPulling="2026-01-22 12:36:52.624543504 +0000 UTC m=+2520.202659329" lastFinishedPulling="2026-01-22 12:36:55.010055075 +0000 UTC m=+2522.588170940" observedRunningTime="2026-01-22 12:36:55.671101627 +0000 UTC m=+2523.249217502" watchObservedRunningTime="2026-01-22 12:36:55.679960736 +0000 UTC m=+2523.258076931" Jan 22 12:37:01 crc kubenswrapper[4773]: I0122 12:37:01.238708 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hxkjq" Jan 22 12:37:01 crc kubenswrapper[4773]: I0122 12:37:01.239545 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hxkjq" Jan 22 12:37:01 crc kubenswrapper[4773]: I0122 12:37:01.306420 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hxkjq" Jan 22 12:37:01 crc kubenswrapper[4773]: I0122 12:37:01.757889 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hxkjq" Jan 22 12:37:01 crc kubenswrapper[4773]: I0122 12:37:01.805412 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hxkjq"] Jan 22 12:37:03 crc kubenswrapper[4773]: I0122 12:37:03.708105 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hxkjq" podUID="bab565d9-eba0-4cb2-8192-da92fb4f12d7" containerName="registry-server" containerID="cri-o://a8ea14c43fe968f35fb34547b0bbaefd55acf6d9aaf79271c054e69c8980b0f5" gracePeriod=2 Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.088632 4773 util.go:48] "No ready sandbox for pod can be found. 
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.160791 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bab565d9-eba0-4cb2-8192-da92fb4f12d7-utilities\") pod \"bab565d9-eba0-4cb2-8192-da92fb4f12d7\" (UID: \"bab565d9-eba0-4cb2-8192-da92fb4f12d7\") "
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.160871 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bab565d9-eba0-4cb2-8192-da92fb4f12d7-catalog-content\") pod \"bab565d9-eba0-4cb2-8192-da92fb4f12d7\" (UID: \"bab565d9-eba0-4cb2-8192-da92fb4f12d7\") "
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.160895 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qw4xb\" (UniqueName: \"kubernetes.io/projected/bab565d9-eba0-4cb2-8192-da92fb4f12d7-kube-api-access-qw4xb\") pod \"bab565d9-eba0-4cb2-8192-da92fb4f12d7\" (UID: \"bab565d9-eba0-4cb2-8192-da92fb4f12d7\") "
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.162893 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bab565d9-eba0-4cb2-8192-da92fb4f12d7-utilities" (OuterVolumeSpecName: "utilities") pod "bab565d9-eba0-4cb2-8192-da92fb4f12d7" (UID: "bab565d9-eba0-4cb2-8192-da92fb4f12d7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.169734 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bab565d9-eba0-4cb2-8192-da92fb4f12d7-kube-api-access-qw4xb" (OuterVolumeSpecName: "kube-api-access-qw4xb") pod "bab565d9-eba0-4cb2-8192-da92fb4f12d7" (UID: "bab565d9-eba0-4cb2-8192-da92fb4f12d7"). InnerVolumeSpecName "kube-api-access-qw4xb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.254877 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bab565d9-eba0-4cb2-8192-da92fb4f12d7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bab565d9-eba0-4cb2-8192-da92fb4f12d7" (UID: "bab565d9-eba0-4cb2-8192-da92fb4f12d7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.262396 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qw4xb\" (UniqueName: \"kubernetes.io/projected/bab565d9-eba0-4cb2-8192-da92fb4f12d7-kube-api-access-qw4xb\") on node \"crc\" DevicePath \"\""
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.262549 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bab565d9-eba0-4cb2-8192-da92fb4f12d7-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.262557 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bab565d9-eba0-4cb2-8192-da92fb4f12d7-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.721328 4773 generic.go:334] "Generic (PLEG): container finished" podID="bab565d9-eba0-4cb2-8192-da92fb4f12d7" containerID="a8ea14c43fe968f35fb34547b0bbaefd55acf6d9aaf79271c054e69c8980b0f5" exitCode=0
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.721372 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hxkjq"
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.721398 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hxkjq" event={"ID":"bab565d9-eba0-4cb2-8192-da92fb4f12d7","Type":"ContainerDied","Data":"a8ea14c43fe968f35fb34547b0bbaefd55acf6d9aaf79271c054e69c8980b0f5"}
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.721431 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hxkjq" event={"ID":"bab565d9-eba0-4cb2-8192-da92fb4f12d7","Type":"ContainerDied","Data":"abfb4743452d73fc99069dc5025d3e9ad21ead7132a606cad105060b059fc948"}
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.721451 4773 scope.go:117] "RemoveContainer" containerID="a8ea14c43fe968f35fb34547b0bbaefd55acf6d9aaf79271c054e69c8980b0f5"
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.749543 4773 scope.go:117] "RemoveContainer" containerID="c21f2b7097d4a2293e351284785744ff86ab123906eff4083cdbf0f7610c017f"
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.754316 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hxkjq"]
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.762068 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hxkjq"]
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.771269 4773 scope.go:117] "RemoveContainer" containerID="fd4b808224802dbc65a6204e2d3b1069c527e36b337c7f8a04b5748a65059e12"
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.801371 4773 scope.go:117] "RemoveContainer" containerID="a8ea14c43fe968f35fb34547b0bbaefd55acf6d9aaf79271c054e69c8980b0f5"
Jan 22 12:37:04 crc kubenswrapper[4773]: E0122 12:37:04.801877 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8ea14c43fe968f35fb34547b0bbaefd55acf6d9aaf79271c054e69c8980b0f5\": container with ID starting with a8ea14c43fe968f35fb34547b0bbaefd55acf6d9aaf79271c054e69c8980b0f5 not found: ID does not exist" containerID="a8ea14c43fe968f35fb34547b0bbaefd55acf6d9aaf79271c054e69c8980b0f5"
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.801918 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8ea14c43fe968f35fb34547b0bbaefd55acf6d9aaf79271c054e69c8980b0f5"} err="failed to get container status \"a8ea14c43fe968f35fb34547b0bbaefd55acf6d9aaf79271c054e69c8980b0f5\": rpc error: code = NotFound desc = could not find container \"a8ea14c43fe968f35fb34547b0bbaefd55acf6d9aaf79271c054e69c8980b0f5\": container with ID starting with a8ea14c43fe968f35fb34547b0bbaefd55acf6d9aaf79271c054e69c8980b0f5 not found: ID does not exist"
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.801950 4773 scope.go:117] "RemoveContainer" containerID="c21f2b7097d4a2293e351284785744ff86ab123906eff4083cdbf0f7610c017f"
Jan 22 12:37:04 crc kubenswrapper[4773]: E0122 12:37:04.802357 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c21f2b7097d4a2293e351284785744ff86ab123906eff4083cdbf0f7610c017f\": container with ID starting with c21f2b7097d4a2293e351284785744ff86ab123906eff4083cdbf0f7610c017f not found: ID does not exist" containerID="c21f2b7097d4a2293e351284785744ff86ab123906eff4083cdbf0f7610c017f"
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.802392 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c21f2b7097d4a2293e351284785744ff86ab123906eff4083cdbf0f7610c017f"} err="failed to get container status \"c21f2b7097d4a2293e351284785744ff86ab123906eff4083cdbf0f7610c017f\": rpc error: code = NotFound desc = could not find container \"c21f2b7097d4a2293e351284785744ff86ab123906eff4083cdbf0f7610c017f\": container with ID starting with c21f2b7097d4a2293e351284785744ff86ab123906eff4083cdbf0f7610c017f not found: ID does not exist"
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.802410 4773 scope.go:117] "RemoveContainer" containerID="fd4b808224802dbc65a6204e2d3b1069c527e36b337c7f8a04b5748a65059e12"
Jan 22 12:37:04 crc kubenswrapper[4773]: E0122 12:37:04.802649 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd4b808224802dbc65a6204e2d3b1069c527e36b337c7f8a04b5748a65059e12\": container with ID starting with fd4b808224802dbc65a6204e2d3b1069c527e36b337c7f8a04b5748a65059e12 not found: ID does not exist" containerID="fd4b808224802dbc65a6204e2d3b1069c527e36b337c7f8a04b5748a65059e12"
Jan 22 12:37:04 crc kubenswrapper[4773]: I0122 12:37:04.802675 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd4b808224802dbc65a6204e2d3b1069c527e36b337c7f8a04b5748a65059e12"} err="failed to get container status \"fd4b808224802dbc65a6204e2d3b1069c527e36b337c7f8a04b5748a65059e12\": rpc error: code = NotFound desc = could not find container \"fd4b808224802dbc65a6204e2d3b1069c527e36b337c7f8a04b5748a65059e12\": container with ID starting with fd4b808224802dbc65a6204e2d3b1069c527e36b337c7f8a04b5748a65059e12 not found: ID does not exist"
Jan 22 12:37:06 crc kubenswrapper[4773]: I0122 12:37:06.667403 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bab565d9-eba0-4cb2-8192-da92fb4f12d7" path="/var/lib/kubelet/pods/bab565d9-eba0-4cb2-8192-da92fb4f12d7/volumes"
Jan 22 12:37:07 crc kubenswrapper[4773]: I0122 12:37:07.658451 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac"
Jan 22 12:37:07 crc kubenswrapper[4773]: E0122 12:37:07.658727 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:37:18 crc kubenswrapper[4773]: I0122 12:37:18.658943 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac"
Jan 22 12:37:18 crc kubenswrapper[4773]: E0122 12:37:18.659849 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:37:29 crc kubenswrapper[4773]: I0122 12:37:29.658258 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac"
Jan 22 12:37:29 crc kubenswrapper[4773]: E0122 12:37:29.659119 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:37:40 crc kubenswrapper[4773]: I0122 12:37:40.658918 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac"
Jan 22 12:37:41 crc kubenswrapper[4773]: I0122 12:37:41.024846 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"ce34f2feb8bde5a21419e6c5ed6ef15fa76c10eb84f1b408ef4745e198ff003f"}
Jan 22 12:38:42 crc kubenswrapper[4773]: I0122 12:38:42.309823 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2q9kr"]
Jan 22 12:38:42 crc kubenswrapper[4773]: E0122 12:38:42.313980 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bab565d9-eba0-4cb2-8192-da92fb4f12d7" containerName="extract-content"
Jan 22 12:38:42 crc kubenswrapper[4773]: I0122 12:38:42.314040 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="bab565d9-eba0-4cb2-8192-da92fb4f12d7" containerName="extract-content"
Jan 22 12:38:42 crc kubenswrapper[4773]: E0122 12:38:42.314090 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bab565d9-eba0-4cb2-8192-da92fb4f12d7" containerName="extract-utilities"
Jan 22 12:38:42 crc kubenswrapper[4773]: I0122 12:38:42.314109 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="bab565d9-eba0-4cb2-8192-da92fb4f12d7" containerName="extract-utilities"
Jan 22 12:38:42 crc kubenswrapper[4773]: E0122 12:38:42.314147 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bab565d9-eba0-4cb2-8192-da92fb4f12d7" containerName="registry-server"
Jan 22 12:38:42 crc kubenswrapper[4773]: I0122 12:38:42.314165 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="bab565d9-eba0-4cb2-8192-da92fb4f12d7" containerName="registry-server"
podUID="bab565d9-eba0-4cb2-8192-da92fb4f12d7" containerName="registry-server" Jan 22 12:38:42 crc kubenswrapper[4773]: I0122 12:38:42.314608 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="bab565d9-eba0-4cb2-8192-da92fb4f12d7" containerName="registry-server" Jan 22 12:38:42 crc kubenswrapper[4773]: I0122 12:38:42.317720 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2q9kr" Jan 22 12:38:42 crc kubenswrapper[4773]: I0122 12:38:42.330930 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2q9kr"] Jan 22 12:38:42 crc kubenswrapper[4773]: I0122 12:38:42.414585 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37f04993-18dd-4c5f-806a-005b46504f81-catalog-content\") pod \"redhat-operators-2q9kr\" (UID: \"37f04993-18dd-4c5f-806a-005b46504f81\") " pod="openshift-marketplace/redhat-operators-2q9kr" Jan 22 12:38:42 crc kubenswrapper[4773]: I0122 12:38:42.414957 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbrqz\" (UniqueName: \"kubernetes.io/projected/37f04993-18dd-4c5f-806a-005b46504f81-kube-api-access-rbrqz\") pod \"redhat-operators-2q9kr\" (UID: \"37f04993-18dd-4c5f-806a-005b46504f81\") " pod="openshift-marketplace/redhat-operators-2q9kr" Jan 22 12:38:42 crc kubenswrapper[4773]: I0122 12:38:42.415085 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37f04993-18dd-4c5f-806a-005b46504f81-utilities\") pod \"redhat-operators-2q9kr\" (UID: \"37f04993-18dd-4c5f-806a-005b46504f81\") " pod="openshift-marketplace/redhat-operators-2q9kr" Jan 22 12:38:42 crc kubenswrapper[4773]: I0122 12:38:42.631491 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37f04993-18dd-4c5f-806a-005b46504f81-utilities\") pod \"redhat-operators-2q9kr\" (UID: \"37f04993-18dd-4c5f-806a-005b46504f81\") " pod="openshift-marketplace/redhat-operators-2q9kr" Jan 22 12:38:42 crc kubenswrapper[4773]: I0122 12:38:42.631565 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37f04993-18dd-4c5f-806a-005b46504f81-catalog-content\") pod \"redhat-operators-2q9kr\" (UID: \"37f04993-18dd-4c5f-806a-005b46504f81\") " pod="openshift-marketplace/redhat-operators-2q9kr" Jan 22 12:38:42 crc kubenswrapper[4773]: I0122 12:38:42.631623 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbrqz\" (UniqueName: \"kubernetes.io/projected/37f04993-18dd-4c5f-806a-005b46504f81-kube-api-access-rbrqz\") pod \"redhat-operators-2q9kr\" (UID: \"37f04993-18dd-4c5f-806a-005b46504f81\") " pod="openshift-marketplace/redhat-operators-2q9kr" Jan 22 12:38:42 crc kubenswrapper[4773]: I0122 12:38:42.632454 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37f04993-18dd-4c5f-806a-005b46504f81-utilities\") pod \"redhat-operators-2q9kr\" (UID: \"37f04993-18dd-4c5f-806a-005b46504f81\") " pod="openshift-marketplace/redhat-operators-2q9kr" Jan 22 12:38:42 crc kubenswrapper[4773]: I0122 12:38:42.632554 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37f04993-18dd-4c5f-806a-005b46504f81-catalog-content\") pod \"redhat-operators-2q9kr\" (UID: \"37f04993-18dd-4c5f-806a-005b46504f81\") " pod="openshift-marketplace/redhat-operators-2q9kr" Jan 22 12:38:42 crc kubenswrapper[4773]: I0122 12:38:42.674995 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbrqz\" (UniqueName: \"kubernetes.io/projected/37f04993-18dd-4c5f-806a-005b46504f81-kube-api-access-rbrqz\") pod \"redhat-operators-2q9kr\" (UID: \"37f04993-18dd-4c5f-806a-005b46504f81\") " pod="openshift-marketplace/redhat-operators-2q9kr" Jan 22 12:38:42 crc kubenswrapper[4773]: I0122 12:38:42.964609 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2q9kr" Jan 22 12:38:43 crc kubenswrapper[4773]: I0122 12:38:43.551074 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2q9kr"] Jan 22 12:38:43 crc kubenswrapper[4773]: W0122 12:38:43.556025 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37f04993_18dd_4c5f_806a_005b46504f81.slice/crio-6a05e64b85499329c6f8405ada7eed3d0d1b9246e584d449b42ad6040ab146fd WatchSource:0}: Error finding container 6a05e64b85499329c6f8405ada7eed3d0d1b9246e584d449b42ad6040ab146fd: Status 404 returned error can't find the container with id 6a05e64b85499329c6f8405ada7eed3d0d1b9246e584d449b42ad6040ab146fd Jan 22 12:38:44 crc kubenswrapper[4773]: I0122 12:38:44.328280 4773 generic.go:334] "Generic (PLEG): container finished" podID="37f04993-18dd-4c5f-806a-005b46504f81" containerID="7cfff61abac67779ad5659cf866b0db1ac65e7d7f25f5527c19c45cef6719db7" exitCode=0 Jan 22 12:38:44 crc kubenswrapper[4773]: I0122 12:38:44.328378 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2q9kr" event={"ID":"37f04993-18dd-4c5f-806a-005b46504f81","Type":"ContainerDied","Data":"7cfff61abac67779ad5659cf866b0db1ac65e7d7f25f5527c19c45cef6719db7"} Jan 22 12:38:44 crc kubenswrapper[4773]: I0122 12:38:44.328417 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2q9kr" event={"ID":"37f04993-18dd-4c5f-806a-005b46504f81","Type":"ContainerStarted","Data":"6a05e64b85499329c6f8405ada7eed3d0d1b9246e584d449b42ad6040ab146fd"} Jan 22 12:38:45 crc kubenswrapper[4773]: I0122 12:38:45.339547 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2q9kr" event={"ID":"37f04993-18dd-4c5f-806a-005b46504f81","Type":"ContainerStarted","Data":"dd1798702207784c3ec0aac53c98183f91b0bbcf4afee045bd766c62d01af006"} Jan 22 12:38:47 crc kubenswrapper[4773]: I0122 12:38:47.358079 4773 generic.go:334] "Generic (PLEG): container finished" podID="37f04993-18dd-4c5f-806a-005b46504f81" containerID="dd1798702207784c3ec0aac53c98183f91b0bbcf4afee045bd766c62d01af006" exitCode=0 Jan 22 12:38:47 crc kubenswrapper[4773]: I0122 12:38:47.358150 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2q9kr" event={"ID":"37f04993-18dd-4c5f-806a-005b46504f81","Type":"ContainerDied","Data":"dd1798702207784c3ec0aac53c98183f91b0bbcf4afee045bd766c62d01af006"} Jan 22 12:38:48 crc kubenswrapper[4773]: I0122 12:38:48.370975 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2q9kr" 
event={"ID":"37f04993-18dd-4c5f-806a-005b46504f81","Type":"ContainerStarted","Data":"980475552aac569435ed9b1a6a8a05a45c1eaa5c1d9818578ebba067ea4e83d7"} Jan 22 12:38:48 crc kubenswrapper[4773]: I0122 12:38:48.403968 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2q9kr" podStartSLOduration=2.951067996 podStartE2EDuration="6.403910572s" podCreationTimestamp="2026-01-22 12:38:42 +0000 UTC" firstStartedPulling="2026-01-22 12:38:44.330543262 +0000 UTC m=+2631.908659087" lastFinishedPulling="2026-01-22 12:38:47.783385798 +0000 UTC m=+2635.361501663" observedRunningTime="2026-01-22 12:38:48.391766869 +0000 UTC m=+2635.969882754" watchObservedRunningTime="2026-01-22 12:38:48.403910572 +0000 UTC m=+2635.982026397" Jan 22 12:38:52 crc kubenswrapper[4773]: I0122 12:38:52.965330 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2q9kr" Jan 22 12:38:52 crc kubenswrapper[4773]: I0122 12:38:52.966043 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2q9kr" Jan 22 12:38:54 crc kubenswrapper[4773]: I0122 12:38:54.027109 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2q9kr" podUID="37f04993-18dd-4c5f-806a-005b46504f81" containerName="registry-server" probeResult="failure" output=< Jan 22 12:38:54 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 12:38:54 crc kubenswrapper[4773]: > Jan 22 12:39:03 crc kubenswrapper[4773]: I0122 12:39:03.031571 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2q9kr" Jan 22 12:39:03 crc kubenswrapper[4773]: I0122 12:39:03.093443 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2q9kr" Jan 22 12:39:03 crc kubenswrapper[4773]: I0122 12:39:03.274099 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2q9kr"] Jan 22 12:39:04 crc kubenswrapper[4773]: I0122 12:39:04.525560 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2q9kr" podUID="37f04993-18dd-4c5f-806a-005b46504f81" containerName="registry-server" containerID="cri-o://980475552aac569435ed9b1a6a8a05a45c1eaa5c1d9818578ebba067ea4e83d7" gracePeriod=2 Jan 22 12:39:07 crc kubenswrapper[4773]: I0122 12:39:07.576322 4773 generic.go:334] "Generic (PLEG): container finished" podID="37f04993-18dd-4c5f-806a-005b46504f81" containerID="980475552aac569435ed9b1a6a8a05a45c1eaa5c1d9818578ebba067ea4e83d7" exitCode=0 Jan 22 12:39:07 crc kubenswrapper[4773]: I0122 12:39:07.576373 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2q9kr" event={"ID":"37f04993-18dd-4c5f-806a-005b46504f81","Type":"ContainerDied","Data":"980475552aac569435ed9b1a6a8a05a45c1eaa5c1d9818578ebba067ea4e83d7"} Jan 22 12:39:07 crc kubenswrapper[4773]: I0122 12:39:07.682736 4773 util.go:48] "No ready sandbox for pod can be found. 
Jan 22 12:39:07 crc kubenswrapper[4773]: I0122 12:39:07.708355 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37f04993-18dd-4c5f-806a-005b46504f81-catalog-content\") pod \"37f04993-18dd-4c5f-806a-005b46504f81\" (UID: \"37f04993-18dd-4c5f-806a-005b46504f81\") "
Jan 22 12:39:07 crc kubenswrapper[4773]: I0122 12:39:07.708431 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbrqz\" (UniqueName: \"kubernetes.io/projected/37f04993-18dd-4c5f-806a-005b46504f81-kube-api-access-rbrqz\") pod \"37f04993-18dd-4c5f-806a-005b46504f81\" (UID: \"37f04993-18dd-4c5f-806a-005b46504f81\") "
Jan 22 12:39:07 crc kubenswrapper[4773]: I0122 12:39:07.713991 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37f04993-18dd-4c5f-806a-005b46504f81-kube-api-access-rbrqz" (OuterVolumeSpecName: "kube-api-access-rbrqz") pod "37f04993-18dd-4c5f-806a-005b46504f81" (UID: "37f04993-18dd-4c5f-806a-005b46504f81"). InnerVolumeSpecName "kube-api-access-rbrqz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:39:07 crc kubenswrapper[4773]: I0122 12:39:07.809967 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37f04993-18dd-4c5f-806a-005b46504f81-utilities\") pod \"37f04993-18dd-4c5f-806a-005b46504f81\" (UID: \"37f04993-18dd-4c5f-806a-005b46504f81\") "
Jan 22 12:39:07 crc kubenswrapper[4773]: I0122 12:39:07.810572 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbrqz\" (UniqueName: \"kubernetes.io/projected/37f04993-18dd-4c5f-806a-005b46504f81-kube-api-access-rbrqz\") on node \"crc\" DevicePath \"\""
Jan 22 12:39:07 crc kubenswrapper[4773]: I0122 12:39:07.810729 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37f04993-18dd-4c5f-806a-005b46504f81-utilities" (OuterVolumeSpecName: "utilities") pod "37f04993-18dd-4c5f-806a-005b46504f81" (UID: "37f04993-18dd-4c5f-806a-005b46504f81"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:39:07 crc kubenswrapper[4773]: I0122 12:39:07.853196 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37f04993-18dd-4c5f-806a-005b46504f81-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "37f04993-18dd-4c5f-806a-005b46504f81" (UID: "37f04993-18dd-4c5f-806a-005b46504f81"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:39:07 crc kubenswrapper[4773]: I0122 12:39:07.912551 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37f04993-18dd-4c5f-806a-005b46504f81-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 12:39:07 crc kubenswrapper[4773]: I0122 12:39:07.912584 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37f04993-18dd-4c5f-806a-005b46504f81-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 12:39:08 crc kubenswrapper[4773]: I0122 12:39:08.587754 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2q9kr" event={"ID":"37f04993-18dd-4c5f-806a-005b46504f81","Type":"ContainerDied","Data":"6a05e64b85499329c6f8405ada7eed3d0d1b9246e584d449b42ad6040ab146fd"}
Jan 22 12:39:08 crc kubenswrapper[4773]: I0122 12:39:08.587823 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2q9kr"
Jan 22 12:39:08 crc kubenswrapper[4773]: I0122 12:39:08.587872 4773 scope.go:117] "RemoveContainer" containerID="980475552aac569435ed9b1a6a8a05a45c1eaa5c1d9818578ebba067ea4e83d7"
Jan 22 12:39:08 crc kubenswrapper[4773]: I0122 12:39:08.630350 4773 scope.go:117] "RemoveContainer" containerID="dd1798702207784c3ec0aac53c98183f91b0bbcf4afee045bd766c62d01af006"
Jan 22 12:39:08 crc kubenswrapper[4773]: I0122 12:39:08.633901 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2q9kr"]
Jan 22 12:39:08 crc kubenswrapper[4773]: I0122 12:39:08.645277 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2q9kr"]
Jan 22 12:39:08 crc kubenswrapper[4773]: I0122 12:39:08.670727 4773 scope.go:117] "RemoveContainer" containerID="7cfff61abac67779ad5659cf866b0db1ac65e7d7f25f5527c19c45cef6719db7"
Jan 22 12:39:08 crc kubenswrapper[4773]: I0122 12:39:08.672612 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37f04993-18dd-4c5f-806a-005b46504f81" path="/var/lib/kubelet/pods/37f04993-18dd-4c5f-806a-005b46504f81/volumes"
Jan 22 12:40:04 crc kubenswrapper[4773]: I0122 12:40:04.074228 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 12:40:04 crc kubenswrapper[4773]: I0122 12:40:04.074884 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 12:40:34 crc kubenswrapper[4773]: I0122 12:40:34.074622 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 12:40:34 crc kubenswrapper[4773]: I0122 12:40:34.075138 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:41:04 crc kubenswrapper[4773]: I0122 12:41:04.074495 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:41:04 crc kubenswrapper[4773]: I0122 12:41:04.075144 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:41:04 crc kubenswrapper[4773]: I0122 12:41:04.075232 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 12:41:04 crc kubenswrapper[4773]: I0122 12:41:04.075969 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ce34f2feb8bde5a21419e6c5ed6ef15fa76c10eb84f1b408ef4745e198ff003f"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 12:41:04 crc kubenswrapper[4773]: I0122 12:41:04.076054 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://ce34f2feb8bde5a21419e6c5ed6ef15fa76c10eb84f1b408ef4745e198ff003f" gracePeriod=600 Jan 22 12:41:04 crc kubenswrapper[4773]: I0122 12:41:04.729241 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="ce34f2feb8bde5a21419e6c5ed6ef15fa76c10eb84f1b408ef4745e198ff003f" exitCode=0 Jan 22 12:41:04 crc kubenswrapper[4773]: I0122 12:41:04.729395 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"ce34f2feb8bde5a21419e6c5ed6ef15fa76c10eb84f1b408ef4745e198ff003f"} Jan 22 12:41:04 crc kubenswrapper[4773]: I0122 12:41:04.729595 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1"} Jan 22 12:41:04 crc kubenswrapper[4773]: I0122 12:41:04.729661 4773 scope.go:117] "RemoveContainer" containerID="b866d654237ad0da40dd93a655d90ee50b5cbec35788d4ef3dc6d29b8f9722ac" Jan 22 12:43:02 crc kubenswrapper[4773]: I0122 12:43:02.708601 4773 scope.go:117] "RemoveContainer" containerID="a3fb385ad3726b9bb8abc90ae7b4e03a721c764be3a5af54413fa82e56b6ff23" Jan 22 12:43:02 crc kubenswrapper[4773]: I0122 12:43:02.729565 4773 scope.go:117] "RemoveContainer" containerID="c426b391503107adf1fb26c4f9d046d0a66e65903b251ccf1b2e308232b9e373" Jan 22 12:43:02 crc kubenswrapper[4773]: I0122 12:43:02.755033 4773 scope.go:117] "RemoveContainer" 
containerID="0fe7e2318832737d86fe600c1393556474f77a6f16477cce10a55943aca51351" Jan 22 12:43:04 crc kubenswrapper[4773]: I0122 12:43:04.074331 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:43:04 crc kubenswrapper[4773]: I0122 12:43:04.074499 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:43:34 crc kubenswrapper[4773]: I0122 12:43:34.074979 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:43:34 crc kubenswrapper[4773]: I0122 12:43:34.075502 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:44:04 crc kubenswrapper[4773]: I0122 12:44:04.074470 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 12:44:04 crc kubenswrapper[4773]: I0122 12:44:04.075073 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 12:44:04 crc kubenswrapper[4773]: I0122 12:44:04.075154 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 12:44:04 crc kubenswrapper[4773]: I0122 12:44:04.075806 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 12:44:04 crc kubenswrapper[4773]: I0122 12:44:04.075895 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" gracePeriod=600 Jan 22 12:44:04 crc kubenswrapper[4773]: E0122 12:44:04.754476 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:44:05 crc kubenswrapper[4773]: I0122 12:44:05.417573 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" exitCode=0 Jan 22 12:44:05 crc kubenswrapper[4773]: I0122 12:44:05.417617 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1"} Jan 22 12:44:05 crc kubenswrapper[4773]: I0122 12:44:05.417667 4773 scope.go:117] "RemoveContainer" containerID="ce34f2feb8bde5a21419e6c5ed6ef15fa76c10eb84f1b408ef4745e198ff003f" Jan 22 12:44:05 crc kubenswrapper[4773]: I0122 12:44:05.418336 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:44:05 crc kubenswrapper[4773]: E0122 12:44:05.418949 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:44:19 crc kubenswrapper[4773]: I0122 12:44:19.658197 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:44:19 crc kubenswrapper[4773]: E0122 12:44:19.658940 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:44:34 crc kubenswrapper[4773]: I0122 12:44:34.657987 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:44:34 crc kubenswrapper[4773]: E0122 12:44:34.658777 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:44:45 crc kubenswrapper[4773]: I0122 12:44:45.658918 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:44:45 crc kubenswrapper[4773]: E0122 12:44:45.659944 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.156600 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f"]
Jan 22 12:45:00 crc kubenswrapper[4773]: E0122 12:45:00.157519 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37f04993-18dd-4c5f-806a-005b46504f81" containerName="registry-server"
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.157533 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="37f04993-18dd-4c5f-806a-005b46504f81" containerName="registry-server"
Jan 22 12:45:00 crc kubenswrapper[4773]: E0122 12:45:00.157546 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37f04993-18dd-4c5f-806a-005b46504f81" containerName="extract-utilities"
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.157553 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="37f04993-18dd-4c5f-806a-005b46504f81" containerName="extract-utilities"
Jan 22 12:45:00 crc kubenswrapper[4773]: E0122 12:45:00.157567 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37f04993-18dd-4c5f-806a-005b46504f81" containerName="extract-content"
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.157572 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="37f04993-18dd-4c5f-806a-005b46504f81" containerName="extract-content"
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.157723 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="37f04993-18dd-4c5f-806a-005b46504f81" containerName="registry-server"
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.158358 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f"
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.161610 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.165246 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f"]
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.165873 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.200195 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/74ce6389-b348-4f77-adc5-832d46e5f3cf-secret-volume\") pod \"collect-profiles-29484765-bj72f\" (UID: \"74ce6389-b348-4f77-adc5-832d46e5f3cf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f"
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.200598 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/74ce6389-b348-4f77-adc5-832d46e5f3cf-config-volume\") pod \"collect-profiles-29484765-bj72f\" (UID: \"74ce6389-b348-4f77-adc5-832d46e5f3cf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f"
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.200636 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqr6d\" (UniqueName: \"kubernetes.io/projected/74ce6389-b348-4f77-adc5-832d46e5f3cf-kube-api-access-cqr6d\") pod \"collect-profiles-29484765-bj72f\" (UID: \"74ce6389-b348-4f77-adc5-832d46e5f3cf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f"
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.302560 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/74ce6389-b348-4f77-adc5-832d46e5f3cf-config-volume\") pod \"collect-profiles-29484765-bj72f\" (UID: \"74ce6389-b348-4f77-adc5-832d46e5f3cf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f"
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.302640 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqr6d\" (UniqueName: \"kubernetes.io/projected/74ce6389-b348-4f77-adc5-832d46e5f3cf-kube-api-access-cqr6d\") pod \"collect-profiles-29484765-bj72f\" (UID: \"74ce6389-b348-4f77-adc5-832d46e5f3cf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f"
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.302816 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/74ce6389-b348-4f77-adc5-832d46e5f3cf-secret-volume\") pod \"collect-profiles-29484765-bj72f\" (UID: \"74ce6389-b348-4f77-adc5-832d46e5f3cf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f"
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.304495 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/74ce6389-b348-4f77-adc5-832d46e5f3cf-config-volume\") pod \"collect-profiles-29484765-bj72f\" (UID: \"74ce6389-b348-4f77-adc5-832d46e5f3cf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f"
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.308925 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/74ce6389-b348-4f77-adc5-832d46e5f3cf-secret-volume\") pod \"collect-profiles-29484765-bj72f\" (UID: \"74ce6389-b348-4f77-adc5-832d46e5f3cf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f"
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.335545 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqr6d\" (UniqueName: \"kubernetes.io/projected/74ce6389-b348-4f77-adc5-832d46e5f3cf-kube-api-access-cqr6d\") pod \"collect-profiles-29484765-bj72f\" (UID: \"74ce6389-b348-4f77-adc5-832d46e5f3cf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f"
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.487976 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f"
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.658244 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1"
Jan 22 12:45:00 crc kubenswrapper[4773]: E0122 12:45:00.658688 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.779315 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f"]
Jan 22 12:45:00 crc kubenswrapper[4773]: I0122 12:45:00.946854 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f" event={"ID":"74ce6389-b348-4f77-adc5-832d46e5f3cf","Type":"ContainerStarted","Data":"3ae9f6779877aeb727274de12152e52304514b7a580bce4831085ba7d2cc6292"}
Jan 22 12:45:02 crc kubenswrapper[4773]: I0122 12:45:02.963870 4773 generic.go:334] "Generic (PLEG): container finished" podID="74ce6389-b348-4f77-adc5-832d46e5f3cf" containerID="db576f384296f5a43a90c92a5975a7411a7af9b5d4a993677c9a824545f8ac2a" exitCode=0
Jan 22 12:45:02 crc kubenswrapper[4773]: I0122 12:45:02.964155 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f" event={"ID":"74ce6389-b348-4f77-adc5-832d46e5f3cf","Type":"ContainerDied","Data":"db576f384296f5a43a90c92a5975a7411a7af9b5d4a993677c9a824545f8ac2a"}
Jan 22 12:45:04 crc kubenswrapper[4773]: I0122 12:45:04.239177 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f"
Jan 22 12:45:04 crc kubenswrapper[4773]: I0122 12:45:04.371629 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/74ce6389-b348-4f77-adc5-832d46e5f3cf-config-volume\") pod \"74ce6389-b348-4f77-adc5-832d46e5f3cf\" (UID: \"74ce6389-b348-4f77-adc5-832d46e5f3cf\") "
Jan 22 12:45:04 crc kubenswrapper[4773]: I0122 12:45:04.371749 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqr6d\" (UniqueName: \"kubernetes.io/projected/74ce6389-b348-4f77-adc5-832d46e5f3cf-kube-api-access-cqr6d\") pod \"74ce6389-b348-4f77-adc5-832d46e5f3cf\" (UID: \"74ce6389-b348-4f77-adc5-832d46e5f3cf\") "
Jan 22 12:45:04 crc kubenswrapper[4773]: I0122 12:45:04.371990 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/74ce6389-b348-4f77-adc5-832d46e5f3cf-secret-volume\") pod \"74ce6389-b348-4f77-adc5-832d46e5f3cf\" (UID: \"74ce6389-b348-4f77-adc5-832d46e5f3cf\") "
Jan 22 12:45:04 crc kubenswrapper[4773]: I0122 12:45:04.374472 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74ce6389-b348-4f77-adc5-832d46e5f3cf-config-volume" (OuterVolumeSpecName: "config-volume") pod "74ce6389-b348-4f77-adc5-832d46e5f3cf" (UID: "74ce6389-b348-4f77-adc5-832d46e5f3cf"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 12:45:04 crc kubenswrapper[4773]: I0122 12:45:04.380266 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74ce6389-b348-4f77-adc5-832d46e5f3cf-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "74ce6389-b348-4f77-adc5-832d46e5f3cf" (UID: "74ce6389-b348-4f77-adc5-832d46e5f3cf"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 12:45:04 crc kubenswrapper[4773]: I0122 12:45:04.380791 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74ce6389-b348-4f77-adc5-832d46e5f3cf-kube-api-access-cqr6d" (OuterVolumeSpecName: "kube-api-access-cqr6d") pod "74ce6389-b348-4f77-adc5-832d46e5f3cf" (UID: "74ce6389-b348-4f77-adc5-832d46e5f3cf"). InnerVolumeSpecName "kube-api-access-cqr6d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:45:04 crc kubenswrapper[4773]: I0122 12:45:04.474076 4773 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/74ce6389-b348-4f77-adc5-832d46e5f3cf-config-volume\") on node \"crc\" DevicePath \"\""
Jan 22 12:45:04 crc kubenswrapper[4773]: I0122 12:45:04.474433 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqr6d\" (UniqueName: \"kubernetes.io/projected/74ce6389-b348-4f77-adc5-832d46e5f3cf-kube-api-access-cqr6d\") on node \"crc\" DevicePath \"\""
Jan 22 12:45:04 crc kubenswrapper[4773]: I0122 12:45:04.474574 4773 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/74ce6389-b348-4f77-adc5-832d46e5f3cf-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 22 12:45:04 crc kubenswrapper[4773]: I0122 12:45:04.981162 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f" event={"ID":"74ce6389-b348-4f77-adc5-832d46e5f3cf","Type":"ContainerDied","Data":"3ae9f6779877aeb727274de12152e52304514b7a580bce4831085ba7d2cc6292"}
Jan 22 12:45:04 crc kubenswrapper[4773]: I0122 12:45:04.981223 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f"
Jan 22 12:45:04 crc kubenswrapper[4773]: I0122 12:45:04.981244 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ae9f6779877aeb727274de12152e52304514b7a580bce4831085ba7d2cc6292"
Jan 22 12:45:05 crc kubenswrapper[4773]: I0122 12:45:05.326377 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk"]
Jan 22 12:45:05 crc kubenswrapper[4773]: I0122 12:45:05.332792 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484720-tt5zk"]
Jan 22 12:45:06 crc kubenswrapper[4773]: I0122 12:45:06.670136 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ab754af-86ec-4c82-bccc-4f773da62193" path="/var/lib/kubelet/pods/6ab754af-86ec-4c82-bccc-4f773da62193/volumes"
Jan 22 12:45:14 crc kubenswrapper[4773]: I0122 12:45:14.658626 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1"
Jan 22 12:45:14 crc kubenswrapper[4773]: E0122 12:45:14.659493 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:45:28 crc kubenswrapper[4773]: I0122 12:45:28.657979 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1"
Jan 22 12:45:28 crc kubenswrapper[4773]: E0122 12:45:28.659125 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:45:41 crc kubenswrapper[4773]: I0122 12:45:41.658914 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:45:41 crc kubenswrapper[4773]: E0122 12:45:41.660009 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:45:54 crc kubenswrapper[4773]: I0122 12:45:54.659034 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:45:54 crc kubenswrapper[4773]: E0122 12:45:54.660472 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:46:02 crc kubenswrapper[4773]: I0122 12:46:02.832835 4773 scope.go:117] "RemoveContainer" containerID="023c6615a88d66acca8896c45bcf2882eb178d099b8b0fdf5b43e9a4bddf4647" Jan 22 12:46:08 crc kubenswrapper[4773]: I0122 12:46:08.658185 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:46:08 crc kubenswrapper[4773]: E0122 12:46:08.659061 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:46:21 crc kubenswrapper[4773]: I0122 12:46:21.657683 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:46:21 crc kubenswrapper[4773]: E0122 12:46:21.658471 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:46:32 crc kubenswrapper[4773]: I0122 12:46:32.663650 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:46:32 crc kubenswrapper[4773]: E0122 12:46:32.664521 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:46:46 crc kubenswrapper[4773]: I0122 12:46:46.658865 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:46:46 crc kubenswrapper[4773]: E0122 12:46:46.659937 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:46:59 crc kubenswrapper[4773]: I0122 12:46:59.658345 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:46:59 crc kubenswrapper[4773]: E0122 12:46:59.659140 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:47:13 crc kubenswrapper[4773]: I0122 12:47:13.658072 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:47:13 crc kubenswrapper[4773]: E0122 12:47:13.659863 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:47:24 crc kubenswrapper[4773]: I0122 12:47:24.658149 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:47:24 crc kubenswrapper[4773]: E0122 12:47:24.659058 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:47:24 crc kubenswrapper[4773]: I0122 12:47:24.895640 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-f7phz"] Jan 22 12:47:24 crc kubenswrapper[4773]: E0122 12:47:24.896442 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74ce6389-b348-4f77-adc5-832d46e5f3cf" containerName="collect-profiles" Jan 22 12:47:24 crc kubenswrapper[4773]: I0122 12:47:24.896613 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="74ce6389-b348-4f77-adc5-832d46e5f3cf" containerName="collect-profiles" Jan 22 12:47:24 crc kubenswrapper[4773]: I0122 12:47:24.896932 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="74ce6389-b348-4f77-adc5-832d46e5f3cf" 
containerName="collect-profiles" Jan 22 12:47:24 crc kubenswrapper[4773]: I0122 12:47:24.898582 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f7phz" Jan 22 12:47:24 crc kubenswrapper[4773]: I0122 12:47:24.906670 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f7phz"] Jan 22 12:47:25 crc kubenswrapper[4773]: I0122 12:47:25.062117 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fv28\" (UniqueName: \"kubernetes.io/projected/26383017-c239-433f-b651-8a1832ceb6a0-kube-api-access-7fv28\") pod \"community-operators-f7phz\" (UID: \"26383017-c239-433f-b651-8a1832ceb6a0\") " pod="openshift-marketplace/community-operators-f7phz" Jan 22 12:47:25 crc kubenswrapper[4773]: I0122 12:47:25.062799 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26383017-c239-433f-b651-8a1832ceb6a0-utilities\") pod \"community-operators-f7phz\" (UID: \"26383017-c239-433f-b651-8a1832ceb6a0\") " pod="openshift-marketplace/community-operators-f7phz" Jan 22 12:47:25 crc kubenswrapper[4773]: I0122 12:47:25.062995 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26383017-c239-433f-b651-8a1832ceb6a0-catalog-content\") pod \"community-operators-f7phz\" (UID: \"26383017-c239-433f-b651-8a1832ceb6a0\") " pod="openshift-marketplace/community-operators-f7phz" Jan 22 12:47:25 crc kubenswrapper[4773]: I0122 12:47:25.164395 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fv28\" (UniqueName: \"kubernetes.io/projected/26383017-c239-433f-b651-8a1832ceb6a0-kube-api-access-7fv28\") pod \"community-operators-f7phz\" (UID: \"26383017-c239-433f-b651-8a1832ceb6a0\") " pod="openshift-marketplace/community-operators-f7phz" Jan 22 12:47:25 crc kubenswrapper[4773]: I0122 12:47:25.164477 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26383017-c239-433f-b651-8a1832ceb6a0-utilities\") pod \"community-operators-f7phz\" (UID: \"26383017-c239-433f-b651-8a1832ceb6a0\") " pod="openshift-marketplace/community-operators-f7phz" Jan 22 12:47:25 crc kubenswrapper[4773]: I0122 12:47:25.164594 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26383017-c239-433f-b651-8a1832ceb6a0-catalog-content\") pod \"community-operators-f7phz\" (UID: \"26383017-c239-433f-b651-8a1832ceb6a0\") " pod="openshift-marketplace/community-operators-f7phz" Jan 22 12:47:25 crc kubenswrapper[4773]: I0122 12:47:25.165424 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26383017-c239-433f-b651-8a1832ceb6a0-utilities\") pod \"community-operators-f7phz\" (UID: \"26383017-c239-433f-b651-8a1832ceb6a0\") " pod="openshift-marketplace/community-operators-f7phz" Jan 22 12:47:25 crc kubenswrapper[4773]: I0122 12:47:25.165678 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26383017-c239-433f-b651-8a1832ceb6a0-catalog-content\") pod \"community-operators-f7phz\" (UID: \"26383017-c239-433f-b651-8a1832ceb6a0\") " 
pod="openshift-marketplace/community-operators-f7phz" Jan 22 12:47:25 crc kubenswrapper[4773]: I0122 12:47:25.187632 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fv28\" (UniqueName: \"kubernetes.io/projected/26383017-c239-433f-b651-8a1832ceb6a0-kube-api-access-7fv28\") pod \"community-operators-f7phz\" (UID: \"26383017-c239-433f-b651-8a1832ceb6a0\") " pod="openshift-marketplace/community-operators-f7phz" Jan 22 12:47:25 crc kubenswrapper[4773]: I0122 12:47:25.247186 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f7phz" Jan 22 12:47:25 crc kubenswrapper[4773]: I0122 12:47:25.740559 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f7phz"] Jan 22 12:47:26 crc kubenswrapper[4773]: I0122 12:47:26.642949 4773 generic.go:334] "Generic (PLEG): container finished" podID="26383017-c239-433f-b651-8a1832ceb6a0" containerID="fa4c7f877fe03b116d8ee94e28eac83f6d75a791ca162db7102346e2381a19db" exitCode=0 Jan 22 12:47:26 crc kubenswrapper[4773]: I0122 12:47:26.643014 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f7phz" event={"ID":"26383017-c239-433f-b651-8a1832ceb6a0","Type":"ContainerDied","Data":"fa4c7f877fe03b116d8ee94e28eac83f6d75a791ca162db7102346e2381a19db"} Jan 22 12:47:26 crc kubenswrapper[4773]: I0122 12:47:26.643055 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f7phz" event={"ID":"26383017-c239-433f-b651-8a1832ceb6a0","Type":"ContainerStarted","Data":"9d231ee88cd58e0edcd5569d976031037eb3dce9bd2f5c22d627afee6a709179"} Jan 22 12:47:26 crc kubenswrapper[4773]: I0122 12:47:26.646362 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 12:47:27 crc kubenswrapper[4773]: I0122 12:47:27.650745 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f7phz" event={"ID":"26383017-c239-433f-b651-8a1832ceb6a0","Type":"ContainerStarted","Data":"37e1ac471130da31ab7ec462b23727f50df39ed4cd91047626b052a8a0787f1a"} Jan 22 12:47:28 crc kubenswrapper[4773]: I0122 12:47:28.662946 4773 generic.go:334] "Generic (PLEG): container finished" podID="26383017-c239-433f-b651-8a1832ceb6a0" containerID="37e1ac471130da31ab7ec462b23727f50df39ed4cd91047626b052a8a0787f1a" exitCode=0 Jan 22 12:47:28 crc kubenswrapper[4773]: I0122 12:47:28.667379 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f7phz" event={"ID":"26383017-c239-433f-b651-8a1832ceb6a0","Type":"ContainerDied","Data":"37e1ac471130da31ab7ec462b23727f50df39ed4cd91047626b052a8a0787f1a"} Jan 22 12:47:29 crc kubenswrapper[4773]: I0122 12:47:29.671098 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f7phz" event={"ID":"26383017-c239-433f-b651-8a1832ceb6a0","Type":"ContainerStarted","Data":"0c4d25891c68ea83eb70707c5204b65941dd8fc83cc3bd53b448d37fd9976a2f"} Jan 22 12:47:29 crc kubenswrapper[4773]: I0122 12:47:29.689551 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-f7phz" podStartSLOduration=3.248216564 podStartE2EDuration="5.689521399s" podCreationTimestamp="2026-01-22 12:47:24 +0000 UTC" firstStartedPulling="2026-01-22 12:47:26.645915998 +0000 UTC m=+3154.224031843" lastFinishedPulling="2026-01-22 12:47:29.087220863 
+0000 UTC m=+3156.665336678" observedRunningTime="2026-01-22 12:47:29.687715858 +0000 UTC m=+3157.265831703" watchObservedRunningTime="2026-01-22 12:47:29.689521399 +0000 UTC m=+3157.267637224" Jan 22 12:47:35 crc kubenswrapper[4773]: I0122 12:47:35.248542 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-f7phz" Jan 22 12:47:35 crc kubenswrapper[4773]: I0122 12:47:35.248967 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-f7phz" Jan 22 12:47:35 crc kubenswrapper[4773]: I0122 12:47:35.290873 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-f7phz" Jan 22 12:47:35 crc kubenswrapper[4773]: I0122 12:47:35.770995 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-f7phz" Jan 22 12:47:36 crc kubenswrapper[4773]: I0122 12:47:36.649034 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f7phz"] Jan 22 12:47:37 crc kubenswrapper[4773]: I0122 12:47:37.658405 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:47:37 crc kubenswrapper[4773]: E0122 12:47:37.658641 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:47:37 crc kubenswrapper[4773]: I0122 12:47:37.732931 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-f7phz" podUID="26383017-c239-433f-b651-8a1832ceb6a0" containerName="registry-server" containerID="cri-o://0c4d25891c68ea83eb70707c5204b65941dd8fc83cc3bd53b448d37fd9976a2f" gracePeriod=2 Jan 22 12:47:38 crc kubenswrapper[4773]: I0122 12:47:38.654621 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kwwvb"] Jan 22 12:47:38 crc kubenswrapper[4773]: I0122 12:47:38.657248 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kwwvb" Jan 22 12:47:38 crc kubenswrapper[4773]: I0122 12:47:38.685899 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kwwvb"] Jan 22 12:47:38 crc kubenswrapper[4773]: I0122 12:47:38.784308 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5e61b34-b44c-4d50-9e4d-b4baea93331f-catalog-content\") pod \"redhat-marketplace-kwwvb\" (UID: \"e5e61b34-b44c-4d50-9e4d-b4baea93331f\") " pod="openshift-marketplace/redhat-marketplace-kwwvb" Jan 22 12:47:38 crc kubenswrapper[4773]: I0122 12:47:38.784376 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxg9m\" (UniqueName: \"kubernetes.io/projected/e5e61b34-b44c-4d50-9e4d-b4baea93331f-kube-api-access-jxg9m\") pod \"redhat-marketplace-kwwvb\" (UID: \"e5e61b34-b44c-4d50-9e4d-b4baea93331f\") " pod="openshift-marketplace/redhat-marketplace-kwwvb" Jan 22 12:47:38 crc kubenswrapper[4773]: I0122 12:47:38.784535 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5e61b34-b44c-4d50-9e4d-b4baea93331f-utilities\") pod \"redhat-marketplace-kwwvb\" (UID: \"e5e61b34-b44c-4d50-9e4d-b4baea93331f\") " pod="openshift-marketplace/redhat-marketplace-kwwvb" Jan 22 12:47:38 crc kubenswrapper[4773]: I0122 12:47:38.886027 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5e61b34-b44c-4d50-9e4d-b4baea93331f-catalog-content\") pod \"redhat-marketplace-kwwvb\" (UID: \"e5e61b34-b44c-4d50-9e4d-b4baea93331f\") " pod="openshift-marketplace/redhat-marketplace-kwwvb" Jan 22 12:47:38 crc kubenswrapper[4773]: I0122 12:47:38.886096 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxg9m\" (UniqueName: \"kubernetes.io/projected/e5e61b34-b44c-4d50-9e4d-b4baea93331f-kube-api-access-jxg9m\") pod \"redhat-marketplace-kwwvb\" (UID: \"e5e61b34-b44c-4d50-9e4d-b4baea93331f\") " pod="openshift-marketplace/redhat-marketplace-kwwvb" Jan 22 12:47:38 crc kubenswrapper[4773]: I0122 12:47:38.886221 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5e61b34-b44c-4d50-9e4d-b4baea93331f-utilities\") pod \"redhat-marketplace-kwwvb\" (UID: \"e5e61b34-b44c-4d50-9e4d-b4baea93331f\") " pod="openshift-marketplace/redhat-marketplace-kwwvb" Jan 22 12:47:38 crc kubenswrapper[4773]: I0122 12:47:38.886614 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5e61b34-b44c-4d50-9e4d-b4baea93331f-catalog-content\") pod \"redhat-marketplace-kwwvb\" (UID: \"e5e61b34-b44c-4d50-9e4d-b4baea93331f\") " pod="openshift-marketplace/redhat-marketplace-kwwvb" Jan 22 12:47:38 crc kubenswrapper[4773]: I0122 12:47:38.886712 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5e61b34-b44c-4d50-9e4d-b4baea93331f-utilities\") pod \"redhat-marketplace-kwwvb\" (UID: \"e5e61b34-b44c-4d50-9e4d-b4baea93331f\") " pod="openshift-marketplace/redhat-marketplace-kwwvb" Jan 22 12:47:38 crc kubenswrapper[4773]: I0122 12:47:38.916001 4773 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-jxg9m\" (UniqueName: \"kubernetes.io/projected/e5e61b34-b44c-4d50-9e4d-b4baea93331f-kube-api-access-jxg9m\") pod \"redhat-marketplace-kwwvb\" (UID: \"e5e61b34-b44c-4d50-9e4d-b4baea93331f\") " pod="openshift-marketplace/redhat-marketplace-kwwvb" Jan 22 12:47:38 crc kubenswrapper[4773]: I0122 12:47:38.978502 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kwwvb" Jan 22 12:47:39 crc kubenswrapper[4773]: I0122 12:47:39.441787 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kwwvb"] Jan 22 12:47:39 crc kubenswrapper[4773]: I0122 12:47:39.750929 4773 generic.go:334] "Generic (PLEG): container finished" podID="e5e61b34-b44c-4d50-9e4d-b4baea93331f" containerID="3423aadfb5a63fbb0025e0bbff4037b542276b95fdd778309899b6548a9fd338" exitCode=0 Jan 22 12:47:39 crc kubenswrapper[4773]: I0122 12:47:39.752143 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kwwvb" event={"ID":"e5e61b34-b44c-4d50-9e4d-b4baea93331f","Type":"ContainerDied","Data":"3423aadfb5a63fbb0025e0bbff4037b542276b95fdd778309899b6548a9fd338"} Jan 22 12:47:39 crc kubenswrapper[4773]: I0122 12:47:39.752185 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kwwvb" event={"ID":"e5e61b34-b44c-4d50-9e4d-b4baea93331f","Type":"ContainerStarted","Data":"d49b2930691d3afece52e3594ba689e26953ec085e9ab6a86e0a4aefa7540bb6"} Jan 22 12:47:39 crc kubenswrapper[4773]: I0122 12:47:39.761895 4773 generic.go:334] "Generic (PLEG): container finished" podID="26383017-c239-433f-b651-8a1832ceb6a0" containerID="0c4d25891c68ea83eb70707c5204b65941dd8fc83cc3bd53b448d37fd9976a2f" exitCode=0 Jan 22 12:47:39 crc kubenswrapper[4773]: I0122 12:47:39.761945 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f7phz" event={"ID":"26383017-c239-433f-b651-8a1832ceb6a0","Type":"ContainerDied","Data":"0c4d25891c68ea83eb70707c5204b65941dd8fc83cc3bd53b448d37fd9976a2f"} Jan 22 12:47:40 crc kubenswrapper[4773]: I0122 12:47:40.006401 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-f7phz" Jan 22 12:47:40 crc kubenswrapper[4773]: I0122 12:47:40.110600 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fv28\" (UniqueName: \"kubernetes.io/projected/26383017-c239-433f-b651-8a1832ceb6a0-kube-api-access-7fv28\") pod \"26383017-c239-433f-b651-8a1832ceb6a0\" (UID: \"26383017-c239-433f-b651-8a1832ceb6a0\") " Jan 22 12:47:40 crc kubenswrapper[4773]: I0122 12:47:40.110682 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26383017-c239-433f-b651-8a1832ceb6a0-catalog-content\") pod \"26383017-c239-433f-b651-8a1832ceb6a0\" (UID: \"26383017-c239-433f-b651-8a1832ceb6a0\") " Jan 22 12:47:40 crc kubenswrapper[4773]: I0122 12:47:40.110740 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26383017-c239-433f-b651-8a1832ceb6a0-utilities\") pod \"26383017-c239-433f-b651-8a1832ceb6a0\" (UID: \"26383017-c239-433f-b651-8a1832ceb6a0\") " Jan 22 12:47:40 crc kubenswrapper[4773]: I0122 12:47:40.111755 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26383017-c239-433f-b651-8a1832ceb6a0-utilities" (OuterVolumeSpecName: "utilities") pod "26383017-c239-433f-b651-8a1832ceb6a0" (UID: "26383017-c239-433f-b651-8a1832ceb6a0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:47:40 crc kubenswrapper[4773]: I0122 12:47:40.121168 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26383017-c239-433f-b651-8a1832ceb6a0-kube-api-access-7fv28" (OuterVolumeSpecName: "kube-api-access-7fv28") pod "26383017-c239-433f-b651-8a1832ceb6a0" (UID: "26383017-c239-433f-b651-8a1832ceb6a0"). InnerVolumeSpecName "kube-api-access-7fv28". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:47:40 crc kubenswrapper[4773]: I0122 12:47:40.166691 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26383017-c239-433f-b651-8a1832ceb6a0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "26383017-c239-433f-b651-8a1832ceb6a0" (UID: "26383017-c239-433f-b651-8a1832ceb6a0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:47:40 crc kubenswrapper[4773]: I0122 12:47:40.212378 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fv28\" (UniqueName: \"kubernetes.io/projected/26383017-c239-433f-b651-8a1832ceb6a0-kube-api-access-7fv28\") on node \"crc\" DevicePath \"\"" Jan 22 12:47:40 crc kubenswrapper[4773]: I0122 12:47:40.212408 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/26383017-c239-433f-b651-8a1832ceb6a0-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 12:47:40 crc kubenswrapper[4773]: I0122 12:47:40.212417 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/26383017-c239-433f-b651-8a1832ceb6a0-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 12:47:40 crc kubenswrapper[4773]: I0122 12:47:40.771869 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f7phz" event={"ID":"26383017-c239-433f-b651-8a1832ceb6a0","Type":"ContainerDied","Data":"9d231ee88cd58e0edcd5569d976031037eb3dce9bd2f5c22d627afee6a709179"} Jan 22 12:47:40 crc kubenswrapper[4773]: I0122 12:47:40.771926 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f7phz" Jan 22 12:47:40 crc kubenswrapper[4773]: I0122 12:47:40.771934 4773 scope.go:117] "RemoveContainer" containerID="0c4d25891c68ea83eb70707c5204b65941dd8fc83cc3bd53b448d37fd9976a2f" Jan 22 12:47:40 crc kubenswrapper[4773]: I0122 12:47:40.807952 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f7phz"] Jan 22 12:47:40 crc kubenswrapper[4773]: I0122 12:47:40.813219 4773 scope.go:117] "RemoveContainer" containerID="37e1ac471130da31ab7ec462b23727f50df39ed4cd91047626b052a8a0787f1a" Jan 22 12:47:40 crc kubenswrapper[4773]: I0122 12:47:40.819277 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-f7phz"] Jan 22 12:47:40 crc kubenswrapper[4773]: I0122 12:47:40.831653 4773 scope.go:117] "RemoveContainer" containerID="fa4c7f877fe03b116d8ee94e28eac83f6d75a791ca162db7102346e2381a19db" Jan 22 12:47:41 crc kubenswrapper[4773]: I0122 12:47:41.460583 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6cznt"] Jan 22 12:47:41 crc kubenswrapper[4773]: E0122 12:47:41.461036 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26383017-c239-433f-b651-8a1832ceb6a0" containerName="extract-utilities" Jan 22 12:47:41 crc kubenswrapper[4773]: I0122 12:47:41.461071 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="26383017-c239-433f-b651-8a1832ceb6a0" containerName="extract-utilities" Jan 22 12:47:41 crc kubenswrapper[4773]: E0122 12:47:41.461092 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26383017-c239-433f-b651-8a1832ceb6a0" containerName="registry-server" Jan 22 12:47:41 crc kubenswrapper[4773]: I0122 12:47:41.461100 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="26383017-c239-433f-b651-8a1832ceb6a0" containerName="registry-server" Jan 22 12:47:41 crc kubenswrapper[4773]: E0122 12:47:41.461111 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26383017-c239-433f-b651-8a1832ceb6a0" containerName="extract-content" Jan 22 12:47:41 crc kubenswrapper[4773]: I0122 12:47:41.461116 4773 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="26383017-c239-433f-b651-8a1832ceb6a0" containerName="extract-content" Jan 22 12:47:41 crc kubenswrapper[4773]: I0122 12:47:41.461334 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="26383017-c239-433f-b651-8a1832ceb6a0" containerName="registry-server" Jan 22 12:47:41 crc kubenswrapper[4773]: I0122 12:47:41.462974 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6cznt" Jan 22 12:47:41 crc kubenswrapper[4773]: I0122 12:47:41.476336 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6cznt"] Jan 22 12:47:41 crc kubenswrapper[4773]: I0122 12:47:41.530620 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q29bv\" (UniqueName: \"kubernetes.io/projected/000a2229-b4ce-4b53-8fd3-47d20f3724ac-kube-api-access-q29bv\") pod \"certified-operators-6cznt\" (UID: \"000a2229-b4ce-4b53-8fd3-47d20f3724ac\") " pod="openshift-marketplace/certified-operators-6cznt" Jan 22 12:47:41 crc kubenswrapper[4773]: I0122 12:47:41.530685 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/000a2229-b4ce-4b53-8fd3-47d20f3724ac-utilities\") pod \"certified-operators-6cznt\" (UID: \"000a2229-b4ce-4b53-8fd3-47d20f3724ac\") " pod="openshift-marketplace/certified-operators-6cznt" Jan 22 12:47:41 crc kubenswrapper[4773]: I0122 12:47:41.530722 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/000a2229-b4ce-4b53-8fd3-47d20f3724ac-catalog-content\") pod \"certified-operators-6cznt\" (UID: \"000a2229-b4ce-4b53-8fd3-47d20f3724ac\") " pod="openshift-marketplace/certified-operators-6cznt" Jan 22 12:47:41 crc kubenswrapper[4773]: I0122 12:47:41.632381 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q29bv\" (UniqueName: \"kubernetes.io/projected/000a2229-b4ce-4b53-8fd3-47d20f3724ac-kube-api-access-q29bv\") pod \"certified-operators-6cznt\" (UID: \"000a2229-b4ce-4b53-8fd3-47d20f3724ac\") " pod="openshift-marketplace/certified-operators-6cznt" Jan 22 12:47:41 crc kubenswrapper[4773]: I0122 12:47:41.632456 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/000a2229-b4ce-4b53-8fd3-47d20f3724ac-utilities\") pod \"certified-operators-6cznt\" (UID: \"000a2229-b4ce-4b53-8fd3-47d20f3724ac\") " pod="openshift-marketplace/certified-operators-6cznt" Jan 22 12:47:41 crc kubenswrapper[4773]: I0122 12:47:41.632489 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/000a2229-b4ce-4b53-8fd3-47d20f3724ac-catalog-content\") pod \"certified-operators-6cznt\" (UID: \"000a2229-b4ce-4b53-8fd3-47d20f3724ac\") " pod="openshift-marketplace/certified-operators-6cznt" Jan 22 12:47:41 crc kubenswrapper[4773]: I0122 12:47:41.633251 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/000a2229-b4ce-4b53-8fd3-47d20f3724ac-catalog-content\") pod \"certified-operators-6cznt\" (UID: \"000a2229-b4ce-4b53-8fd3-47d20f3724ac\") " pod="openshift-marketplace/certified-operators-6cznt" Jan 22 12:47:41 crc kubenswrapper[4773]: I0122 12:47:41.633312 4773 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/000a2229-b4ce-4b53-8fd3-47d20f3724ac-utilities\") pod \"certified-operators-6cznt\" (UID: \"000a2229-b4ce-4b53-8fd3-47d20f3724ac\") " pod="openshift-marketplace/certified-operators-6cznt" Jan 22 12:47:41 crc kubenswrapper[4773]: I0122 12:47:41.657886 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q29bv\" (UniqueName: \"kubernetes.io/projected/000a2229-b4ce-4b53-8fd3-47d20f3724ac-kube-api-access-q29bv\") pod \"certified-operators-6cznt\" (UID: \"000a2229-b4ce-4b53-8fd3-47d20f3724ac\") " pod="openshift-marketplace/certified-operators-6cznt" Jan 22 12:47:41 crc kubenswrapper[4773]: I0122 12:47:41.782174 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kwwvb" event={"ID":"e5e61b34-b44c-4d50-9e4d-b4baea93331f","Type":"ContainerStarted","Data":"c67875551c6cb5824b9166c251ac78636dfc1c1747be7f4eff1adbf03196d7c5"} Jan 22 12:47:41 crc kubenswrapper[4773]: I0122 12:47:41.785146 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6cznt" Jan 22 12:47:42 crc kubenswrapper[4773]: I0122 12:47:42.263122 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6cznt"] Jan 22 12:47:42 crc kubenswrapper[4773]: I0122 12:47:42.669602 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26383017-c239-433f-b651-8a1832ceb6a0" path="/var/lib/kubelet/pods/26383017-c239-433f-b651-8a1832ceb6a0/volumes" Jan 22 12:47:42 crc kubenswrapper[4773]: I0122 12:47:42.792832 4773 generic.go:334] "Generic (PLEG): container finished" podID="000a2229-b4ce-4b53-8fd3-47d20f3724ac" containerID="bf4509af1aaa9cc238ea4f61ba1b7588ea6d1aa02cd1cd63b0ba27de956bc1ad" exitCode=0 Jan 22 12:47:42 crc kubenswrapper[4773]: I0122 12:47:42.792932 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6cznt" event={"ID":"000a2229-b4ce-4b53-8fd3-47d20f3724ac","Type":"ContainerDied","Data":"bf4509af1aaa9cc238ea4f61ba1b7588ea6d1aa02cd1cd63b0ba27de956bc1ad"} Jan 22 12:47:42 crc kubenswrapper[4773]: I0122 12:47:42.792973 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6cznt" event={"ID":"000a2229-b4ce-4b53-8fd3-47d20f3724ac","Type":"ContainerStarted","Data":"8db62a47b944fadbab69ee77d7a3584ea961936ec0aa199f969013ca45315f0d"} Jan 22 12:47:42 crc kubenswrapper[4773]: I0122 12:47:42.796148 4773 generic.go:334] "Generic (PLEG): container finished" podID="e5e61b34-b44c-4d50-9e4d-b4baea93331f" containerID="c67875551c6cb5824b9166c251ac78636dfc1c1747be7f4eff1adbf03196d7c5" exitCode=0 Jan 22 12:47:42 crc kubenswrapper[4773]: I0122 12:47:42.796210 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kwwvb" event={"ID":"e5e61b34-b44c-4d50-9e4d-b4baea93331f","Type":"ContainerDied","Data":"c67875551c6cb5824b9166c251ac78636dfc1c1747be7f4eff1adbf03196d7c5"} Jan 22 12:47:44 crc kubenswrapper[4773]: I0122 12:47:44.820586 4773 generic.go:334] "Generic (PLEG): container finished" podID="000a2229-b4ce-4b53-8fd3-47d20f3724ac" containerID="fe6fb20bdbbd06eab8656d4e2d0cf5117cb1867af381b98d6a588fa88a421e83" exitCode=0 Jan 22 12:47:44 crc kubenswrapper[4773]: I0122 12:47:44.820971 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6cznt" 
event={"ID":"000a2229-b4ce-4b53-8fd3-47d20f3724ac","Type":"ContainerDied","Data":"fe6fb20bdbbd06eab8656d4e2d0cf5117cb1867af381b98d6a588fa88a421e83"} Jan 22 12:47:44 crc kubenswrapper[4773]: I0122 12:47:44.826378 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kwwvb" event={"ID":"e5e61b34-b44c-4d50-9e4d-b4baea93331f","Type":"ContainerStarted","Data":"327f99f7f5376a2fc8d5cad40a46c3dca70d44f67917f12b357338cd64580890"} Jan 22 12:47:44 crc kubenswrapper[4773]: I0122 12:47:44.866814 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kwwvb" podStartSLOduration=2.946597815 podStartE2EDuration="6.86678714s" podCreationTimestamp="2026-01-22 12:47:38 +0000 UTC" firstStartedPulling="2026-01-22 12:47:39.754524176 +0000 UTC m=+3167.332640001" lastFinishedPulling="2026-01-22 12:47:43.674713481 +0000 UTC m=+3171.252829326" observedRunningTime="2026-01-22 12:47:44.863395104 +0000 UTC m=+3172.441510949" watchObservedRunningTime="2026-01-22 12:47:44.86678714 +0000 UTC m=+3172.444902975" Jan 22 12:47:45 crc kubenswrapper[4773]: I0122 12:47:45.836187 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6cznt" event={"ID":"000a2229-b4ce-4b53-8fd3-47d20f3724ac","Type":"ContainerStarted","Data":"f656b477297daace5034c5bf72ca58a4c0cd89814a37dde0318c8f6d2bf80d4f"} Jan 22 12:47:45 crc kubenswrapper[4773]: I0122 12:47:45.864644 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6cznt" podStartSLOduration=2.360145201 podStartE2EDuration="4.864621074s" podCreationTimestamp="2026-01-22 12:47:41 +0000 UTC" firstStartedPulling="2026-01-22 12:47:42.7946192 +0000 UTC m=+3170.372735035" lastFinishedPulling="2026-01-22 12:47:45.299095083 +0000 UTC m=+3172.877210908" observedRunningTime="2026-01-22 12:47:45.857034751 +0000 UTC m=+3173.435150596" watchObservedRunningTime="2026-01-22 12:47:45.864621074 +0000 UTC m=+3173.442736919" Jan 22 12:47:48 crc kubenswrapper[4773]: I0122 12:47:48.979718 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kwwvb" Jan 22 12:47:48 crc kubenswrapper[4773]: I0122 12:47:48.980089 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kwwvb" Jan 22 12:47:49 crc kubenswrapper[4773]: I0122 12:47:49.028006 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kwwvb" Jan 22 12:47:49 crc kubenswrapper[4773]: I0122 12:47:49.902769 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kwwvb" Jan 22 12:47:50 crc kubenswrapper[4773]: I0122 12:47:50.652912 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kwwvb"] Jan 22 12:47:51 crc kubenswrapper[4773]: I0122 12:47:51.658510 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:47:51 crc kubenswrapper[4773]: E0122 12:47:51.659125 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:47:51 crc kubenswrapper[4773]: I0122 12:47:51.785502 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6cznt" Jan 22 12:47:51 crc kubenswrapper[4773]: I0122 12:47:51.785552 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6cznt" Jan 22 12:47:51 crc kubenswrapper[4773]: I0122 12:47:51.838147 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6cznt" Jan 22 12:47:51 crc kubenswrapper[4773]: I0122 12:47:51.880204 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kwwvb" podUID="e5e61b34-b44c-4d50-9e4d-b4baea93331f" containerName="registry-server" containerID="cri-o://327f99f7f5376a2fc8d5cad40a46c3dca70d44f67917f12b357338cd64580890" gracePeriod=2 Jan 22 12:47:51 crc kubenswrapper[4773]: I0122 12:47:51.928494 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6cznt" Jan 22 12:47:52 crc kubenswrapper[4773]: I0122 12:47:52.890807 4773 generic.go:334] "Generic (PLEG): container finished" podID="e5e61b34-b44c-4d50-9e4d-b4baea93331f" containerID="327f99f7f5376a2fc8d5cad40a46c3dca70d44f67917f12b357338cd64580890" exitCode=0 Jan 22 12:47:52 crc kubenswrapper[4773]: I0122 12:47:52.890895 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kwwvb" event={"ID":"e5e61b34-b44c-4d50-9e4d-b4baea93331f","Type":"ContainerDied","Data":"327f99f7f5376a2fc8d5cad40a46c3dca70d44f67917f12b357338cd64580890"} Jan 22 12:47:53 crc kubenswrapper[4773]: I0122 12:47:53.444054 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kwwvb" Jan 22 12:47:53 crc kubenswrapper[4773]: I0122 12:47:53.512228 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5e61b34-b44c-4d50-9e4d-b4baea93331f-utilities\") pod \"e5e61b34-b44c-4d50-9e4d-b4baea93331f\" (UID: \"e5e61b34-b44c-4d50-9e4d-b4baea93331f\") " Jan 22 12:47:53 crc kubenswrapper[4773]: I0122 12:47:53.512344 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxg9m\" (UniqueName: \"kubernetes.io/projected/e5e61b34-b44c-4d50-9e4d-b4baea93331f-kube-api-access-jxg9m\") pod \"e5e61b34-b44c-4d50-9e4d-b4baea93331f\" (UID: \"e5e61b34-b44c-4d50-9e4d-b4baea93331f\") " Jan 22 12:47:53 crc kubenswrapper[4773]: I0122 12:47:53.512449 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5e61b34-b44c-4d50-9e4d-b4baea93331f-catalog-content\") pod \"e5e61b34-b44c-4d50-9e4d-b4baea93331f\" (UID: \"e5e61b34-b44c-4d50-9e4d-b4baea93331f\") " Jan 22 12:47:53 crc kubenswrapper[4773]: I0122 12:47:53.513012 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5e61b34-b44c-4d50-9e4d-b4baea93331f-utilities" (OuterVolumeSpecName: "utilities") pod "e5e61b34-b44c-4d50-9e4d-b4baea93331f" (UID: "e5e61b34-b44c-4d50-9e4d-b4baea93331f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:47:53 crc kubenswrapper[4773]: I0122 12:47:53.525820 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5e61b34-b44c-4d50-9e4d-b4baea93331f-kube-api-access-jxg9m" (OuterVolumeSpecName: "kube-api-access-jxg9m") pod "e5e61b34-b44c-4d50-9e4d-b4baea93331f" (UID: "e5e61b34-b44c-4d50-9e4d-b4baea93331f"). InnerVolumeSpecName "kube-api-access-jxg9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:47:53 crc kubenswrapper[4773]: I0122 12:47:53.540689 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5e61b34-b44c-4d50-9e4d-b4baea93331f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e5e61b34-b44c-4d50-9e4d-b4baea93331f" (UID: "e5e61b34-b44c-4d50-9e4d-b4baea93331f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:47:53 crc kubenswrapper[4773]: I0122 12:47:53.613937 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5e61b34-b44c-4d50-9e4d-b4baea93331f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 12:47:53 crc kubenswrapper[4773]: I0122 12:47:53.613980 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5e61b34-b44c-4d50-9e4d-b4baea93331f-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 12:47:53 crc kubenswrapper[4773]: I0122 12:47:53.613993 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxg9m\" (UniqueName: \"kubernetes.io/projected/e5e61b34-b44c-4d50-9e4d-b4baea93331f-kube-api-access-jxg9m\") on node \"crc\" DevicePath \"\"" Jan 22 12:47:53 crc kubenswrapper[4773]: I0122 12:47:53.901385 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kwwvb" event={"ID":"e5e61b34-b44c-4d50-9e4d-b4baea93331f","Type":"ContainerDied","Data":"d49b2930691d3afece52e3594ba689e26953ec085e9ab6a86e0a4aefa7540bb6"} Jan 22 12:47:53 crc kubenswrapper[4773]: I0122 12:47:53.902518 4773 scope.go:117] "RemoveContainer" containerID="327f99f7f5376a2fc8d5cad40a46c3dca70d44f67917f12b357338cd64580890" Jan 22 12:47:53 crc kubenswrapper[4773]: I0122 12:47:53.901473 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kwwvb" Jan 22 12:47:53 crc kubenswrapper[4773]: I0122 12:47:53.926667 4773 scope.go:117] "RemoveContainer" containerID="c67875551c6cb5824b9166c251ac78636dfc1c1747be7f4eff1adbf03196d7c5" Jan 22 12:47:53 crc kubenswrapper[4773]: I0122 12:47:53.953037 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kwwvb"] Jan 22 12:47:53 crc kubenswrapper[4773]: I0122 12:47:53.967746 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kwwvb"] Jan 22 12:47:53 crc kubenswrapper[4773]: I0122 12:47:53.975102 4773 scope.go:117] "RemoveContainer" containerID="3423aadfb5a63fbb0025e0bbff4037b542276b95fdd778309899b6548a9fd338" Jan 22 12:47:54 crc kubenswrapper[4773]: I0122 12:47:54.258139 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6cznt"] Jan 22 12:47:54 crc kubenswrapper[4773]: I0122 12:47:54.258808 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6cznt" podUID="000a2229-b4ce-4b53-8fd3-47d20f3724ac" containerName="registry-server" containerID="cri-o://f656b477297daace5034c5bf72ca58a4c0cd89814a37dde0318c8f6d2bf80d4f" gracePeriod=2 Jan 22 12:47:54 crc kubenswrapper[4773]: I0122 12:47:54.673941 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5e61b34-b44c-4d50-9e4d-b4baea93331f" path="/var/lib/kubelet/pods/e5e61b34-b44c-4d50-9e4d-b4baea93331f/volumes" Jan 22 12:47:55 crc kubenswrapper[4773]: I0122 12:47:55.939254 4773 generic.go:334] "Generic (PLEG): container finished" podID="000a2229-b4ce-4b53-8fd3-47d20f3724ac" containerID="f656b477297daace5034c5bf72ca58a4c0cd89814a37dde0318c8f6d2bf80d4f" exitCode=0 Jan 22 12:47:55 crc kubenswrapper[4773]: I0122 12:47:55.939345 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6cznt" event={"ID":"000a2229-b4ce-4b53-8fd3-47d20f3724ac","Type":"ContainerDied","Data":"f656b477297daace5034c5bf72ca58a4c0cd89814a37dde0318c8f6d2bf80d4f"} Jan 22 12:47:56 crc kubenswrapper[4773]: I0122 12:47:56.377588 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6cznt" Jan 22 12:47:56 crc kubenswrapper[4773]: I0122 12:47:56.461874 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q29bv\" (UniqueName: \"kubernetes.io/projected/000a2229-b4ce-4b53-8fd3-47d20f3724ac-kube-api-access-q29bv\") pod \"000a2229-b4ce-4b53-8fd3-47d20f3724ac\" (UID: \"000a2229-b4ce-4b53-8fd3-47d20f3724ac\") " Jan 22 12:47:56 crc kubenswrapper[4773]: I0122 12:47:56.462249 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/000a2229-b4ce-4b53-8fd3-47d20f3724ac-catalog-content\") pod \"000a2229-b4ce-4b53-8fd3-47d20f3724ac\" (UID: \"000a2229-b4ce-4b53-8fd3-47d20f3724ac\") " Jan 22 12:47:56 crc kubenswrapper[4773]: I0122 12:47:56.462342 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/000a2229-b4ce-4b53-8fd3-47d20f3724ac-utilities\") pod \"000a2229-b4ce-4b53-8fd3-47d20f3724ac\" (UID: \"000a2229-b4ce-4b53-8fd3-47d20f3724ac\") " Jan 22 12:47:56 crc kubenswrapper[4773]: I0122 12:47:56.463330 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/000a2229-b4ce-4b53-8fd3-47d20f3724ac-utilities" (OuterVolumeSpecName: "utilities") pod "000a2229-b4ce-4b53-8fd3-47d20f3724ac" (UID: "000a2229-b4ce-4b53-8fd3-47d20f3724ac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:47:56 crc kubenswrapper[4773]: I0122 12:47:56.469190 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/000a2229-b4ce-4b53-8fd3-47d20f3724ac-kube-api-access-q29bv" (OuterVolumeSpecName: "kube-api-access-q29bv") pod "000a2229-b4ce-4b53-8fd3-47d20f3724ac" (UID: "000a2229-b4ce-4b53-8fd3-47d20f3724ac"). InnerVolumeSpecName "kube-api-access-q29bv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 12:47:56 crc kubenswrapper[4773]: I0122 12:47:56.508519 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/000a2229-b4ce-4b53-8fd3-47d20f3724ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "000a2229-b4ce-4b53-8fd3-47d20f3724ac" (UID: "000a2229-b4ce-4b53-8fd3-47d20f3724ac"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 12:47:56 crc kubenswrapper[4773]: I0122 12:47:56.564167 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q29bv\" (UniqueName: \"kubernetes.io/projected/000a2229-b4ce-4b53-8fd3-47d20f3724ac-kube-api-access-q29bv\") on node \"crc\" DevicePath \"\"" Jan 22 12:47:56 crc kubenswrapper[4773]: I0122 12:47:56.564201 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/000a2229-b4ce-4b53-8fd3-47d20f3724ac-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 12:47:56 crc kubenswrapper[4773]: I0122 12:47:56.564210 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/000a2229-b4ce-4b53-8fd3-47d20f3724ac-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 12:47:56 crc kubenswrapper[4773]: I0122 12:47:56.950184 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6cznt" event={"ID":"000a2229-b4ce-4b53-8fd3-47d20f3724ac","Type":"ContainerDied","Data":"8db62a47b944fadbab69ee77d7a3584ea961936ec0aa199f969013ca45315f0d"} Jan 22 12:47:56 crc kubenswrapper[4773]: I0122 12:47:56.950243 4773 scope.go:117] "RemoveContainer" containerID="f656b477297daace5034c5bf72ca58a4c0cd89814a37dde0318c8f6d2bf80d4f" Jan 22 12:47:56 crc kubenswrapper[4773]: I0122 12:47:56.950264 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6cznt" Jan 22 12:47:56 crc kubenswrapper[4773]: I0122 12:47:56.982576 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6cznt"] Jan 22 12:47:56 crc kubenswrapper[4773]: I0122 12:47:56.989278 4773 scope.go:117] "RemoveContainer" containerID="fe6fb20bdbbd06eab8656d4e2d0cf5117cb1867af381b98d6a588fa88a421e83" Jan 22 12:47:56 crc kubenswrapper[4773]: I0122 12:47:56.990194 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6cznt"] Jan 22 12:47:57 crc kubenswrapper[4773]: I0122 12:47:57.009735 4773 scope.go:117] "RemoveContainer" containerID="bf4509af1aaa9cc238ea4f61ba1b7588ea6d1aa02cd1cd63b0ba27de956bc1ad" Jan 22 12:47:58 crc kubenswrapper[4773]: I0122 12:47:58.674257 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="000a2229-b4ce-4b53-8fd3-47d20f3724ac" path="/var/lib/kubelet/pods/000a2229-b4ce-4b53-8fd3-47d20f3724ac/volumes" Jan 22 12:48:06 crc kubenswrapper[4773]: I0122 12:48:06.658803 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:48:06 crc kubenswrapper[4773]: E0122 12:48:06.659440 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:48:21 crc kubenswrapper[4773]: I0122 12:48:21.659068 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:48:21 crc kubenswrapper[4773]: E0122 12:48:21.659953 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:48:35 crc kubenswrapper[4773]: I0122 12:48:35.658501 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:48:35 crc kubenswrapper[4773]: E0122 12:48:35.659767 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:48:46 crc kubenswrapper[4773]: I0122 12:48:46.658302 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:48:46 crc kubenswrapper[4773]: E0122 12:48:46.659240 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:49:01 crc kubenswrapper[4773]: I0122 12:49:01.657921 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:49:01 crc kubenswrapper[4773]: E0122 12:49:01.658727 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 12:49:15 crc kubenswrapper[4773]: I0122 12:49:15.658622 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1" Jan 22 12:49:16 crc kubenswrapper[4773]: I0122 12:49:16.008522 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"3e314ba452c2666c9ba4edd4e18131bf4ed26d493a543dbc5797a9abec654fe6"} Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.223644 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-727wz"] Jan 22 12:50:11 crc kubenswrapper[4773]: E0122 12:50:11.224369 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="000a2229-b4ce-4b53-8fd3-47d20f3724ac" containerName="registry-server" Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.224381 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="000a2229-b4ce-4b53-8fd3-47d20f3724ac" containerName="registry-server" Jan 22 12:50:11 crc kubenswrapper[4773]: E0122 12:50:11.224397 4773 cpu_manager.go:410] "RemoveStaleState: removing container" 
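
The machine-config-daemon back-off saga resolves just above: the 12:49:15 "RemoveContainer" is, for the first time in this excerpt, not followed by an "Error syncing pod" denial, and one second later ContainerStarted reports a fresh container (3e314ba4...), so the 5-minute window had finally elapsed (it would have opened around 12:44, shortly before this excerpt's first denial at 12:45:14). A quick check of the cadence using timestamps from the records above:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        layout := "2006-01-02 15:04:05"
        first, _ := time.Parse(layout, "2026-01-22 12:45:14")   // first denial here
        last, _ := time.Parse(layout, "2026-01-22 12:49:01")    // last denial
        started, _ := time.Parse(layout, "2026-01-22 12:49:16") // ContainerStarted

        fmt.Println("denials span:", last.Sub(first))     // 3m47s of refusals
        fmt.Println("restart after:", started.Sub(first)) // 4m2s past the first denial,
        // consistent with a 5m back-off that began before this excerpt.
    }
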
podUID="e5e61b34-b44c-4d50-9e4d-b4baea93331f" containerName="extract-content" Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.224403 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5e61b34-b44c-4d50-9e4d-b4baea93331f" containerName="extract-content" Jan 22 12:50:11 crc kubenswrapper[4773]: E0122 12:50:11.224416 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5e61b34-b44c-4d50-9e4d-b4baea93331f" containerName="registry-server" Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.224422 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5e61b34-b44c-4d50-9e4d-b4baea93331f" containerName="registry-server" Jan 22 12:50:11 crc kubenswrapper[4773]: E0122 12:50:11.224436 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5e61b34-b44c-4d50-9e4d-b4baea93331f" containerName="extract-utilities" Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.224441 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5e61b34-b44c-4d50-9e4d-b4baea93331f" containerName="extract-utilities" Jan 22 12:50:11 crc kubenswrapper[4773]: E0122 12:50:11.224450 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="000a2229-b4ce-4b53-8fd3-47d20f3724ac" containerName="extract-content" Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.224455 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="000a2229-b4ce-4b53-8fd3-47d20f3724ac" containerName="extract-content" Jan 22 12:50:11 crc kubenswrapper[4773]: E0122 12:50:11.224466 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="000a2229-b4ce-4b53-8fd3-47d20f3724ac" containerName="extract-utilities" Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.224472 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="000a2229-b4ce-4b53-8fd3-47d20f3724ac" containerName="extract-utilities" Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.224603 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="000a2229-b4ce-4b53-8fd3-47d20f3724ac" containerName="registry-server" Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.224619 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5e61b34-b44c-4d50-9e4d-b4baea93331f" containerName="registry-server" Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.225631 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-727wz" Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.254863 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-727wz"] Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.294443 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfpjg\" (UniqueName: \"kubernetes.io/projected/5d191ec6-bd1c-4276-9df6-725b63892073-kube-api-access-zfpjg\") pod \"redhat-operators-727wz\" (UID: \"5d191ec6-bd1c-4276-9df6-725b63892073\") " pod="openshift-marketplace/redhat-operators-727wz" Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.294843 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d191ec6-bd1c-4276-9df6-725b63892073-catalog-content\") pod \"redhat-operators-727wz\" (UID: \"5d191ec6-bd1c-4276-9df6-725b63892073\") " pod="openshift-marketplace/redhat-operators-727wz" Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.294876 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d191ec6-bd1c-4276-9df6-725b63892073-utilities\") pod \"redhat-operators-727wz\" (UID: \"5d191ec6-bd1c-4276-9df6-725b63892073\") " pod="openshift-marketplace/redhat-operators-727wz" Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.396619 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfpjg\" (UniqueName: \"kubernetes.io/projected/5d191ec6-bd1c-4276-9df6-725b63892073-kube-api-access-zfpjg\") pod \"redhat-operators-727wz\" (UID: \"5d191ec6-bd1c-4276-9df6-725b63892073\") " pod="openshift-marketplace/redhat-operators-727wz" Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.396709 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d191ec6-bd1c-4276-9df6-725b63892073-catalog-content\") pod \"redhat-operators-727wz\" (UID: \"5d191ec6-bd1c-4276-9df6-725b63892073\") " pod="openshift-marketplace/redhat-operators-727wz" Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.396745 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d191ec6-bd1c-4276-9df6-725b63892073-utilities\") pod \"redhat-operators-727wz\" (UID: \"5d191ec6-bd1c-4276-9df6-725b63892073\") " pod="openshift-marketplace/redhat-operators-727wz" Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.397387 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d191ec6-bd1c-4276-9df6-725b63892073-utilities\") pod \"redhat-operators-727wz\" (UID: \"5d191ec6-bd1c-4276-9df6-725b63892073\") " pod="openshift-marketplace/redhat-operators-727wz" Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.397502 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d191ec6-bd1c-4276-9df6-725b63892073-catalog-content\") pod \"redhat-operators-727wz\" (UID: \"5d191ec6-bd1c-4276-9df6-725b63892073\") " pod="openshift-marketplace/redhat-operators-727wz" Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.418223 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-zfpjg\" (UniqueName: \"kubernetes.io/projected/5d191ec6-bd1c-4276-9df6-725b63892073-kube-api-access-zfpjg\") pod \"redhat-operators-727wz\" (UID: \"5d191ec6-bd1c-4276-9df6-725b63892073\") " pod="openshift-marketplace/redhat-operators-727wz" Jan 22 12:50:11 crc kubenswrapper[4773]: I0122 12:50:11.554407 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-727wz" Jan 22 12:50:12 crc kubenswrapper[4773]: I0122 12:50:12.001542 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-727wz"] Jan 22 12:50:12 crc kubenswrapper[4773]: I0122 12:50:12.506560 4773 generic.go:334] "Generic (PLEG): container finished" podID="5d191ec6-bd1c-4276-9df6-725b63892073" containerID="c0ca20823f5777f83b985ceabdcb48855c494953e4135e6ea9efec8bb5502ca4" exitCode=0 Jan 22 12:50:12 crc kubenswrapper[4773]: I0122 12:50:12.506623 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-727wz" event={"ID":"5d191ec6-bd1c-4276-9df6-725b63892073","Type":"ContainerDied","Data":"c0ca20823f5777f83b985ceabdcb48855c494953e4135e6ea9efec8bb5502ca4"} Jan 22 12:50:12 crc kubenswrapper[4773]: I0122 12:50:12.506816 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-727wz" event={"ID":"5d191ec6-bd1c-4276-9df6-725b63892073","Type":"ContainerStarted","Data":"bc4c092946a39c1d0e2e471ffa80d7e0880e6abbd8da8746e89504ac2c1ce1ff"} Jan 22 12:50:14 crc kubenswrapper[4773]: I0122 12:50:14.520749 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-727wz" event={"ID":"5d191ec6-bd1c-4276-9df6-725b63892073","Type":"ContainerStarted","Data":"26428e31a06d9cb3f71e0cea7ebe7605a6df110f5b08eba902a28f8fa80c83d8"} Jan 22 12:50:15 crc kubenswrapper[4773]: I0122 12:50:15.536817 4773 generic.go:334] "Generic (PLEG): container finished" podID="5d191ec6-bd1c-4276-9df6-725b63892073" containerID="26428e31a06d9cb3f71e0cea7ebe7605a6df110f5b08eba902a28f8fa80c83d8" exitCode=0 Jan 22 12:50:15 crc kubenswrapper[4773]: I0122 12:50:15.537328 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-727wz" event={"ID":"5d191ec6-bd1c-4276-9df6-725b63892073","Type":"ContainerDied","Data":"26428e31a06d9cb3f71e0cea7ebe7605a6df110f5b08eba902a28f8fa80c83d8"} Jan 22 12:50:16 crc kubenswrapper[4773]: I0122 12:50:16.545830 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-727wz" event={"ID":"5d191ec6-bd1c-4276-9df6-725b63892073","Type":"ContainerStarted","Data":"fb3142a3bb5e6e50072e289a3246af01fb6635b3d46fa2625ebc5226597eb6d1"} Jan 22 12:50:16 crc kubenswrapper[4773]: I0122 12:50:16.574829 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-727wz" podStartSLOduration=2.050958085 podStartE2EDuration="5.574769187s" podCreationTimestamp="2026-01-22 12:50:11 +0000 UTC" firstStartedPulling="2026-01-22 12:50:12.508027979 +0000 UTC m=+3320.086143804" lastFinishedPulling="2026-01-22 12:50:16.031839071 +0000 UTC m=+3323.609954906" observedRunningTime="2026-01-22 12:50:16.565870846 +0000 UTC m=+3324.143986671" watchObservedRunningTime="2026-01-22 12:50:16.574769187 +0000 UTC m=+3324.152885062" Jan 22 12:50:21 crc kubenswrapper[4773]: I0122 12:50:21.555006 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-727wz" 
Jan 22 12:50:21 crc kubenswrapper[4773]: I0122 12:50:21.555657 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-727wz"
Jan 22 12:50:22 crc kubenswrapper[4773]: I0122 12:50:22.612996 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-727wz" podUID="5d191ec6-bd1c-4276-9df6-725b63892073" containerName="registry-server" probeResult="failure" output=<
Jan 22 12:50:22 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s
Jan 22 12:50:22 crc kubenswrapper[4773]: >
Jan 22 12:50:31 crc kubenswrapper[4773]: I0122 12:50:31.625485 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-727wz"
Jan 22 12:50:31 crc kubenswrapper[4773]: I0122 12:50:31.678194 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-727wz"
Jan 22 12:50:31 crc kubenswrapper[4773]: I0122 12:50:31.872589 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-727wz"]
Jan 22 12:50:32 crc kubenswrapper[4773]: I0122 12:50:32.689514 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-727wz" podUID="5d191ec6-bd1c-4276-9df6-725b63892073" containerName="registry-server" containerID="cri-o://fb3142a3bb5e6e50072e289a3246af01fb6635b3d46fa2625ebc5226597eb6d1" gracePeriod=2
Jan 22 12:50:34 crc kubenswrapper[4773]: I0122 12:50:34.708010 4773 generic.go:334] "Generic (PLEG): container finished" podID="5d191ec6-bd1c-4276-9df6-725b63892073" containerID="fb3142a3bb5e6e50072e289a3246af01fb6635b3d46fa2625ebc5226597eb6d1" exitCode=0
Jan 22 12:50:34 crc kubenswrapper[4773]: I0122 12:50:34.708264 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-727wz" event={"ID":"5d191ec6-bd1c-4276-9df6-725b63892073","Type":"ContainerDied","Data":"fb3142a3bb5e6e50072e289a3246af01fb6635b3d46fa2625ebc5226597eb6d1"}
Jan 22 12:50:34 crc kubenswrapper[4773]: I0122 12:50:34.909356 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-727wz"
Jan 22 12:50:34 crc kubenswrapper[4773]: I0122 12:50:34.961025 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d191ec6-bd1c-4276-9df6-725b63892073-utilities\") pod \"5d191ec6-bd1c-4276-9df6-725b63892073\" (UID: \"5d191ec6-bd1c-4276-9df6-725b63892073\") "
Jan 22 12:50:34 crc kubenswrapper[4773]: I0122 12:50:34.962437 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfpjg\" (UniqueName: \"kubernetes.io/projected/5d191ec6-bd1c-4276-9df6-725b63892073-kube-api-access-zfpjg\") pod \"5d191ec6-bd1c-4276-9df6-725b63892073\" (UID: \"5d191ec6-bd1c-4276-9df6-725b63892073\") "
Jan 22 12:50:34 crc kubenswrapper[4773]: I0122 12:50:34.962322 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d191ec6-bd1c-4276-9df6-725b63892073-utilities" (OuterVolumeSpecName: "utilities") pod "5d191ec6-bd1c-4276-9df6-725b63892073" (UID: "5d191ec6-bd1c-4276-9df6-725b63892073"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:50:34 crc kubenswrapper[4773]: I0122 12:50:34.963387 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d191ec6-bd1c-4276-9df6-725b63892073-catalog-content\") pod \"5d191ec6-bd1c-4276-9df6-725b63892073\" (UID: \"5d191ec6-bd1c-4276-9df6-725b63892073\") "
Jan 22 12:50:34 crc kubenswrapper[4773]: I0122 12:50:34.963841 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d191ec6-bd1c-4276-9df6-725b63892073-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 12:50:34 crc kubenswrapper[4773]: I0122 12:50:34.968871 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d191ec6-bd1c-4276-9df6-725b63892073-kube-api-access-zfpjg" (OuterVolumeSpecName: "kube-api-access-zfpjg") pod "5d191ec6-bd1c-4276-9df6-725b63892073" (UID: "5d191ec6-bd1c-4276-9df6-725b63892073"). InnerVolumeSpecName "kube-api-access-zfpjg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:50:35 crc kubenswrapper[4773]: I0122 12:50:35.065228 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfpjg\" (UniqueName: \"kubernetes.io/projected/5d191ec6-bd1c-4276-9df6-725b63892073-kube-api-access-zfpjg\") on node \"crc\" DevicePath \"\""
Jan 22 12:50:35 crc kubenswrapper[4773]: I0122 12:50:35.109308 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d191ec6-bd1c-4276-9df6-725b63892073-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5d191ec6-bd1c-4276-9df6-725b63892073" (UID: "5d191ec6-bd1c-4276-9df6-725b63892073"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:50:35 crc kubenswrapper[4773]: I0122 12:50:35.166253 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d191ec6-bd1c-4276-9df6-725b63892073-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 12:50:35 crc kubenswrapper[4773]: I0122 12:50:35.717795 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-727wz" event={"ID":"5d191ec6-bd1c-4276-9df6-725b63892073","Type":"ContainerDied","Data":"bc4c092946a39c1d0e2e471ffa80d7e0880e6abbd8da8746e89504ac2c1ce1ff"}
Jan 22 12:50:35 crc kubenswrapper[4773]: I0122 12:50:35.718183 4773 scope.go:117] "RemoveContainer" containerID="fb3142a3bb5e6e50072e289a3246af01fb6635b3d46fa2625ebc5226597eb6d1"
Jan 22 12:50:35 crc kubenswrapper[4773]: I0122 12:50:35.718117 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-727wz"
Jan 22 12:50:35 crc kubenswrapper[4773]: I0122 12:50:35.746847 4773 scope.go:117] "RemoveContainer" containerID="26428e31a06d9cb3f71e0cea7ebe7605a6df110f5b08eba902a28f8fa80c83d8"
Jan 22 12:50:35 crc kubenswrapper[4773]: I0122 12:50:35.760319 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-727wz"]
Jan 22 12:50:35 crc kubenswrapper[4773]: I0122 12:50:35.767720 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-727wz"]
Jan 22 12:50:35 crc kubenswrapper[4773]: I0122 12:50:35.796251 4773 scope.go:117] "RemoveContainer" containerID="c0ca20823f5777f83b985ceabdcb48855c494953e4135e6ea9efec8bb5502ca4"
Jan 22 12:50:36 crc kubenswrapper[4773]: I0122 12:50:36.682136 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d191ec6-bd1c-4276-9df6-725b63892073" path="/var/lib/kubelet/pods/5d191ec6-bd1c-4276-9df6-725b63892073/volumes"
Jan 22 12:51:34 crc kubenswrapper[4773]: I0122 12:51:34.075031 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 12:51:34 crc kubenswrapper[4773]: I0122 12:51:34.075831 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 12:52:04 crc kubenswrapper[4773]: I0122 12:52:04.074386 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 12:52:04 crc kubenswrapper[4773]: I0122 12:52:04.075140 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 12:52:34 crc kubenswrapper[4773]: I0122 12:52:34.074667 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 12:52:34 crc kubenswrapper[4773]: I0122 12:52:34.075431 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 12:52:34 crc kubenswrapper[4773]: I0122 12:52:34.075505 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5"
Jan 22 12:52:34 crc kubenswrapper[4773]: I0122 12:52:34.076304 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3e314ba452c2666c9ba4edd4e18131bf4ed26d493a543dbc5797a9abec654fe6"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 22 12:52:34 crc kubenswrapper[4773]: I0122 12:52:34.076404 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://3e314ba452c2666c9ba4edd4e18131bf4ed26d493a543dbc5797a9abec654fe6" gracePeriod=600
Jan 22 12:52:35 crc kubenswrapper[4773]: I0122 12:52:35.115684 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="3e314ba452c2666c9ba4edd4e18131bf4ed26d493a543dbc5797a9abec654fe6" exitCode=0
Jan 22 12:52:35 crc kubenswrapper[4773]: I0122 12:52:35.115803 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"3e314ba452c2666c9ba4edd4e18131bf4ed26d493a543dbc5797a9abec654fe6"}
Jan 22 12:52:35 crc kubenswrapper[4773]: I0122 12:52:35.116272 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"}
Jan 22 12:52:35 crc kubenswrapper[4773]: I0122 12:52:35.116386 4773 scope.go:117] "RemoveContainer" containerID="aac6e05d8ba2dd7bfed8e737efb1a6c1db5a5d09d0d036adfb1f47c9d091b0c1"
Jan 22 12:54:34 crc kubenswrapper[4773]: I0122 12:54:34.074588 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 12:54:34 crc kubenswrapper[4773]: I0122 12:54:34.075328 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 12:55:04 crc kubenswrapper[4773]: I0122 12:55:04.074636 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 12:55:04 crc kubenswrapper[4773]: I0122 12:55:04.075345 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 12:55:34 crc kubenswrapper[4773]: I0122 12:55:34.074790 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 12:55:34 crc kubenswrapper[4773]: I0122 12:55:34.075545 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 12:55:34 crc kubenswrapper[4773]: I0122 12:55:34.075667 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5"
Jan 22 12:55:34 crc kubenswrapper[4773]: I0122 12:55:34.076849 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 22 12:55:34 crc kubenswrapper[4773]: I0122 12:55:34.077015 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9" gracePeriod=600
Jan 22 12:55:34 crc kubenswrapper[4773]: E0122 12:55:34.232587 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:55:34 crc kubenswrapper[4773]: I0122 12:55:34.904882 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9" exitCode=0
Jan 22 12:55:34 crc kubenswrapper[4773]: I0122 12:55:34.904965 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"}
Jan 22 12:55:34 crc kubenswrapper[4773]: I0122 12:55:34.905948 4773 scope.go:117] "RemoveContainer" containerID="3e314ba452c2666c9ba4edd4e18131bf4ed26d493a543dbc5797a9abec654fe6"
Jan 22 12:55:34 crc kubenswrapper[4773]: I0122 12:55:34.906331 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:55:34 crc kubenswrapper[4773]: E0122 12:55:34.906564 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:55:47 crc kubenswrapper[4773]: I0122 12:55:47.657669 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:55:47 crc kubenswrapper[4773]: E0122 12:55:47.658515 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:56:01 crc kubenswrapper[4773]: I0122 12:56:01.657711 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:56:01 crc kubenswrapper[4773]: E0122 12:56:01.658401 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:56:15 crc kubenswrapper[4773]: I0122 12:56:15.658663 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:56:15 crc kubenswrapper[4773]: E0122 12:56:15.661136 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:56:26 crc kubenswrapper[4773]: I0122 12:56:26.657796 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:56:26 crc kubenswrapper[4773]: E0122 12:56:26.658766 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:56:40 crc kubenswrapper[4773]: I0122 12:56:40.657919 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:56:40 crc kubenswrapper[4773]: E0122 12:56:40.659859 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:56:52 crc kubenswrapper[4773]: I0122 12:56:52.662057 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:56:52 crc kubenswrapper[4773]: E0122 12:56:52.663894 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:57:05 crc kubenswrapper[4773]: I0122 12:57:05.659980 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:57:05 crc kubenswrapper[4773]: E0122 12:57:05.661125 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:57:18 crc kubenswrapper[4773]: I0122 12:57:18.658948 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:57:18 crc kubenswrapper[4773]: E0122 12:57:18.660023 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:57:29 crc kubenswrapper[4773]: I0122 12:57:29.658597 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:57:29 crc kubenswrapper[4773]: E0122 12:57:29.661410 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:57:43 crc kubenswrapper[4773]: I0122 12:57:43.658460 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:57:43 crc kubenswrapper[4773]: E0122 12:57:43.659373 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:57:57 crc kubenswrapper[4773]: I0122 12:57:57.658431 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:57:57 crc kubenswrapper[4773]: E0122 12:57:57.659616 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:58:03 crc kubenswrapper[4773]: I0122 12:58:03.925515 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2jkkr"]
Jan 22 12:58:03 crc kubenswrapper[4773]: E0122 12:58:03.926528 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d191ec6-bd1c-4276-9df6-725b63892073" containerName="extract-content"
Jan 22 12:58:03 crc kubenswrapper[4773]: I0122 12:58:03.926546 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d191ec6-bd1c-4276-9df6-725b63892073" containerName="extract-content"
Jan 22 12:58:03 crc kubenswrapper[4773]: E0122 12:58:03.926581 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d191ec6-bd1c-4276-9df6-725b63892073" containerName="registry-server"
Jan 22 12:58:03 crc kubenswrapper[4773]: I0122 12:58:03.926589 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d191ec6-bd1c-4276-9df6-725b63892073" containerName="registry-server"
Jan 22 12:58:03 crc kubenswrapper[4773]: E0122 12:58:03.926609 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d191ec6-bd1c-4276-9df6-725b63892073" containerName="extract-utilities"
Jan 22 12:58:03 crc kubenswrapper[4773]: I0122 12:58:03.926618 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d191ec6-bd1c-4276-9df6-725b63892073" containerName="extract-utilities"
Jan 22 12:58:03 crc kubenswrapper[4773]: I0122 12:58:03.926812 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d191ec6-bd1c-4276-9df6-725b63892073" containerName="registry-server"
Jan 22 12:58:03 crc kubenswrapper[4773]: I0122 12:58:03.928162 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2jkkr"
Jan 22 12:58:03 crc kubenswrapper[4773]: I0122 12:58:03.950228 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2jkkr"]
Jan 22 12:58:04 crc kubenswrapper[4773]: I0122 12:58:04.027199 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9-utilities\") pod \"community-operators-2jkkr\" (UID: \"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9\") " pod="openshift-marketplace/community-operators-2jkkr"
Jan 22 12:58:04 crc kubenswrapper[4773]: I0122 12:58:04.027261 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9-catalog-content\") pod \"community-operators-2jkkr\" (UID: \"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9\") " pod="openshift-marketplace/community-operators-2jkkr"
Jan 22 12:58:04 crc kubenswrapper[4773]: I0122 12:58:04.027570 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncqsq\" (UniqueName: \"kubernetes.io/projected/0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9-kube-api-access-ncqsq\") pod \"community-operators-2jkkr\" (UID: \"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9\") " pod="openshift-marketplace/community-operators-2jkkr"
Jan 22 12:58:04 crc kubenswrapper[4773]: I0122 12:58:04.128902 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncqsq\" (UniqueName: \"kubernetes.io/projected/0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9-kube-api-access-ncqsq\") pod \"community-operators-2jkkr\" (UID: \"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9\") " pod="openshift-marketplace/community-operators-2jkkr"
Jan 22 12:58:04 crc kubenswrapper[4773]: I0122 12:58:04.128971 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9-utilities\") pod \"community-operators-2jkkr\" (UID: \"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9\") " pod="openshift-marketplace/community-operators-2jkkr"
Jan 22 12:58:04 crc kubenswrapper[4773]: I0122 12:58:04.129003 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9-catalog-content\") pod \"community-operators-2jkkr\" (UID: \"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9\") " pod="openshift-marketplace/community-operators-2jkkr"
Jan 22 12:58:04 crc kubenswrapper[4773]: I0122 12:58:04.129512 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9-catalog-content\") pod \"community-operators-2jkkr\" (UID: \"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9\") " pod="openshift-marketplace/community-operators-2jkkr"
Jan 22 12:58:04 crc kubenswrapper[4773]: I0122 12:58:04.129813 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9-utilities\") pod \"community-operators-2jkkr\" (UID: \"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9\") " pod="openshift-marketplace/community-operators-2jkkr"
Jan 22 12:58:04 crc kubenswrapper[4773]: I0122 12:58:04.154719 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncqsq\" (UniqueName: \"kubernetes.io/projected/0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9-kube-api-access-ncqsq\") pod \"community-operators-2jkkr\" (UID: \"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9\") " pod="openshift-marketplace/community-operators-2jkkr"
Jan 22 12:58:04 crc kubenswrapper[4773]: I0122 12:58:04.259791 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2jkkr"
Jan 22 12:58:04 crc kubenswrapper[4773]: I0122 12:58:04.792571 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2jkkr"]
Jan 22 12:58:05 crc kubenswrapper[4773]: I0122 12:58:05.766795 4773 generic.go:334] "Generic (PLEG): container finished" podID="0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9" containerID="a576408fda29a4d2090e9a2a4eb1f76a91b0a3c2e3cb6ce8892b74085a07e414" exitCode=0
Jan 22 12:58:05 crc kubenswrapper[4773]: I0122 12:58:05.766924 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2jkkr" event={"ID":"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9","Type":"ContainerDied","Data":"a576408fda29a4d2090e9a2a4eb1f76a91b0a3c2e3cb6ce8892b74085a07e414"}
Jan 22 12:58:05 crc kubenswrapper[4773]: I0122 12:58:05.767127 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2jkkr" event={"ID":"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9","Type":"ContainerStarted","Data":"d207242e0612756ce5a9be70e54d13e8c7379128fba6790573e1b6b3e8c4d86b"}
Jan 22 12:58:05 crc kubenswrapper[4773]: I0122 12:58:05.768942 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 22 12:58:06 crc kubenswrapper[4773]: I0122 12:58:06.776670 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2jkkr" event={"ID":"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9","Type":"ContainerStarted","Data":"53d06a58d83ec88c6d34ff8b21406267ef2b9ed07d9d3255e4dd018bb6e36b64"}
Jan 22 12:58:07 crc kubenswrapper[4773]: I0122 12:58:07.789979 4773 generic.go:334] "Generic (PLEG): container finished" podID="0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9" containerID="53d06a58d83ec88c6d34ff8b21406267ef2b9ed07d9d3255e4dd018bb6e36b64" exitCode=0
Jan 22 12:58:07 crc kubenswrapper[4773]: I0122 12:58:07.790054 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2jkkr" event={"ID":"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9","Type":"ContainerDied","Data":"53d06a58d83ec88c6d34ff8b21406267ef2b9ed07d9d3255e4dd018bb6e36b64"}
Jan 22 12:58:08 crc kubenswrapper[4773]: I0122 12:58:08.804735 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2jkkr" event={"ID":"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9","Type":"ContainerStarted","Data":"e062e6225cc0b457b90d745e88def067b30e786cbd456912d803d356eb7f9878"}
Jan 22 12:58:08 crc kubenswrapper[4773]: I0122 12:58:08.842687 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2jkkr" podStartSLOduration=3.416683864 podStartE2EDuration="5.842622774s" podCreationTimestamp="2026-01-22 12:58:03 +0000 UTC" firstStartedPulling="2026-01-22 12:58:05.768479306 +0000 UTC m=+3793.346595171" lastFinishedPulling="2026-01-22 12:58:08.194418256 +0000 UTC m=+3795.772534081" observedRunningTime="2026-01-22 12:58:08.829879314 +0000 UTC m=+3796.407995179" watchObservedRunningTime="2026-01-22 12:58:08.842622774 +0000 UTC m=+3796.420738629"
Jan 22 12:58:10 crc kubenswrapper[4773]: I0122 12:58:10.658761 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:58:10 crc kubenswrapper[4773]: E0122 12:58:10.659580 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:58:14 crc kubenswrapper[4773]: I0122 12:58:14.260358 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2jkkr"
Jan 22 12:58:14 crc kubenswrapper[4773]: I0122 12:58:14.261499 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2jkkr"
Jan 22 12:58:14 crc kubenswrapper[4773]: I0122 12:58:14.332053 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2jkkr"
Jan 22 12:58:14 crc kubenswrapper[4773]: I0122 12:58:14.902931 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2jkkr"
Jan 22 12:58:14 crc kubenswrapper[4773]: I0122 12:58:14.963476 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2jkkr"]
Jan 22 12:58:16 crc kubenswrapper[4773]: I0122 12:58:16.863727 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2jkkr" podUID="0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9" containerName="registry-server" containerID="cri-o://e062e6225cc0b457b90d745e88def067b30e786cbd456912d803d356eb7f9878" gracePeriod=2
Jan 22 12:58:17 crc kubenswrapper[4773]: I0122 12:58:17.877800 4773 generic.go:334] "Generic (PLEG): container finished" podID="0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9" containerID="e062e6225cc0b457b90d745e88def067b30e786cbd456912d803d356eb7f9878" exitCode=0
Jan 22 12:58:17 crc kubenswrapper[4773]: I0122 12:58:17.877874 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2jkkr" event={"ID":"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9","Type":"ContainerDied","Data":"e062e6225cc0b457b90d745e88def067b30e786cbd456912d803d356eb7f9878"}
Jan 22 12:58:18 crc kubenswrapper[4773]: I0122 12:58:18.390181 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2jkkr"
Jan 22 12:58:18 crc kubenswrapper[4773]: I0122 12:58:18.458253 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9-utilities\") pod \"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9\" (UID: \"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9\") "
Jan 22 12:58:18 crc kubenswrapper[4773]: I0122 12:58:18.458473 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9-catalog-content\") pod \"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9\" (UID: \"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9\") "
Jan 22 12:58:18 crc kubenswrapper[4773]: I0122 12:58:18.458524 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncqsq\" (UniqueName: \"kubernetes.io/projected/0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9-kube-api-access-ncqsq\") pod \"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9\" (UID: \"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9\") "
Jan 22 12:58:18 crc kubenswrapper[4773]: I0122 12:58:18.459410 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9-utilities" (OuterVolumeSpecName: "utilities") pod "0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9" (UID: "0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:58:18 crc kubenswrapper[4773]: I0122 12:58:18.466125 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9-kube-api-access-ncqsq" (OuterVolumeSpecName: "kube-api-access-ncqsq") pod "0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9" (UID: "0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9"). InnerVolumeSpecName "kube-api-access-ncqsq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 12:58:18 crc kubenswrapper[4773]: I0122 12:58:18.516653 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9" (UID: "0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 12:58:18 crc kubenswrapper[4773]: I0122 12:58:18.559990 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 12:58:18 crc kubenswrapper[4773]: I0122 12:58:18.560038 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncqsq\" (UniqueName: \"kubernetes.io/projected/0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9-kube-api-access-ncqsq\") on node \"crc\" DevicePath \"\""
Jan 22 12:58:18 crc kubenswrapper[4773]: I0122 12:58:18.560056 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 12:58:18 crc kubenswrapper[4773]: I0122 12:58:18.892869 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2jkkr" event={"ID":"0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9","Type":"ContainerDied","Data":"d207242e0612756ce5a9be70e54d13e8c7379128fba6790573e1b6b3e8c4d86b"}
Jan 22 12:58:18 crc kubenswrapper[4773]: I0122 12:58:18.892939 4773 scope.go:117] "RemoveContainer" containerID="e062e6225cc0b457b90d745e88def067b30e786cbd456912d803d356eb7f9878"
Jan 22 12:58:18 crc kubenswrapper[4773]: I0122 12:58:18.892977 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2jkkr"
Jan 22 12:58:18 crc kubenswrapper[4773]: I0122 12:58:18.919625 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2jkkr"]
Jan 22 12:58:18 crc kubenswrapper[4773]: I0122 12:58:18.927957 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2jkkr"]
Jan 22 12:58:18 crc kubenswrapper[4773]: I0122 12:58:18.932737 4773 scope.go:117] "RemoveContainer" containerID="53d06a58d83ec88c6d34ff8b21406267ef2b9ed07d9d3255e4dd018bb6e36b64"
Jan 22 12:58:18 crc kubenswrapper[4773]: I0122 12:58:18.957136 4773 scope.go:117] "RemoveContainer" containerID="a576408fda29a4d2090e9a2a4eb1f76a91b0a3c2e3cb6ce8892b74085a07e414"
Jan 22 12:58:20 crc kubenswrapper[4773]: I0122 12:58:20.672250 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9" path="/var/lib/kubelet/pods/0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9/volumes"
Jan 22 12:58:24 crc kubenswrapper[4773]: I0122 12:58:24.658633 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:58:24 crc kubenswrapper[4773]: E0122 12:58:24.659569 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:58:37 crc kubenswrapper[4773]: I0122 12:58:37.658056 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:58:37 crc kubenswrapper[4773]: E0122 12:58:37.658782 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:58:51 crc kubenswrapper[4773]: I0122 12:58:51.658657 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:58:51 crc kubenswrapper[4773]: E0122 12:58:51.659779 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:59:06 crc kubenswrapper[4773]: I0122 12:59:06.662239 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:59:06 crc kubenswrapper[4773]: E0122 12:59:06.662918 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:59:19 crc kubenswrapper[4773]: I0122 12:59:19.658700 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:59:19 crc kubenswrapper[4773]: E0122 12:59:19.661091 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:59:31 crc kubenswrapper[4773]: I0122 12:59:31.659195 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:59:31 crc kubenswrapper[4773]: E0122 12:59:31.659944 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:59:43 crc kubenswrapper[4773]: I0122 12:59:43.658144 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:59:43 crc kubenswrapper[4773]: E0122 12:59:43.659219 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 12:59:56 crc kubenswrapper[4773]: I0122 12:59:56.658508 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9"
Jan 22 12:59:56 crc kubenswrapper[4773]: E0122 12:59:56.659252 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.186377 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb"]
Jan 22 13:00:00 crc kubenswrapper[4773]: E0122 13:00:00.187484 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9" containerName="extract-content"
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.187503 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9" containerName="extract-content"
Jan 22 13:00:00 crc kubenswrapper[4773]: E0122 13:00:00.187517 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9" containerName="registry-server"
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.187525 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9" containerName="registry-server"
Jan 22 13:00:00 crc kubenswrapper[4773]: E0122 13:00:00.187557 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9" containerName="extract-utilities"
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.187564 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9" containerName="extract-utilities"
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.187761 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e6318b9-ff1b-4d50-ae9c-0ca22900dfa9" containerName="registry-server"
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.188519 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb"
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.190578 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.191127 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.194104 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb"]
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.266863 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/91935200-38fc-41c7-ac2c-af16ef175984-secret-volume\") pod \"collect-profiles-29484780-9bdxb\" (UID: \"91935200-38fc-41c7-ac2c-af16ef175984\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb"
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.267233 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/91935200-38fc-41c7-ac2c-af16ef175984-config-volume\") pod \"collect-profiles-29484780-9bdxb\" (UID: \"91935200-38fc-41c7-ac2c-af16ef175984\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb"
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.267311 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cz92p\" (UniqueName: \"kubernetes.io/projected/91935200-38fc-41c7-ac2c-af16ef175984-kube-api-access-cz92p\") pod \"collect-profiles-29484780-9bdxb\" (UID: \"91935200-38fc-41c7-ac2c-af16ef175984\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb"
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.368043 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/91935200-38fc-41c7-ac2c-af16ef175984-secret-volume\") pod \"collect-profiles-29484780-9bdxb\" (UID: \"91935200-38fc-41c7-ac2c-af16ef175984\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb"
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.368105 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/91935200-38fc-41c7-ac2c-af16ef175984-config-volume\") pod \"collect-profiles-29484780-9bdxb\" (UID: \"91935200-38fc-41c7-ac2c-af16ef175984\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb"
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.368149 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cz92p\" (UniqueName: \"kubernetes.io/projected/91935200-38fc-41c7-ac2c-af16ef175984-kube-api-access-cz92p\") pod \"collect-profiles-29484780-9bdxb\" (UID: \"91935200-38fc-41c7-ac2c-af16ef175984\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb"
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.369576 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/91935200-38fc-41c7-ac2c-af16ef175984-config-volume\") pod \"collect-profiles-29484780-9bdxb\" (UID: \"91935200-38fc-41c7-ac2c-af16ef175984\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb"
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.375759 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/91935200-38fc-41c7-ac2c-af16ef175984-secret-volume\") pod \"collect-profiles-29484780-9bdxb\" (UID: \"91935200-38fc-41c7-ac2c-af16ef175984\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb"
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.389352 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cz92p\" (UniqueName: \"kubernetes.io/projected/91935200-38fc-41c7-ac2c-af16ef175984-kube-api-access-cz92p\") pod \"collect-profiles-29484780-9bdxb\" (UID: \"91935200-38fc-41c7-ac2c-af16ef175984\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb"
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.517550 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb"
Jan 22 13:00:00 crc kubenswrapper[4773]: I0122 13:00:00.978129 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb"]
Jan 22 13:00:00 crc kubenswrapper[4773]: W0122 13:00:00.982828 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod91935200_38fc_41c7_ac2c_af16ef175984.slice/crio-fddca0a080a762814049941ffeaf01ff59b106f3d0ea761421862e8472082cfe WatchSource:0}: Error finding container fddca0a080a762814049941ffeaf01ff59b106f3d0ea761421862e8472082cfe: Status 404 returned error can't find the container with id fddca0a080a762814049941ffeaf01ff59b106f3d0ea761421862e8472082cfe
Jan 22 13:00:01 crc kubenswrapper[4773]: I0122 13:00:01.121047 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb" event={"ID":"91935200-38fc-41c7-ac2c-af16ef175984","Type":"ContainerStarted","Data":"fddca0a080a762814049941ffeaf01ff59b106f3d0ea761421862e8472082cfe"}
Jan 22 13:00:02 crc kubenswrapper[4773]: I0122 13:00:02.137526 4773 generic.go:334] "Generic (PLEG): container finished" podID="91935200-38fc-41c7-ac2c-af16ef175984" containerID="f3fa9825be045e2cf587331db081a69f769821a9699a4a66fdeae97704bd3d26" exitCode=0
Jan 22 13:00:02 crc kubenswrapper[4773]: I0122 13:00:02.137742 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb" event={"ID":"91935200-38fc-41c7-ac2c-af16ef175984","Type":"ContainerDied","Data":"f3fa9825be045e2cf587331db081a69f769821a9699a4a66fdeae97704bd3d26"}
Jan 22 13:00:03 crc kubenswrapper[4773]: I0122 13:00:03.455482 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb"
Jan 22 13:00:03 crc kubenswrapper[4773]: I0122 13:00:03.542269 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cz92p\" (UniqueName: \"kubernetes.io/projected/91935200-38fc-41c7-ac2c-af16ef175984-kube-api-access-cz92p\") pod \"91935200-38fc-41c7-ac2c-af16ef175984\" (UID: \"91935200-38fc-41c7-ac2c-af16ef175984\") "
Jan 22 13:00:03 crc kubenswrapper[4773]: I0122 13:00:03.542345 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/91935200-38fc-41c7-ac2c-af16ef175984-config-volume\") pod \"91935200-38fc-41c7-ac2c-af16ef175984\" (UID: \"91935200-38fc-41c7-ac2c-af16ef175984\") "
Jan 22 13:00:03 crc kubenswrapper[4773]: I0122 13:00:03.542432 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/91935200-38fc-41c7-ac2c-af16ef175984-secret-volume\") pod \"91935200-38fc-41c7-ac2c-af16ef175984\" (UID: \"91935200-38fc-41c7-ac2c-af16ef175984\") "
Jan 22 13:00:03 crc kubenswrapper[4773]: I0122 13:00:03.544187 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91935200-38fc-41c7-ac2c-af16ef175984-config-volume" (OuterVolumeSpecName: "config-volume") pod "91935200-38fc-41c7-ac2c-af16ef175984" (UID: "91935200-38fc-41c7-ac2c-af16ef175984"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 13:00:03 crc kubenswrapper[4773]: I0122 13:00:03.597726 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91935200-38fc-41c7-ac2c-af16ef175984-kube-api-access-cz92p" (OuterVolumeSpecName: "kube-api-access-cz92p") pod "91935200-38fc-41c7-ac2c-af16ef175984" (UID: "91935200-38fc-41c7-ac2c-af16ef175984"). InnerVolumeSpecName "kube-api-access-cz92p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 13:00:03 crc kubenswrapper[4773]: I0122 13:00:03.597840 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91935200-38fc-41c7-ac2c-af16ef175984-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "91935200-38fc-41c7-ac2c-af16ef175984" (UID: "91935200-38fc-41c7-ac2c-af16ef175984"). InnerVolumeSpecName "secret-volume".
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:00:03 crc kubenswrapper[4773]: I0122 13:00:03.643776 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cz92p\" (UniqueName: \"kubernetes.io/projected/91935200-38fc-41c7-ac2c-af16ef175984-kube-api-access-cz92p\") on node \"crc\" DevicePath \"\"" Jan 22 13:00:03 crc kubenswrapper[4773]: I0122 13:00:03.643807 4773 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/91935200-38fc-41c7-ac2c-af16ef175984-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 13:00:03 crc kubenswrapper[4773]: I0122 13:00:03.643815 4773 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/91935200-38fc-41c7-ac2c-af16ef175984-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 13:00:04 crc kubenswrapper[4773]: I0122 13:00:04.151410 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb" event={"ID":"91935200-38fc-41c7-ac2c-af16ef175984","Type":"ContainerDied","Data":"fddca0a080a762814049941ffeaf01ff59b106f3d0ea761421862e8472082cfe"} Jan 22 13:00:04 crc kubenswrapper[4773]: I0122 13:00:04.151480 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fddca0a080a762814049941ffeaf01ff59b106f3d0ea761421862e8472082cfe" Jan 22 13:00:04 crc kubenswrapper[4773]: I0122 13:00:04.151481 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb" Jan 22 13:00:04 crc kubenswrapper[4773]: I0122 13:00:04.546972 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj"] Jan 22 13:00:04 crc kubenswrapper[4773]: I0122 13:00:04.552446 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484735-f2bcj"] Jan 22 13:00:04 crc kubenswrapper[4773]: I0122 13:00:04.680515 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e288fcc-8bcb-4e2e-83fe-c17aae51ad38" path="/var/lib/kubelet/pods/9e288fcc-8bcb-4e2e-83fe-c17aae51ad38/volumes" Jan 22 13:00:09 crc kubenswrapper[4773]: I0122 13:00:09.658925 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9" Jan 22 13:00:09 crc kubenswrapper[4773]: E0122 13:00:09.659789 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:00:21 crc kubenswrapper[4773]: I0122 13:00:21.657903 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9" Jan 22 13:00:21 crc kubenswrapper[4773]: E0122 13:00:21.658819 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:00:36 crc kubenswrapper[4773]: I0122 13:00:36.660082 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9" Jan 22 13:00:37 crc kubenswrapper[4773]: I0122 13:00:37.404522 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"3e4c557e60566fa7490dab6f394b04defbcc9627b9ce632b202bd8c9b884a9dd"} Jan 22 13:01:03 crc kubenswrapper[4773]: I0122 13:01:03.627229 4773 scope.go:117] "RemoveContainer" containerID="1338b4148bfcdb8e0283799088b4f0f13bb8d6a872000ea98faf82042d881516" Jan 22 13:01:11 crc kubenswrapper[4773]: I0122 13:01:11.855765 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hkspc"] Jan 22 13:01:11 crc kubenswrapper[4773]: E0122 13:01:11.857489 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91935200-38fc-41c7-ac2c-af16ef175984" containerName="collect-profiles" Jan 22 13:01:11 crc kubenswrapper[4773]: I0122 13:01:11.857527 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="91935200-38fc-41c7-ac2c-af16ef175984" containerName="collect-profiles" Jan 22 13:01:11 crc kubenswrapper[4773]: I0122 13:01:11.857917 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="91935200-38fc-41c7-ac2c-af16ef175984" containerName="collect-profiles" Jan 22 13:01:11 crc kubenswrapper[4773]: I0122 13:01:11.860008 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hkspc" Jan 22 13:01:11 crc kubenswrapper[4773]: I0122 13:01:11.897961 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hkspc"] Jan 22 13:01:11 crc kubenswrapper[4773]: I0122 13:01:11.923502 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bnjd\" (UniqueName: \"kubernetes.io/projected/2ff075b1-5585-45af-ae5c-35b8858ef0d6-kube-api-access-9bnjd\") pod \"redhat-operators-hkspc\" (UID: \"2ff075b1-5585-45af-ae5c-35b8858ef0d6\") " pod="openshift-marketplace/redhat-operators-hkspc" Jan 22 13:01:11 crc kubenswrapper[4773]: I0122 13:01:11.923598 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ff075b1-5585-45af-ae5c-35b8858ef0d6-utilities\") pod \"redhat-operators-hkspc\" (UID: \"2ff075b1-5585-45af-ae5c-35b8858ef0d6\") " pod="openshift-marketplace/redhat-operators-hkspc" Jan 22 13:01:11 crc kubenswrapper[4773]: I0122 13:01:11.923731 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ff075b1-5585-45af-ae5c-35b8858ef0d6-catalog-content\") pod \"redhat-operators-hkspc\" (UID: \"2ff075b1-5585-45af-ae5c-35b8858ef0d6\") " pod="openshift-marketplace/redhat-operators-hkspc" Jan 22 13:01:12 crc kubenswrapper[4773]: I0122 13:01:12.025371 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ff075b1-5585-45af-ae5c-35b8858ef0d6-catalog-content\") pod \"redhat-operators-hkspc\" (UID: \"2ff075b1-5585-45af-ae5c-35b8858ef0d6\") " 
pod="openshift-marketplace/redhat-operators-hkspc" Jan 22 13:01:12 crc kubenswrapper[4773]: I0122 13:01:12.025485 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bnjd\" (UniqueName: \"kubernetes.io/projected/2ff075b1-5585-45af-ae5c-35b8858ef0d6-kube-api-access-9bnjd\") pod \"redhat-operators-hkspc\" (UID: \"2ff075b1-5585-45af-ae5c-35b8858ef0d6\") " pod="openshift-marketplace/redhat-operators-hkspc" Jan 22 13:01:12 crc kubenswrapper[4773]: I0122 13:01:12.025571 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ff075b1-5585-45af-ae5c-35b8858ef0d6-utilities\") pod \"redhat-operators-hkspc\" (UID: \"2ff075b1-5585-45af-ae5c-35b8858ef0d6\") " pod="openshift-marketplace/redhat-operators-hkspc" Jan 22 13:01:12 crc kubenswrapper[4773]: I0122 13:01:12.026244 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ff075b1-5585-45af-ae5c-35b8858ef0d6-utilities\") pod \"redhat-operators-hkspc\" (UID: \"2ff075b1-5585-45af-ae5c-35b8858ef0d6\") " pod="openshift-marketplace/redhat-operators-hkspc" Jan 22 13:01:12 crc kubenswrapper[4773]: I0122 13:01:12.026575 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ff075b1-5585-45af-ae5c-35b8858ef0d6-catalog-content\") pod \"redhat-operators-hkspc\" (UID: \"2ff075b1-5585-45af-ae5c-35b8858ef0d6\") " pod="openshift-marketplace/redhat-operators-hkspc" Jan 22 13:01:12 crc kubenswrapper[4773]: I0122 13:01:12.065508 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bnjd\" (UniqueName: \"kubernetes.io/projected/2ff075b1-5585-45af-ae5c-35b8858ef0d6-kube-api-access-9bnjd\") pod \"redhat-operators-hkspc\" (UID: \"2ff075b1-5585-45af-ae5c-35b8858ef0d6\") " pod="openshift-marketplace/redhat-operators-hkspc" Jan 22 13:01:12 crc kubenswrapper[4773]: I0122 13:01:12.202653 4773 util.go:30] "No sandbox for pod can be found. 
Jan 22 13:01:22 crc kubenswrapper[4773]: I0122 13:01:22.203505 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hkspc"
Jan 22 13:01:22 crc kubenswrapper[4773]: I0122 13:01:22.205132 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hkspc"
Jan 22 13:01:23 crc kubenswrapper[4773]: I0122 13:01:23.307359 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hkspc" podUID="2ff075b1-5585-45af-ae5c-35b8858ef0d6" containerName="registry-server" probeResult="failure" output=<
Jan 22 13:01:23 crc kubenswrapper[4773]: 	timeout: failed to connect service ":50051" within 1s
Jan 22 13:01:23 crc kubenswrapper[4773]: >
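
The startup probe above is a gRPC health check against the registry-server's port 50051, and the quoted output resembles what the common grpc_health_probe tool prints when it cannot connect within its 1s budget. A minimal Go client performing the same kind of check (a sketch assuming the google.golang.org/grpc health/v1 API; grpc.NewClient connects lazily, so a dead server surfaces from the Check call):

    // Minimal gRPC health check of the kind the startup probe runs against
    // the registry-server on :50051, mirroring the 1s budget from the
    // probe output above. Illustrative, not the probe binary itself.
    package main

    import (
    	"context"
    	"fmt"
    	"time"

    	"google.golang.org/grpc"
    	"google.golang.org/grpc/credentials/insecure"
    	healthpb "google.golang.org/grpc/health/grpc_health_v1"
    )

    func main() {
    	conn, err := grpc.NewClient("127.0.0.1:50051",
    		grpc.WithTransportCredentials(insecure.NewCredentials()))
    	if err != nil {
    		fmt.Println("client setup failed:", err)
    		return
    	}
    	defer conn.Close()

    	// A refused or absent server shows up here as a DeadlineExceeded /
    	// Unavailable error once the 1s budget runs out, which the kubelet
    	// records as probeResult="failure".
    	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
    	defer cancel()
    	resp, err := healthpb.NewHealthClient(conn).Check(ctx, &healthpb.HealthCheckRequest{})
    	if err != nil {
    		fmt.Println("health check failed:", err)
    		return
    	}
    	fmt.Println("serving status:", resp.GetStatus())
    }
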
podUID="2ff075b1-5585-45af-ae5c-35b8858ef0d6" containerName="registry-server" probeResult="failure" output=< Jan 22 13:01:23 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 13:01:23 crc kubenswrapper[4773]: > Jan 22 13:01:32 crc kubenswrapper[4773]: I0122 13:01:32.276435 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hkspc" Jan 22 13:01:32 crc kubenswrapper[4773]: I0122 13:01:32.373673 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hkspc" Jan 22 13:01:32 crc kubenswrapper[4773]: I0122 13:01:32.540000 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hkspc"] Jan 22 13:01:33 crc kubenswrapper[4773]: I0122 13:01:33.961932 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hkspc" podUID="2ff075b1-5585-45af-ae5c-35b8858ef0d6" containerName="registry-server" containerID="cri-o://70b83da6e3b972d153be2fb5858ee471640bf34b47647871d2a90ced9b2532dd" gracePeriod=2 Jan 22 13:01:34 crc kubenswrapper[4773]: I0122 13:01:34.451004 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hkspc" Jan 22 13:01:34 crc kubenswrapper[4773]: I0122 13:01:34.584670 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ff075b1-5585-45af-ae5c-35b8858ef0d6-utilities\") pod \"2ff075b1-5585-45af-ae5c-35b8858ef0d6\" (UID: \"2ff075b1-5585-45af-ae5c-35b8858ef0d6\") " Jan 22 13:01:34 crc kubenswrapper[4773]: I0122 13:01:34.584854 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bnjd\" (UniqueName: \"kubernetes.io/projected/2ff075b1-5585-45af-ae5c-35b8858ef0d6-kube-api-access-9bnjd\") pod \"2ff075b1-5585-45af-ae5c-35b8858ef0d6\" (UID: \"2ff075b1-5585-45af-ae5c-35b8858ef0d6\") " Jan 22 13:01:34 crc kubenswrapper[4773]: I0122 13:01:34.584929 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ff075b1-5585-45af-ae5c-35b8858ef0d6-catalog-content\") pod \"2ff075b1-5585-45af-ae5c-35b8858ef0d6\" (UID: \"2ff075b1-5585-45af-ae5c-35b8858ef0d6\") " Jan 22 13:01:34 crc kubenswrapper[4773]: I0122 13:01:34.585905 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ff075b1-5585-45af-ae5c-35b8858ef0d6-utilities" (OuterVolumeSpecName: "utilities") pod "2ff075b1-5585-45af-ae5c-35b8858ef0d6" (UID: "2ff075b1-5585-45af-ae5c-35b8858ef0d6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:01:34 crc kubenswrapper[4773]: I0122 13:01:34.597543 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ff075b1-5585-45af-ae5c-35b8858ef0d6-kube-api-access-9bnjd" (OuterVolumeSpecName: "kube-api-access-9bnjd") pod "2ff075b1-5585-45af-ae5c-35b8858ef0d6" (UID: "2ff075b1-5585-45af-ae5c-35b8858ef0d6"). InnerVolumeSpecName "kube-api-access-9bnjd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:01:34 crc kubenswrapper[4773]: I0122 13:01:34.686423 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ff075b1-5585-45af-ae5c-35b8858ef0d6-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 13:01:34 crc kubenswrapper[4773]: I0122 13:01:34.686468 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bnjd\" (UniqueName: \"kubernetes.io/projected/2ff075b1-5585-45af-ae5c-35b8858ef0d6-kube-api-access-9bnjd\") on node \"crc\" DevicePath \"\"" Jan 22 13:01:34 crc kubenswrapper[4773]: I0122 13:01:34.731069 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ff075b1-5585-45af-ae5c-35b8858ef0d6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2ff075b1-5585-45af-ae5c-35b8858ef0d6" (UID: "2ff075b1-5585-45af-ae5c-35b8858ef0d6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:01:34 crc kubenswrapper[4773]: I0122 13:01:34.787722 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ff075b1-5585-45af-ae5c-35b8858ef0d6-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 13:01:34 crc kubenswrapper[4773]: I0122 13:01:34.974705 4773 generic.go:334] "Generic (PLEG): container finished" podID="2ff075b1-5585-45af-ae5c-35b8858ef0d6" containerID="70b83da6e3b972d153be2fb5858ee471640bf34b47647871d2a90ced9b2532dd" exitCode=0 Jan 22 13:01:34 crc kubenswrapper[4773]: I0122 13:01:34.974772 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hkspc" Jan 22 13:01:34 crc kubenswrapper[4773]: I0122 13:01:34.975528 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hkspc" event={"ID":"2ff075b1-5585-45af-ae5c-35b8858ef0d6","Type":"ContainerDied","Data":"70b83da6e3b972d153be2fb5858ee471640bf34b47647871d2a90ced9b2532dd"} Jan 22 13:01:34 crc kubenswrapper[4773]: I0122 13:01:34.975651 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hkspc" event={"ID":"2ff075b1-5585-45af-ae5c-35b8858ef0d6","Type":"ContainerDied","Data":"dd762b90b8d284b0f4a1f2990f5edd7345c41b6731c03dcde72b4fdaa3febe1a"} Jan 22 13:01:34 crc kubenswrapper[4773]: I0122 13:01:34.975690 4773 scope.go:117] "RemoveContainer" containerID="70b83da6e3b972d153be2fb5858ee471640bf34b47647871d2a90ced9b2532dd" Jan 22 13:01:35 crc kubenswrapper[4773]: I0122 13:01:35.010260 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hkspc"] Jan 22 13:01:35 crc kubenswrapper[4773]: I0122 13:01:35.016693 4773 scope.go:117] "RemoveContainer" containerID="b44a02e599cec78374b835acbb0b9d0e3165c8f248b0eb6522bcacca279a0bca" Jan 22 13:01:35 crc kubenswrapper[4773]: I0122 13:01:35.018733 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hkspc"] Jan 22 13:01:35 crc kubenswrapper[4773]: I0122 13:01:35.051793 4773 scope.go:117] "RemoveContainer" containerID="74e4190d95cda1cdde4e6c281be7b9fffc2ec4436c6861222f01db693aff0dfa" Jan 22 13:01:35 crc kubenswrapper[4773]: I0122 13:01:35.081460 4773 scope.go:117] "RemoveContainer" containerID="70b83da6e3b972d153be2fb5858ee471640bf34b47647871d2a90ced9b2532dd" Jan 22 13:01:35 crc kubenswrapper[4773]: E0122 13:01:35.081952 4773 log.go:32] "ContainerStatus 
from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70b83da6e3b972d153be2fb5858ee471640bf34b47647871d2a90ced9b2532dd\": container with ID starting with 70b83da6e3b972d153be2fb5858ee471640bf34b47647871d2a90ced9b2532dd not found: ID does not exist" containerID="70b83da6e3b972d153be2fb5858ee471640bf34b47647871d2a90ced9b2532dd"
Jan 22 13:01:35 crc kubenswrapper[4773]: I0122 13:01:35.082016 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70b83da6e3b972d153be2fb5858ee471640bf34b47647871d2a90ced9b2532dd"} err="failed to get container status \"70b83da6e3b972d153be2fb5858ee471640bf34b47647871d2a90ced9b2532dd\": rpc error: code = NotFound desc = could not find container \"70b83da6e3b972d153be2fb5858ee471640bf34b47647871d2a90ced9b2532dd\": container with ID starting with 70b83da6e3b972d153be2fb5858ee471640bf34b47647871d2a90ced9b2532dd not found: ID does not exist"
Jan 22 13:01:35 crc kubenswrapper[4773]: I0122 13:01:35.082044 4773 scope.go:117] "RemoveContainer" containerID="b44a02e599cec78374b835acbb0b9d0e3165c8f248b0eb6522bcacca279a0bca"
Jan 22 13:01:35 crc kubenswrapper[4773]: E0122 13:01:35.082405 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b44a02e599cec78374b835acbb0b9d0e3165c8f248b0eb6522bcacca279a0bca\": container with ID starting with b44a02e599cec78374b835acbb0b9d0e3165c8f248b0eb6522bcacca279a0bca not found: ID does not exist" containerID="b44a02e599cec78374b835acbb0b9d0e3165c8f248b0eb6522bcacca279a0bca"
Jan 22 13:01:35 crc kubenswrapper[4773]: I0122 13:01:35.082423 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b44a02e599cec78374b835acbb0b9d0e3165c8f248b0eb6522bcacca279a0bca"} err="failed to get container status \"b44a02e599cec78374b835acbb0b9d0e3165c8f248b0eb6522bcacca279a0bca\": rpc error: code = NotFound desc = could not find container \"b44a02e599cec78374b835acbb0b9d0e3165c8f248b0eb6522bcacca279a0bca\": container with ID starting with b44a02e599cec78374b835acbb0b9d0e3165c8f248b0eb6522bcacca279a0bca not found: ID does not exist"
Jan 22 13:01:35 crc kubenswrapper[4773]: I0122 13:01:35.082436 4773 scope.go:117] "RemoveContainer" containerID="74e4190d95cda1cdde4e6c281be7b9fffc2ec4436c6861222f01db693aff0dfa"
Jan 22 13:01:35 crc kubenswrapper[4773]: E0122 13:01:35.082703 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74e4190d95cda1cdde4e6c281be7b9fffc2ec4436c6861222f01db693aff0dfa\": container with ID starting with 74e4190d95cda1cdde4e6c281be7b9fffc2ec4436c6861222f01db693aff0dfa not found: ID does not exist" containerID="74e4190d95cda1cdde4e6c281be7b9fffc2ec4436c6861222f01db693aff0dfa"
Jan 22 13:01:35 crc kubenswrapper[4773]: I0122 13:01:35.082718 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74e4190d95cda1cdde4e6c281be7b9fffc2ec4436c6861222f01db693aff0dfa"} err="failed to get container status \"74e4190d95cda1cdde4e6c281be7b9fffc2ec4436c6861222f01db693aff0dfa\": rpc error: code = NotFound desc = could not find container \"74e4190d95cda1cdde4e6c281be7b9fffc2ec4436c6861222f01db693aff0dfa\": container with ID starting with 74e4190d95cda1cdde4e6c281be7b9fffc2ec4436c6861222f01db693aff0dfa not found: ID does not exist"
Jan 22 13:01:36 crc kubenswrapper[4773]: I0122 13:01:36.665776 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ff075b1-5585-45af-ae5c-35b8858ef0d6" path="/var/lib/kubelet/pods/2ff075b1-5585-45af-ae5c-35b8858ef0d6/volumes"
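
The NotFound errors above are a benign race: the containers were already removed, so the follow-up ContainerStatus lookups fail with gRPC code NotFound and the kubelet logs "DeleteContainer returned error" instead of failing the cleanup. A sketch of that treat-NotFound-as-already-deleted pattern (illustrative; lookupStatus is a hypothetical stand-in for a CRI ContainerStatus RPC):

    // Sketch of the treat-NotFound-as-already-deleted pattern suggested by
    // the errors above; not kubelet code.
    package main

    import (
    	"fmt"

    	"google.golang.org/grpc/codes"
    	"google.golang.org/grpc/status"
    )

    // lookupStatus stands in for a CRI ContainerStatus RPC; here it always
    // reports NotFound, mimicking the situation in the log.
    func lookupStatus(id string) error {
    	return status.Errorf(codes.NotFound,
    		"could not find container %q: ID does not exist", id)
    }

    // removeIfPresent swallows NotFound so that repeated cleanup of the
    // same container ID stays idempotent instead of surfacing an error.
    func removeIfPresent(id string) error {
    	if err := lookupStatus(id); err != nil {
    		if s, ok := status.FromError(err); ok && s.Code() == codes.NotFound {
    			return nil // already gone: nothing left to delete
    		}
    		return fmt.Errorf("status lookup failed: %w", err)
    	}
    	// ...a real implementation would issue RemoveContainer here...
    	return nil
    }

    func main() {
    	fmt.Println(removeIfPresent("70b83da6e3b9")) // <nil>: treated as done
    }
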
volumes dir" podUID="2ff075b1-5585-45af-ae5c-35b8858ef0d6" path="/var/lib/kubelet/pods/2ff075b1-5585-45af-ae5c-35b8858ef0d6/volumes" Jan 22 13:03:04 crc kubenswrapper[4773]: I0122 13:03:04.074205 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:03:04 crc kubenswrapper[4773]: I0122 13:03:04.075029 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:03:26 crc kubenswrapper[4773]: I0122 13:03:26.413074 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rv4ww"] Jan 22 13:03:26 crc kubenswrapper[4773]: E0122 13:03:26.413989 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ff075b1-5585-45af-ae5c-35b8858ef0d6" containerName="extract-content" Jan 22 13:03:26 crc kubenswrapper[4773]: I0122 13:03:26.414003 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ff075b1-5585-45af-ae5c-35b8858ef0d6" containerName="extract-content" Jan 22 13:03:26 crc kubenswrapper[4773]: E0122 13:03:26.414030 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ff075b1-5585-45af-ae5c-35b8858ef0d6" containerName="extract-utilities" Jan 22 13:03:26 crc kubenswrapper[4773]: I0122 13:03:26.414039 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ff075b1-5585-45af-ae5c-35b8858ef0d6" containerName="extract-utilities" Jan 22 13:03:26 crc kubenswrapper[4773]: E0122 13:03:26.414056 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ff075b1-5585-45af-ae5c-35b8858ef0d6" containerName="registry-server" Jan 22 13:03:26 crc kubenswrapper[4773]: I0122 13:03:26.414062 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ff075b1-5585-45af-ae5c-35b8858ef0d6" containerName="registry-server" Jan 22 13:03:26 crc kubenswrapper[4773]: I0122 13:03:26.414184 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ff075b1-5585-45af-ae5c-35b8858ef0d6" containerName="registry-server" Jan 22 13:03:26 crc kubenswrapper[4773]: I0122 13:03:26.415241 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rv4ww" Jan 22 13:03:26 crc kubenswrapper[4773]: I0122 13:03:26.439455 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rv4ww"] Jan 22 13:03:26 crc kubenswrapper[4773]: I0122 13:03:26.563807 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba9eace9-5215-4a6c-9d10-e5b518b149e8-catalog-content\") pod \"certified-operators-rv4ww\" (UID: \"ba9eace9-5215-4a6c-9d10-e5b518b149e8\") " pod="openshift-marketplace/certified-operators-rv4ww" Jan 22 13:03:26 crc kubenswrapper[4773]: I0122 13:03:26.563929 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fz766\" (UniqueName: \"kubernetes.io/projected/ba9eace9-5215-4a6c-9d10-e5b518b149e8-kube-api-access-fz766\") pod \"certified-operators-rv4ww\" (UID: \"ba9eace9-5215-4a6c-9d10-e5b518b149e8\") " pod="openshift-marketplace/certified-operators-rv4ww" Jan 22 13:03:26 crc kubenswrapper[4773]: I0122 13:03:26.564043 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba9eace9-5215-4a6c-9d10-e5b518b149e8-utilities\") pod \"certified-operators-rv4ww\" (UID: \"ba9eace9-5215-4a6c-9d10-e5b518b149e8\") " pod="openshift-marketplace/certified-operators-rv4ww" Jan 22 13:03:26 crc kubenswrapper[4773]: I0122 13:03:26.665137 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba9eace9-5215-4a6c-9d10-e5b518b149e8-utilities\") pod \"certified-operators-rv4ww\" (UID: \"ba9eace9-5215-4a6c-9d10-e5b518b149e8\") " pod="openshift-marketplace/certified-operators-rv4ww" Jan 22 13:03:26 crc kubenswrapper[4773]: I0122 13:03:26.665227 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba9eace9-5215-4a6c-9d10-e5b518b149e8-catalog-content\") pod \"certified-operators-rv4ww\" (UID: \"ba9eace9-5215-4a6c-9d10-e5b518b149e8\") " pod="openshift-marketplace/certified-operators-rv4ww" Jan 22 13:03:26 crc kubenswrapper[4773]: I0122 13:03:26.665314 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fz766\" (UniqueName: \"kubernetes.io/projected/ba9eace9-5215-4a6c-9d10-e5b518b149e8-kube-api-access-fz766\") pod \"certified-operators-rv4ww\" (UID: \"ba9eace9-5215-4a6c-9d10-e5b518b149e8\") " pod="openshift-marketplace/certified-operators-rv4ww" Jan 22 13:03:26 crc kubenswrapper[4773]: I0122 13:03:26.665841 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba9eace9-5215-4a6c-9d10-e5b518b149e8-utilities\") pod \"certified-operators-rv4ww\" (UID: \"ba9eace9-5215-4a6c-9d10-e5b518b149e8\") " pod="openshift-marketplace/certified-operators-rv4ww" Jan 22 13:03:26 crc kubenswrapper[4773]: I0122 13:03:26.666482 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba9eace9-5215-4a6c-9d10-e5b518b149e8-catalog-content\") pod \"certified-operators-rv4ww\" (UID: \"ba9eace9-5215-4a6c-9d10-e5b518b149e8\") " pod="openshift-marketplace/certified-operators-rv4ww" Jan 22 13:03:26 crc kubenswrapper[4773]: I0122 13:03:26.686371 4773 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-fz766\" (UniqueName: \"kubernetes.io/projected/ba9eace9-5215-4a6c-9d10-e5b518b149e8-kube-api-access-fz766\") pod \"certified-operators-rv4ww\" (UID: \"ba9eace9-5215-4a6c-9d10-e5b518b149e8\") " pod="openshift-marketplace/certified-operators-rv4ww" Jan 22 13:03:26 crc kubenswrapper[4773]: I0122 13:03:26.734948 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rv4ww" Jan 22 13:03:27 crc kubenswrapper[4773]: I0122 13:03:27.234624 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rv4ww"] Jan 22 13:03:28 crc kubenswrapper[4773]: I0122 13:03:28.106428 4773 generic.go:334] "Generic (PLEG): container finished" podID="ba9eace9-5215-4a6c-9d10-e5b518b149e8" containerID="e7b6e21e66ae70475bf4fb4030c58f04e146d367fb48e3afb60b8688be0e299f" exitCode=0 Jan 22 13:03:28 crc kubenswrapper[4773]: I0122 13:03:28.106581 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rv4ww" event={"ID":"ba9eace9-5215-4a6c-9d10-e5b518b149e8","Type":"ContainerDied","Data":"e7b6e21e66ae70475bf4fb4030c58f04e146d367fb48e3afb60b8688be0e299f"} Jan 22 13:03:28 crc kubenswrapper[4773]: I0122 13:03:28.106655 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rv4ww" event={"ID":"ba9eace9-5215-4a6c-9d10-e5b518b149e8","Type":"ContainerStarted","Data":"6bbec19bc774210a26de936ab813ff43aaf3771ea52e8de2b3a0efc3f46a6838"} Jan 22 13:03:28 crc kubenswrapper[4773]: I0122 13:03:28.108421 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 13:03:29 crc kubenswrapper[4773]: I0122 13:03:29.246684 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rv4ww" event={"ID":"ba9eace9-5215-4a6c-9d10-e5b518b149e8","Type":"ContainerStarted","Data":"7f79899c793ed0582de38bd0ccbe0a91a19f4525a9849b0bc4e78ede579dace4"} Jan 22 13:03:30 crc kubenswrapper[4773]: I0122 13:03:30.259688 4773 generic.go:334] "Generic (PLEG): container finished" podID="ba9eace9-5215-4a6c-9d10-e5b518b149e8" containerID="7f79899c793ed0582de38bd0ccbe0a91a19f4525a9849b0bc4e78ede579dace4" exitCode=0 Jan 22 13:03:30 crc kubenswrapper[4773]: I0122 13:03:30.259754 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rv4ww" event={"ID":"ba9eace9-5215-4a6c-9d10-e5b518b149e8","Type":"ContainerDied","Data":"7f79899c793ed0582de38bd0ccbe0a91a19f4525a9849b0bc4e78ede579dace4"} Jan 22 13:03:30 crc kubenswrapper[4773]: I0122 13:03:30.625462 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-l5ggf"] Jan 22 13:03:30 crc kubenswrapper[4773]: I0122 13:03:30.632847 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l5ggf" Jan 22 13:03:30 crc kubenswrapper[4773]: I0122 13:03:30.643078 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5ggf"] Jan 22 13:03:30 crc kubenswrapper[4773]: I0122 13:03:30.666482 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkv6s\" (UniqueName: \"kubernetes.io/projected/fe1abe4b-1d4e-422f-9de0-11d96a85fa80-kube-api-access-pkv6s\") pod \"redhat-marketplace-l5ggf\" (UID: \"fe1abe4b-1d4e-422f-9de0-11d96a85fa80\") " pod="openshift-marketplace/redhat-marketplace-l5ggf" Jan 22 13:03:30 crc kubenswrapper[4773]: I0122 13:03:30.666582 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe1abe4b-1d4e-422f-9de0-11d96a85fa80-catalog-content\") pod \"redhat-marketplace-l5ggf\" (UID: \"fe1abe4b-1d4e-422f-9de0-11d96a85fa80\") " pod="openshift-marketplace/redhat-marketplace-l5ggf" Jan 22 13:03:30 crc kubenswrapper[4773]: I0122 13:03:30.666670 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe1abe4b-1d4e-422f-9de0-11d96a85fa80-utilities\") pod \"redhat-marketplace-l5ggf\" (UID: \"fe1abe4b-1d4e-422f-9de0-11d96a85fa80\") " pod="openshift-marketplace/redhat-marketplace-l5ggf" Jan 22 13:03:30 crc kubenswrapper[4773]: I0122 13:03:30.767941 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe1abe4b-1d4e-422f-9de0-11d96a85fa80-catalog-content\") pod \"redhat-marketplace-l5ggf\" (UID: \"fe1abe4b-1d4e-422f-9de0-11d96a85fa80\") " pod="openshift-marketplace/redhat-marketplace-l5ggf" Jan 22 13:03:30 crc kubenswrapper[4773]: I0122 13:03:30.768035 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe1abe4b-1d4e-422f-9de0-11d96a85fa80-utilities\") pod \"redhat-marketplace-l5ggf\" (UID: \"fe1abe4b-1d4e-422f-9de0-11d96a85fa80\") " pod="openshift-marketplace/redhat-marketplace-l5ggf" Jan 22 13:03:30 crc kubenswrapper[4773]: I0122 13:03:30.768152 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkv6s\" (UniqueName: \"kubernetes.io/projected/fe1abe4b-1d4e-422f-9de0-11d96a85fa80-kube-api-access-pkv6s\") pod \"redhat-marketplace-l5ggf\" (UID: \"fe1abe4b-1d4e-422f-9de0-11d96a85fa80\") " pod="openshift-marketplace/redhat-marketplace-l5ggf" Jan 22 13:03:30 crc kubenswrapper[4773]: I0122 13:03:30.768783 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe1abe4b-1d4e-422f-9de0-11d96a85fa80-catalog-content\") pod \"redhat-marketplace-l5ggf\" (UID: \"fe1abe4b-1d4e-422f-9de0-11d96a85fa80\") " pod="openshift-marketplace/redhat-marketplace-l5ggf" Jan 22 13:03:30 crc kubenswrapper[4773]: I0122 13:03:30.768807 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe1abe4b-1d4e-422f-9de0-11d96a85fa80-utilities\") pod \"redhat-marketplace-l5ggf\" (UID: \"fe1abe4b-1d4e-422f-9de0-11d96a85fa80\") " pod="openshift-marketplace/redhat-marketplace-l5ggf" Jan 22 13:03:30 crc kubenswrapper[4773]: I0122 13:03:30.792461 4773 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-pkv6s\" (UniqueName: \"kubernetes.io/projected/fe1abe4b-1d4e-422f-9de0-11d96a85fa80-kube-api-access-pkv6s\") pod \"redhat-marketplace-l5ggf\" (UID: \"fe1abe4b-1d4e-422f-9de0-11d96a85fa80\") " pod="openshift-marketplace/redhat-marketplace-l5ggf"
Jan 22 13:03:30 crc kubenswrapper[4773]: I0122 13:03:30.971421 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l5ggf"
Jan 22 13:03:31 crc kubenswrapper[4773]: I0122 13:03:31.256293 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5ggf"]
Jan 22 13:03:31 crc kubenswrapper[4773]: I0122 13:03:31.314685 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rv4ww" event={"ID":"ba9eace9-5215-4a6c-9d10-e5b518b149e8","Type":"ContainerStarted","Data":"6bd15dc08bfff530c41c8a969b8562e8bc9d4c61c55b26b42ed92527ec88582a"}
Jan 22 13:03:31 crc kubenswrapper[4773]: I0122 13:03:31.352769 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rv4ww" podStartSLOduration=2.724817837 podStartE2EDuration="5.35273784s" podCreationTimestamp="2026-01-22 13:03:26 +0000 UTC" firstStartedPulling="2026-01-22 13:03:28.108168631 +0000 UTC m=+4115.686284456" lastFinishedPulling="2026-01-22 13:03:30.736088624 +0000 UTC m=+4118.314204459" observedRunningTime="2026-01-22 13:03:31.352226047 +0000 UTC m=+4118.930341882" watchObservedRunningTime="2026-01-22 13:03:31.35273784 +0000 UTC m=+4118.930853665"
Jan 22 13:03:32 crc kubenswrapper[4773]: I0122 13:03:32.328023 4773 generic.go:334] "Generic (PLEG): container finished" podID="fe1abe4b-1d4e-422f-9de0-11d96a85fa80" containerID="c1819668f26b7108dd6c35587381bb7c3e40772d714fe7bd21a9fb9cebe271af" exitCode=0
Jan 22 13:03:32 crc kubenswrapper[4773]: I0122 13:03:32.328121 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5ggf" event={"ID":"fe1abe4b-1d4e-422f-9de0-11d96a85fa80","Type":"ContainerDied","Data":"c1819668f26b7108dd6c35587381bb7c3e40772d714fe7bd21a9fb9cebe271af"}
Jan 22 13:03:32 crc kubenswrapper[4773]: I0122 13:03:32.328674 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5ggf" event={"ID":"fe1abe4b-1d4e-422f-9de0-11d96a85fa80","Type":"ContainerStarted","Data":"63c638a9796747ab8b3d53a658143cfee920876604323d46bb7e1251e86b5f34"}
Jan 22 13:03:33 crc kubenswrapper[4773]: I0122 13:03:33.527462 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5ggf" event={"ID":"fe1abe4b-1d4e-422f-9de0-11d96a85fa80","Type":"ContainerStarted","Data":"e50d77ce848f221091fc588e924d5e1bf07b8b3fc1f03be8363e1cb865db3ac3"}
Jan 22 13:03:34 crc kubenswrapper[4773]: I0122 13:03:34.074998 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 13:03:34 crc kubenswrapper[4773]: I0122 13:03:34.075113 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
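
The recurring liveness failures above are plain HTTP GETs against http://127.0.0.1:8798/health being refused while machine-config-daemon is down between restarts. For reference, an endpoint of that shape is a trivial HTTP handler; the address and path come from the log, and the handler body is illustrative:

    // A health endpoint of the shape the liveness probe expects. While
    // this server is down, probes fail exactly as above with
    // "connect: connection refused".
    package main

    import (
    	"log"
    	"net/http"
    )

    func main() {
    	http.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) {
    		w.WriteHeader(http.StatusOK) // any 2xx/3xx counts as a pass
    	})
    	log.Fatal(http.ListenAndServe("127.0.0.1:8798", nil))
    }
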
connection refused" Jan 22 13:03:34 crc kubenswrapper[4773]: I0122 13:03:34.535996 4773 generic.go:334] "Generic (PLEG): container finished" podID="fe1abe4b-1d4e-422f-9de0-11d96a85fa80" containerID="e50d77ce848f221091fc588e924d5e1bf07b8b3fc1f03be8363e1cb865db3ac3" exitCode=0 Jan 22 13:03:34 crc kubenswrapper[4773]: I0122 13:03:34.536043 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5ggf" event={"ID":"fe1abe4b-1d4e-422f-9de0-11d96a85fa80","Type":"ContainerDied","Data":"e50d77ce848f221091fc588e924d5e1bf07b8b3fc1f03be8363e1cb865db3ac3"} Jan 22 13:03:35 crc kubenswrapper[4773]: I0122 13:03:35.547236 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5ggf" event={"ID":"fe1abe4b-1d4e-422f-9de0-11d96a85fa80","Type":"ContainerStarted","Data":"6963f9e96ade1a34c5943a77395333e27a35f10e8720a732cb6128f70ab79f0b"} Jan 22 13:03:35 crc kubenswrapper[4773]: I0122 13:03:35.570124 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-l5ggf" podStartSLOduration=2.540274883 podStartE2EDuration="5.570102982s" podCreationTimestamp="2026-01-22 13:03:30 +0000 UTC" firstStartedPulling="2026-01-22 13:03:32.334492634 +0000 UTC m=+4119.912608469" lastFinishedPulling="2026-01-22 13:03:35.364320703 +0000 UTC m=+4122.942436568" observedRunningTime="2026-01-22 13:03:35.563496397 +0000 UTC m=+4123.141612222" watchObservedRunningTime="2026-01-22 13:03:35.570102982 +0000 UTC m=+4123.148218807" Jan 22 13:03:36 crc kubenswrapper[4773]: I0122 13:03:36.735961 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rv4ww" Jan 22 13:03:36 crc kubenswrapper[4773]: I0122 13:03:36.736027 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rv4ww" Jan 22 13:03:36 crc kubenswrapper[4773]: I0122 13:03:36.794703 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rv4ww" Jan 22 13:03:37 crc kubenswrapper[4773]: I0122 13:03:37.649037 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rv4ww" Jan 22 13:03:38 crc kubenswrapper[4773]: I0122 13:03:38.610720 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rv4ww"] Jan 22 13:03:39 crc kubenswrapper[4773]: I0122 13:03:39.587425 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rv4ww" podUID="ba9eace9-5215-4a6c-9d10-e5b518b149e8" containerName="registry-server" containerID="cri-o://6bd15dc08bfff530c41c8a969b8562e8bc9d4c61c55b26b42ed92527ec88582a" gracePeriod=2 Jan 22 13:03:40 crc kubenswrapper[4773]: I0122 13:03:40.971969 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-l5ggf" Jan 22 13:03:40 crc kubenswrapper[4773]: I0122 13:03:40.972533 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-l5ggf" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.052923 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-l5ggf" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.525128 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rv4ww" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.609938 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rv4ww" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.609979 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rv4ww" event={"ID":"ba9eace9-5215-4a6c-9d10-e5b518b149e8","Type":"ContainerDied","Data":"6bd15dc08bfff530c41c8a969b8562e8bc9d4c61c55b26b42ed92527ec88582a"} Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.609927 4773 generic.go:334] "Generic (PLEG): container finished" podID="ba9eace9-5215-4a6c-9d10-e5b518b149e8" containerID="6bd15dc08bfff530c41c8a969b8562e8bc9d4c61c55b26b42ed92527ec88582a" exitCode=0 Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.610171 4773 scope.go:117] "RemoveContainer" containerID="6bd15dc08bfff530c41c8a969b8562e8bc9d4c61c55b26b42ed92527ec88582a" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.610424 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rv4ww" event={"ID":"ba9eace9-5215-4a6c-9d10-e5b518b149e8","Type":"ContainerDied","Data":"6bbec19bc774210a26de936ab813ff43aaf3771ea52e8de2b3a0efc3f46a6838"} Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.632611 4773 scope.go:117] "RemoveContainer" containerID="7f79899c793ed0582de38bd0ccbe0a91a19f4525a9849b0bc4e78ede579dace4" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.636869 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba9eace9-5215-4a6c-9d10-e5b518b149e8-utilities\") pod \"ba9eace9-5215-4a6c-9d10-e5b518b149e8\" (UID: \"ba9eace9-5215-4a6c-9d10-e5b518b149e8\") " Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.636962 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fz766\" (UniqueName: \"kubernetes.io/projected/ba9eace9-5215-4a6c-9d10-e5b518b149e8-kube-api-access-fz766\") pod \"ba9eace9-5215-4a6c-9d10-e5b518b149e8\" (UID: \"ba9eace9-5215-4a6c-9d10-e5b518b149e8\") " Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.637057 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba9eace9-5215-4a6c-9d10-e5b518b149e8-catalog-content\") pod \"ba9eace9-5215-4a6c-9d10-e5b518b149e8\" (UID: \"ba9eace9-5215-4a6c-9d10-e5b518b149e8\") " Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.639057 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba9eace9-5215-4a6c-9d10-e5b518b149e8-utilities" (OuterVolumeSpecName: "utilities") pod "ba9eace9-5215-4a6c-9d10-e5b518b149e8" (UID: "ba9eace9-5215-4a6c-9d10-e5b518b149e8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.653774 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba9eace9-5215-4a6c-9d10-e5b518b149e8-kube-api-access-fz766" (OuterVolumeSpecName: "kube-api-access-fz766") pod "ba9eace9-5215-4a6c-9d10-e5b518b149e8" (UID: "ba9eace9-5215-4a6c-9d10-e5b518b149e8"). InnerVolumeSpecName "kube-api-access-fz766". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.654233 4773 scope.go:117] "RemoveContainer" containerID="e7b6e21e66ae70475bf4fb4030c58f04e146d367fb48e3afb60b8688be0e299f" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.690257 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-l5ggf" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.701867 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba9eace9-5215-4a6c-9d10-e5b518b149e8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ba9eace9-5215-4a6c-9d10-e5b518b149e8" (UID: "ba9eace9-5215-4a6c-9d10-e5b518b149e8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.728774 4773 scope.go:117] "RemoveContainer" containerID="6bd15dc08bfff530c41c8a969b8562e8bc9d4c61c55b26b42ed92527ec88582a" Jan 22 13:03:41 crc kubenswrapper[4773]: E0122 13:03:41.729264 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bd15dc08bfff530c41c8a969b8562e8bc9d4c61c55b26b42ed92527ec88582a\": container with ID starting with 6bd15dc08bfff530c41c8a969b8562e8bc9d4c61c55b26b42ed92527ec88582a not found: ID does not exist" containerID="6bd15dc08bfff530c41c8a969b8562e8bc9d4c61c55b26b42ed92527ec88582a" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.729387 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bd15dc08bfff530c41c8a969b8562e8bc9d4c61c55b26b42ed92527ec88582a"} err="failed to get container status \"6bd15dc08bfff530c41c8a969b8562e8bc9d4c61c55b26b42ed92527ec88582a\": rpc error: code = NotFound desc = could not find container \"6bd15dc08bfff530c41c8a969b8562e8bc9d4c61c55b26b42ed92527ec88582a\": container with ID starting with 6bd15dc08bfff530c41c8a969b8562e8bc9d4c61c55b26b42ed92527ec88582a not found: ID does not exist" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.729435 4773 scope.go:117] "RemoveContainer" containerID="7f79899c793ed0582de38bd0ccbe0a91a19f4525a9849b0bc4e78ede579dace4" Jan 22 13:03:41 crc kubenswrapper[4773]: E0122 13:03:41.729904 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f79899c793ed0582de38bd0ccbe0a91a19f4525a9849b0bc4e78ede579dace4\": container with ID starting with 7f79899c793ed0582de38bd0ccbe0a91a19f4525a9849b0bc4e78ede579dace4 not found: ID does not exist" containerID="7f79899c793ed0582de38bd0ccbe0a91a19f4525a9849b0bc4e78ede579dace4" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.729971 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f79899c793ed0582de38bd0ccbe0a91a19f4525a9849b0bc4e78ede579dace4"} err="failed to get container status \"7f79899c793ed0582de38bd0ccbe0a91a19f4525a9849b0bc4e78ede579dace4\": rpc error: code = NotFound desc = could not find container \"7f79899c793ed0582de38bd0ccbe0a91a19f4525a9849b0bc4e78ede579dace4\": container with ID starting with 7f79899c793ed0582de38bd0ccbe0a91a19f4525a9849b0bc4e78ede579dace4 not found: ID does not exist" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.729998 4773 scope.go:117] "RemoveContainer" containerID="e7b6e21e66ae70475bf4fb4030c58f04e146d367fb48e3afb60b8688be0e299f" Jan 22 13:03:41 crc 
kubenswrapper[4773]: E0122 13:03:41.730355 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7b6e21e66ae70475bf4fb4030c58f04e146d367fb48e3afb60b8688be0e299f\": container with ID starting with e7b6e21e66ae70475bf4fb4030c58f04e146d367fb48e3afb60b8688be0e299f not found: ID does not exist" containerID="e7b6e21e66ae70475bf4fb4030c58f04e146d367fb48e3afb60b8688be0e299f" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.730507 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7b6e21e66ae70475bf4fb4030c58f04e146d367fb48e3afb60b8688be0e299f"} err="failed to get container status \"e7b6e21e66ae70475bf4fb4030c58f04e146d367fb48e3afb60b8688be0e299f\": rpc error: code = NotFound desc = could not find container \"e7b6e21e66ae70475bf4fb4030c58f04e146d367fb48e3afb60b8688be0e299f\": container with ID starting with e7b6e21e66ae70475bf4fb4030c58f04e146d367fb48e3afb60b8688be0e299f not found: ID does not exist" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.739817 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fz766\" (UniqueName: \"kubernetes.io/projected/ba9eace9-5215-4a6c-9d10-e5b518b149e8-kube-api-access-fz766\") on node \"crc\" DevicePath \"\"" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.739858 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba9eace9-5215-4a6c-9d10-e5b518b149e8-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.739876 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba9eace9-5215-4a6c-9d10-e5b518b149e8-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.974075 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rv4ww"] Jan 22 13:03:41 crc kubenswrapper[4773]: I0122 13:03:41.988427 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rv4ww"] Jan 22 13:03:42 crc kubenswrapper[4773]: I0122 13:03:42.665662 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba9eace9-5215-4a6c-9d10-e5b518b149e8" path="/var/lib/kubelet/pods/ba9eace9-5215-4a6c-9d10-e5b518b149e8/volumes" Jan 22 13:03:42 crc kubenswrapper[4773]: I0122 13:03:42.807529 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5ggf"] Jan 22 13:03:43 crc kubenswrapper[4773]: I0122 13:03:43.625768 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-l5ggf" podUID="fe1abe4b-1d4e-422f-9de0-11d96a85fa80" containerName="registry-server" containerID="cri-o://6963f9e96ade1a34c5943a77395333e27a35f10e8720a732cb6128f70ab79f0b" gracePeriod=2 Jan 22 13:03:44 crc kubenswrapper[4773]: I0122 13:03:44.635911 4773 generic.go:334] "Generic (PLEG): container finished" podID="fe1abe4b-1d4e-422f-9de0-11d96a85fa80" containerID="6963f9e96ade1a34c5943a77395333e27a35f10e8720a732cb6128f70ab79f0b" exitCode=0 Jan 22 13:03:44 crc kubenswrapper[4773]: I0122 13:03:44.636220 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5ggf" 
event={"ID":"fe1abe4b-1d4e-422f-9de0-11d96a85fa80","Type":"ContainerDied","Data":"6963f9e96ade1a34c5943a77395333e27a35f10e8720a732cb6128f70ab79f0b"} Jan 22 13:03:44 crc kubenswrapper[4773]: I0122 13:03:44.983174 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l5ggf" Jan 22 13:03:45 crc kubenswrapper[4773]: I0122 13:03:45.052367 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe1abe4b-1d4e-422f-9de0-11d96a85fa80-catalog-content\") pod \"fe1abe4b-1d4e-422f-9de0-11d96a85fa80\" (UID: \"fe1abe4b-1d4e-422f-9de0-11d96a85fa80\") " Jan 22 13:03:45 crc kubenswrapper[4773]: I0122 13:03:45.052529 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe1abe4b-1d4e-422f-9de0-11d96a85fa80-utilities\") pod \"fe1abe4b-1d4e-422f-9de0-11d96a85fa80\" (UID: \"fe1abe4b-1d4e-422f-9de0-11d96a85fa80\") " Jan 22 13:03:45 crc kubenswrapper[4773]: I0122 13:03:45.052674 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkv6s\" (UniqueName: \"kubernetes.io/projected/fe1abe4b-1d4e-422f-9de0-11d96a85fa80-kube-api-access-pkv6s\") pod \"fe1abe4b-1d4e-422f-9de0-11d96a85fa80\" (UID: \"fe1abe4b-1d4e-422f-9de0-11d96a85fa80\") " Jan 22 13:03:45 crc kubenswrapper[4773]: I0122 13:03:45.054080 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe1abe4b-1d4e-422f-9de0-11d96a85fa80-utilities" (OuterVolumeSpecName: "utilities") pod "fe1abe4b-1d4e-422f-9de0-11d96a85fa80" (UID: "fe1abe4b-1d4e-422f-9de0-11d96a85fa80"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:03:45 crc kubenswrapper[4773]: I0122 13:03:45.062573 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe1abe4b-1d4e-422f-9de0-11d96a85fa80-kube-api-access-pkv6s" (OuterVolumeSpecName: "kube-api-access-pkv6s") pod "fe1abe4b-1d4e-422f-9de0-11d96a85fa80" (UID: "fe1abe4b-1d4e-422f-9de0-11d96a85fa80"). InnerVolumeSpecName "kube-api-access-pkv6s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:03:45 crc kubenswrapper[4773]: I0122 13:03:45.076783 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe1abe4b-1d4e-422f-9de0-11d96a85fa80-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fe1abe4b-1d4e-422f-9de0-11d96a85fa80" (UID: "fe1abe4b-1d4e-422f-9de0-11d96a85fa80"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:03:45 crc kubenswrapper[4773]: I0122 13:03:45.155796 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe1abe4b-1d4e-422f-9de0-11d96a85fa80-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 13:03:45 crc kubenswrapper[4773]: I0122 13:03:45.155842 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe1abe4b-1d4e-422f-9de0-11d96a85fa80-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 13:03:45 crc kubenswrapper[4773]: I0122 13:03:45.155856 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkv6s\" (UniqueName: \"kubernetes.io/projected/fe1abe4b-1d4e-422f-9de0-11d96a85fa80-kube-api-access-pkv6s\") on node \"crc\" DevicePath \"\"" Jan 22 13:03:45 crc kubenswrapper[4773]: I0122 13:03:45.652547 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l5ggf" event={"ID":"fe1abe4b-1d4e-422f-9de0-11d96a85fa80","Type":"ContainerDied","Data":"63c638a9796747ab8b3d53a658143cfee920876604323d46bb7e1251e86b5f34"} Jan 22 13:03:45 crc kubenswrapper[4773]: I0122 13:03:45.652671 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l5ggf" Jan 22 13:03:45 crc kubenswrapper[4773]: I0122 13:03:45.652680 4773 scope.go:117] "RemoveContainer" containerID="6963f9e96ade1a34c5943a77395333e27a35f10e8720a732cb6128f70ab79f0b" Jan 22 13:03:45 crc kubenswrapper[4773]: I0122 13:03:45.688564 4773 scope.go:117] "RemoveContainer" containerID="e50d77ce848f221091fc588e924d5e1bf07b8b3fc1f03be8363e1cb865db3ac3" Jan 22 13:03:45 crc kubenswrapper[4773]: I0122 13:03:45.865078 4773 scope.go:117] "RemoveContainer" containerID="c1819668f26b7108dd6c35587381bb7c3e40772d714fe7bd21a9fb9cebe271af" Jan 22 13:03:45 crc kubenswrapper[4773]: I0122 13:03:45.868396 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5ggf"] Jan 22 13:03:45 crc kubenswrapper[4773]: I0122 13:03:45.874420 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-l5ggf"] Jan 22 13:03:46 crc kubenswrapper[4773]: I0122 13:03:46.665980 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe1abe4b-1d4e-422f-9de0-11d96a85fa80" path="/var/lib/kubelet/pods/fe1abe4b-1d4e-422f-9de0-11d96a85fa80/volumes" Jan 22 13:04:04 crc kubenswrapper[4773]: I0122 13:04:04.074587 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:04:04 crc kubenswrapper[4773]: I0122 13:04:04.075510 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:04:04 crc kubenswrapper[4773]: I0122 13:04:04.075590 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 13:04:04 crc kubenswrapper[4773]: I0122 13:04:04.076609 4773 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3e4c557e60566fa7490dab6f394b04defbcc9627b9ce632b202bd8c9b884a9dd"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 13:04:04 crc kubenswrapper[4773]: I0122 13:04:04.076697 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://3e4c557e60566fa7490dab6f394b04defbcc9627b9ce632b202bd8c9b884a9dd" gracePeriod=600 Jan 22 13:04:04 crc kubenswrapper[4773]: I0122 13:04:04.848118 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="3e4c557e60566fa7490dab6f394b04defbcc9627b9ce632b202bd8c9b884a9dd" exitCode=0 Jan 22 13:04:04 crc kubenswrapper[4773]: I0122 13:04:04.848205 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"3e4c557e60566fa7490dab6f394b04defbcc9627b9ce632b202bd8c9b884a9dd"} Jan 22 13:04:04 crc kubenswrapper[4773]: I0122 13:04:04.848886 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"} Jan 22 13:04:04 crc kubenswrapper[4773]: I0122 13:04:04.848917 4773 scope.go:117] "RemoveContainer" containerID="aa8554a11d48557c6b0bb700969012d1d416aabc4426ca19cf7c27dbc796f5b9" Jan 22 13:06:04 crc kubenswrapper[4773]: I0122 13:06:04.074805 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:06:04 crc kubenswrapper[4773]: I0122 13:06:04.075415 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:06:34 crc kubenswrapper[4773]: I0122 13:06:34.074545 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:06:34 crc kubenswrapper[4773]: I0122 13:06:34.075259 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:07:04 crc kubenswrapper[4773]: I0122 13:07:04.075022 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:07:04 crc kubenswrapper[4773]: I0122 13:07:04.075652 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:07:04 crc kubenswrapper[4773]: I0122 13:07:04.075769 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 13:07:04 crc kubenswrapper[4773]: I0122 13:07:04.076738 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 13:07:04 crc kubenswrapper[4773]: I0122 13:07:04.076875 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2" gracePeriod=600 Jan 22 13:07:04 crc kubenswrapper[4773]: E0122 13:07:04.202205 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:07:04 crc kubenswrapper[4773]: I0122 13:07:04.749989 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2" exitCode=0 Jan 22 13:07:04 crc kubenswrapper[4773]: I0122 13:07:04.750108 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"} Jan 22 13:07:04 crc kubenswrapper[4773]: I0122 13:07:04.750207 4773 scope.go:117] "RemoveContainer" containerID="3e4c557e60566fa7490dab6f394b04defbcc9627b9ce632b202bd8c9b884a9dd" Jan 22 13:07:04 crc kubenswrapper[4773]: I0122 13:07:04.750590 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2" Jan 22 13:07:04 crc kubenswrapper[4773]: E0122 13:07:04.750843 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" 
Jan 22 13:07:19 crc kubenswrapper[4773]: I0122 13:07:19.658190 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:07:19 crc kubenswrapper[4773]: E0122 13:07:19.659379 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:07:32 crc kubenswrapper[4773]: I0122 13:07:32.660977 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:07:32 crc kubenswrapper[4773]: E0122 13:07:32.661858 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:07:45 crc kubenswrapper[4773]: I0122 13:07:45.659124 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:07:45 crc kubenswrapper[4773]: E0122 13:07:45.660189 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:07:59 crc kubenswrapper[4773]: I0122 13:07:59.660822 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:07:59 crc kubenswrapper[4773]: E0122 13:07:59.665457 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:08:10 crc kubenswrapper[4773]: I0122 13:08:10.658824 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:08:10 crc kubenswrapper[4773]: E0122 13:08:10.659775 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:08:25 crc kubenswrapper[4773]: I0122 13:08:25.658758 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:08:25 crc kubenswrapper[4773]: E0122 13:08:25.660811 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:08:37 crc kubenswrapper[4773]: I0122 13:08:37.657911 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:08:37 crc kubenswrapper[4773]: E0122 13:08:37.658840 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.129002 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9mwgz"]
Jan 22 13:08:42 crc kubenswrapper[4773]: E0122 13:08:42.129680 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba9eace9-5215-4a6c-9d10-e5b518b149e8" containerName="extract-utilities"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.129698 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba9eace9-5215-4a6c-9d10-e5b518b149e8" containerName="extract-utilities"
Jan 22 13:08:42 crc kubenswrapper[4773]: E0122 13:08:42.129714 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe1abe4b-1d4e-422f-9de0-11d96a85fa80" containerName="extract-content"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.129720 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe1abe4b-1d4e-422f-9de0-11d96a85fa80" containerName="extract-content"
Jan 22 13:08:42 crc kubenswrapper[4773]: E0122 13:08:42.129732 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe1abe4b-1d4e-422f-9de0-11d96a85fa80" containerName="registry-server"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.129738 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe1abe4b-1d4e-422f-9de0-11d96a85fa80" containerName="registry-server"
Jan 22 13:08:42 crc kubenswrapper[4773]: E0122 13:08:42.129752 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba9eace9-5215-4a6c-9d10-e5b518b149e8" containerName="registry-server"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.129765 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba9eace9-5215-4a6c-9d10-e5b518b149e8" containerName="registry-server"
Jan 22 13:08:42 crc kubenswrapper[4773]: E0122 13:08:42.129773 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe1abe4b-1d4e-422f-9de0-11d96a85fa80" containerName="extract-utilities"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.129779 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe1abe4b-1d4e-422f-9de0-11d96a85fa80" containerName="extract-utilities"
Jan 22 13:08:42 crc kubenswrapper[4773]: E0122 13:08:42.129792 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba9eace9-5215-4a6c-9d10-e5b518b149e8" containerName="extract-content"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.129798 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba9eace9-5215-4a6c-9d10-e5b518b149e8" containerName="extract-content"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.129928 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe1abe4b-1d4e-422f-9de0-11d96a85fa80" containerName="registry-server"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.129958 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba9eace9-5215-4a6c-9d10-e5b518b149e8" containerName="registry-server"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.131146 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9mwgz"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.143001 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9mwgz"]
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.325684 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/178bff38-be5e-4acf-b2c4-0cfda9c9c088-utilities\") pod \"community-operators-9mwgz\" (UID: \"178bff38-be5e-4acf-b2c4-0cfda9c9c088\") " pod="openshift-marketplace/community-operators-9mwgz"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.325737 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/178bff38-be5e-4acf-b2c4-0cfda9c9c088-catalog-content\") pod \"community-operators-9mwgz\" (UID: \"178bff38-be5e-4acf-b2c4-0cfda9c9c088\") " pod="openshift-marketplace/community-operators-9mwgz"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.325760 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lr58\" (UniqueName: \"kubernetes.io/projected/178bff38-be5e-4acf-b2c4-0cfda9c9c088-kube-api-access-5lr58\") pod \"community-operators-9mwgz\" (UID: \"178bff38-be5e-4acf-b2c4-0cfda9c9c088\") " pod="openshift-marketplace/community-operators-9mwgz"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.426948 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/178bff38-be5e-4acf-b2c4-0cfda9c9c088-utilities\") pod \"community-operators-9mwgz\" (UID: \"178bff38-be5e-4acf-b2c4-0cfda9c9c088\") " pod="openshift-marketplace/community-operators-9mwgz"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.426997 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/178bff38-be5e-4acf-b2c4-0cfda9c9c088-catalog-content\") pod \"community-operators-9mwgz\" (UID: \"178bff38-be5e-4acf-b2c4-0cfda9c9c088\") " pod="openshift-marketplace/community-operators-9mwgz"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.427019 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lr58\" (UniqueName: \"kubernetes.io/projected/178bff38-be5e-4acf-b2c4-0cfda9c9c088-kube-api-access-5lr58\") pod \"community-operators-9mwgz\" (UID: \"178bff38-be5e-4acf-b2c4-0cfda9c9c088\") " pod="openshift-marketplace/community-operators-9mwgz"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.427727 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/178bff38-be5e-4acf-b2c4-0cfda9c9c088-utilities\") pod \"community-operators-9mwgz\" (UID: \"178bff38-be5e-4acf-b2c4-0cfda9c9c088\") " pod="openshift-marketplace/community-operators-9mwgz"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.427820 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/178bff38-be5e-4acf-b2c4-0cfda9c9c088-catalog-content\") pod \"community-operators-9mwgz\" (UID: \"178bff38-be5e-4acf-b2c4-0cfda9c9c088\") " pod="openshift-marketplace/community-operators-9mwgz"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.447827 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lr58\" (UniqueName: \"kubernetes.io/projected/178bff38-be5e-4acf-b2c4-0cfda9c9c088-kube-api-access-5lr58\") pod \"community-operators-9mwgz\" (UID: \"178bff38-be5e-4acf-b2c4-0cfda9c9c088\") " pod="openshift-marketplace/community-operators-9mwgz"
Jan 22 13:08:42 crc kubenswrapper[4773]: I0122 13:08:42.451270 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9mwgz"
Jan 22 13:08:43 crc kubenswrapper[4773]: I0122 13:08:43.233469 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9mwgz"]
Jan 22 13:08:43 crc kubenswrapper[4773]: W0122 13:08:43.240478 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod178bff38_be5e_4acf_b2c4_0cfda9c9c088.slice/crio-c265a22f7924477c3d5400e5573ab46d1bb3d15236dc0835f52dccca26acd4f6 WatchSource:0}: Error finding container c265a22f7924477c3d5400e5573ab46d1bb3d15236dc0835f52dccca26acd4f6: Status 404 returned error can't find the container with id c265a22f7924477c3d5400e5573ab46d1bb3d15236dc0835f52dccca26acd4f6
Jan 22 13:08:43 crc kubenswrapper[4773]: I0122 13:08:43.875997 4773 generic.go:334] "Generic (PLEG): container finished" podID="178bff38-be5e-4acf-b2c4-0cfda9c9c088" containerID="04eb4fa296e1199b4ed96707fe92c7074e297897fa20ba035d37fb36a62e7a5f" exitCode=0
Jan 22 13:08:43 crc kubenswrapper[4773]: I0122 13:08:43.876377 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mwgz" event={"ID":"178bff38-be5e-4acf-b2c4-0cfda9c9c088","Type":"ContainerDied","Data":"04eb4fa296e1199b4ed96707fe92c7074e297897fa20ba035d37fb36a62e7a5f"}
Jan 22 13:08:43 crc kubenswrapper[4773]: I0122 13:08:43.876454 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mwgz" event={"ID":"178bff38-be5e-4acf-b2c4-0cfda9c9c088","Type":"ContainerStarted","Data":"c265a22f7924477c3d5400e5573ab46d1bb3d15236dc0835f52dccca26acd4f6"}
Jan 22 13:08:43 crc kubenswrapper[4773]: I0122 13:08:43.877518 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 22 13:08:45 crc kubenswrapper[4773]: I0122 13:08:45.894414 4773 generic.go:334] "Generic (PLEG): container finished" podID="178bff38-be5e-4acf-b2c4-0cfda9c9c088" containerID="4f6b0be12e146667d24cdc36cc0da5a4b0ac2e58b875721c4faae658010f188e" exitCode=0
Jan 22 13:08:45 crc kubenswrapper[4773]: I0122 13:08:45.894510 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mwgz" event={"ID":"178bff38-be5e-4acf-b2c4-0cfda9c9c088","Type":"ContainerDied","Data":"4f6b0be12e146667d24cdc36cc0da5a4b0ac2e58b875721c4faae658010f188e"}
Jan 22 13:08:46 crc kubenswrapper[4773]: I0122 13:08:46.904352 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mwgz" event={"ID":"178bff38-be5e-4acf-b2c4-0cfda9c9c088","Type":"ContainerStarted","Data":"637b967a603a773be0354131db3fc9e729283c1f3d03b86a76dd7473af3eaf73"}
Jan 22 13:08:46 crc kubenswrapper[4773]: I0122 13:08:46.935028 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9mwgz" podStartSLOduration=2.440148617 podStartE2EDuration="4.935004009s" podCreationTimestamp="2026-01-22 13:08:42 +0000 UTC" firstStartedPulling="2026-01-22 13:08:43.876671411 +0000 UTC m=+4431.454787266" lastFinishedPulling="2026-01-22 13:08:46.371526812 +0000 UTC m=+4433.949642658" observedRunningTime="2026-01-22 13:08:46.929606788 +0000 UTC m=+4434.507722633" watchObservedRunningTime="2026-01-22 13:08:46.935004009 +0000 UTC m=+4434.513119834"
Jan 22 13:08:49 crc kubenswrapper[4773]: I0122 13:08:49.657952 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:08:49 crc kubenswrapper[4773]: E0122 13:08:49.658855 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:08:52 crc kubenswrapper[4773]: I0122 13:08:52.452170 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9mwgz"
Jan 22 13:08:52 crc kubenswrapper[4773]: I0122 13:08:52.452244 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9mwgz"
Jan 22 13:08:52 crc kubenswrapper[4773]: I0122 13:08:52.500404 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9mwgz"
Jan 22 13:08:53 crc kubenswrapper[4773]: I0122 13:08:53.000093 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9mwgz"
Jan 22 13:08:53 crc kubenswrapper[4773]: I0122 13:08:53.098468 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9mwgz"]
Jan 22 13:08:54 crc kubenswrapper[4773]: I0122 13:08:54.972477 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9mwgz" podUID="178bff38-be5e-4acf-b2c4-0cfda9c9c088" containerName="registry-server" containerID="cri-o://637b967a603a773be0354131db3fc9e729283c1f3d03b86a76dd7473af3eaf73" gracePeriod=2
Jan 22 13:08:56 crc kubenswrapper[4773]: I0122 13:08:56.992777 4773 generic.go:334] "Generic (PLEG): container finished" podID="178bff38-be5e-4acf-b2c4-0cfda9c9c088" containerID="637b967a603a773be0354131db3fc9e729283c1f3d03b86a76dd7473af3eaf73" exitCode=0
Jan 22 13:08:56 crc kubenswrapper[4773]: I0122 13:08:56.992866 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mwgz" event={"ID":"178bff38-be5e-4acf-b2c4-0cfda9c9c088","Type":"ContainerDied","Data":"637b967a603a773be0354131db3fc9e729283c1f3d03b86a76dd7473af3eaf73"}
Jan 22 13:08:57 crc kubenswrapper[4773]: I0122 13:08:57.196190 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9mwgz"
Jan 22 13:08:57 crc kubenswrapper[4773]: I0122 13:08:57.295762 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lr58\" (UniqueName: \"kubernetes.io/projected/178bff38-be5e-4acf-b2c4-0cfda9c9c088-kube-api-access-5lr58\") pod \"178bff38-be5e-4acf-b2c4-0cfda9c9c088\" (UID: \"178bff38-be5e-4acf-b2c4-0cfda9c9c088\") "
Jan 22 13:08:57 crc kubenswrapper[4773]: I0122 13:08:57.295809 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/178bff38-be5e-4acf-b2c4-0cfda9c9c088-catalog-content\") pod \"178bff38-be5e-4acf-b2c4-0cfda9c9c088\" (UID: \"178bff38-be5e-4acf-b2c4-0cfda9c9c088\") "
Jan 22 13:08:57 crc kubenswrapper[4773]: I0122 13:08:57.295962 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/178bff38-be5e-4acf-b2c4-0cfda9c9c088-utilities\") pod \"178bff38-be5e-4acf-b2c4-0cfda9c9c088\" (UID: \"178bff38-be5e-4acf-b2c4-0cfda9c9c088\") "
Jan 22 13:08:57 crc kubenswrapper[4773]: I0122 13:08:57.298205 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/178bff38-be5e-4acf-b2c4-0cfda9c9c088-utilities" (OuterVolumeSpecName: "utilities") pod "178bff38-be5e-4acf-b2c4-0cfda9c9c088" (UID: "178bff38-be5e-4acf-b2c4-0cfda9c9c088"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 13:08:57 crc kubenswrapper[4773]: I0122 13:08:57.302915 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/178bff38-be5e-4acf-b2c4-0cfda9c9c088-kube-api-access-5lr58" (OuterVolumeSpecName: "kube-api-access-5lr58") pod "178bff38-be5e-4acf-b2c4-0cfda9c9c088" (UID: "178bff38-be5e-4acf-b2c4-0cfda9c9c088"). InnerVolumeSpecName "kube-api-access-5lr58". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 13:08:57 crc kubenswrapper[4773]: I0122 13:08:57.350067 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/178bff38-be5e-4acf-b2c4-0cfda9c9c088-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "178bff38-be5e-4acf-b2c4-0cfda9c9c088" (UID: "178bff38-be5e-4acf-b2c4-0cfda9c9c088"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 13:08:57 crc kubenswrapper[4773]: I0122 13:08:57.397804 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lr58\" (UniqueName: \"kubernetes.io/projected/178bff38-be5e-4acf-b2c4-0cfda9c9c088-kube-api-access-5lr58\") on node \"crc\" DevicePath \"\""
Jan 22 13:08:57 crc kubenswrapper[4773]: I0122 13:08:57.397854 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/178bff38-be5e-4acf-b2c4-0cfda9c9c088-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 13:08:57 crc kubenswrapper[4773]: I0122 13:08:57.397865 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/178bff38-be5e-4acf-b2c4-0cfda9c9c088-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 13:08:58 crc kubenswrapper[4773]: I0122 13:08:58.003769 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9mwgz" event={"ID":"178bff38-be5e-4acf-b2c4-0cfda9c9c088","Type":"ContainerDied","Data":"c265a22f7924477c3d5400e5573ab46d1bb3d15236dc0835f52dccca26acd4f6"}
Jan 22 13:08:58 crc kubenswrapper[4773]: I0122 13:08:58.003883 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9mwgz"
Jan 22 13:08:58 crc kubenswrapper[4773]: I0122 13:08:58.004702 4773 scope.go:117] "RemoveContainer" containerID="637b967a603a773be0354131db3fc9e729283c1f3d03b86a76dd7473af3eaf73"
Jan 22 13:08:58 crc kubenswrapper[4773]: I0122 13:08:58.028521 4773 scope.go:117] "RemoveContainer" containerID="4f6b0be12e146667d24cdc36cc0da5a4b0ac2e58b875721c4faae658010f188e"
Jan 22 13:08:58 crc kubenswrapper[4773]: I0122 13:08:58.043854 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9mwgz"]
Jan 22 13:08:58 crc kubenswrapper[4773]: I0122 13:08:58.060461 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9mwgz"]
Jan 22 13:08:58 crc kubenswrapper[4773]: I0122 13:08:58.064771 4773 scope.go:117] "RemoveContainer" containerID="04eb4fa296e1199b4ed96707fe92c7074e297897fa20ba035d37fb36a62e7a5f"
Jan 22 13:08:58 crc kubenswrapper[4773]: I0122 13:08:58.668159 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="178bff38-be5e-4acf-b2c4-0cfda9c9c088" path="/var/lib/kubelet/pods/178bff38-be5e-4acf-b2c4-0cfda9c9c088/volumes"
Jan 22 13:09:03 crc kubenswrapper[4773]: I0122 13:09:03.658436 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:09:03 crc kubenswrapper[4773]: E0122 13:09:03.659096 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:09:17 crc kubenswrapper[4773]: I0122 13:09:17.657664 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:09:17 crc kubenswrapper[4773]: E0122 13:09:17.658541 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:09:31 crc kubenswrapper[4773]: I0122 13:09:31.658114 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:09:31 crc kubenswrapper[4773]: E0122 13:09:31.658914 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:09:44 crc kubenswrapper[4773]: I0122 13:09:44.658731 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:09:44 crc kubenswrapper[4773]: E0122 13:09:44.659671 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:09:59 crc kubenswrapper[4773]: I0122 13:09:59.658920 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:09:59 crc kubenswrapper[4773]: E0122 13:09:59.659695 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:10:13 crc kubenswrapper[4773]: I0122 13:10:13.658875 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:10:13 crc kubenswrapper[4773]: E0122 13:10:13.659911 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:10:28 crc kubenswrapper[4773]: I0122 13:10:28.658178 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:10:28 crc kubenswrapper[4773]: E0122 13:10:28.659022 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:10:39 crc kubenswrapper[4773]: I0122 13:10:39.658726 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:10:39 crc kubenswrapper[4773]: E0122 13:10:39.659487 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:10:53 crc kubenswrapper[4773]: I0122 13:10:53.658612 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:10:53 crc kubenswrapper[4773]: E0122 13:10:53.659573 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:11:06 crc kubenswrapper[4773]: I0122 13:11:06.659300 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:11:06 crc kubenswrapper[4773]: E0122 13:11:06.660685 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:11:20 crc kubenswrapper[4773]: I0122 13:11:20.658862 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:11:20 crc kubenswrapper[4773]: E0122 13:11:20.659551 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:11:25 crc kubenswrapper[4773]: I0122 13:11:25.134018 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4j49g"]
Jan 22 13:11:25 crc kubenswrapper[4773]: E0122 13:11:25.134699 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="178bff38-be5e-4acf-b2c4-0cfda9c9c088" containerName="extract-utilities"
Jan 22 13:11:25 crc kubenswrapper[4773]: I0122 13:11:25.134711 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="178bff38-be5e-4acf-b2c4-0cfda9c9c088" containerName="extract-utilities"
Jan 22 13:11:25 crc kubenswrapper[4773]: E0122 13:11:25.134729 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="178bff38-be5e-4acf-b2c4-0cfda9c9c088" containerName="registry-server"
Jan 22 13:11:25 crc kubenswrapper[4773]: I0122 13:11:25.134735 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="178bff38-be5e-4acf-b2c4-0cfda9c9c088" containerName="registry-server"
Jan 22 13:11:25 crc kubenswrapper[4773]: E0122 13:11:25.134744 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="178bff38-be5e-4acf-b2c4-0cfda9c9c088" containerName="extract-content"
Jan 22 13:11:25 crc kubenswrapper[4773]: I0122 13:11:25.134752 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="178bff38-be5e-4acf-b2c4-0cfda9c9c088" containerName="extract-content"
Jan 22 13:11:25 crc kubenswrapper[4773]: I0122 13:11:25.134882 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="178bff38-be5e-4acf-b2c4-0cfda9c9c088" containerName="registry-server"
Jan 22 13:11:25 crc kubenswrapper[4773]: I0122 13:11:25.135967 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4j49g"
Jan 22 13:11:25 crc kubenswrapper[4773]: I0122 13:11:25.150887 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4j49g"]
Jan 22 13:11:25 crc kubenswrapper[4773]: I0122 13:11:25.189910 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzk7l\" (UniqueName: \"kubernetes.io/projected/ff7198f9-12e2-4d98-8e70-a4870c05f3ab-kube-api-access-kzk7l\") pod \"redhat-operators-4j49g\" (UID: \"ff7198f9-12e2-4d98-8e70-a4870c05f3ab\") " pod="openshift-marketplace/redhat-operators-4j49g"
Jan 22 13:11:25 crc kubenswrapper[4773]: I0122 13:11:25.190039 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff7198f9-12e2-4d98-8e70-a4870c05f3ab-catalog-content\") pod \"redhat-operators-4j49g\" (UID: \"ff7198f9-12e2-4d98-8e70-a4870c05f3ab\") " pod="openshift-marketplace/redhat-operators-4j49g"
Jan 22 13:11:25 crc kubenswrapper[4773]: I0122 13:11:25.190137 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff7198f9-12e2-4d98-8e70-a4870c05f3ab-utilities\") pod \"redhat-operators-4j49g\" (UID: \"ff7198f9-12e2-4d98-8e70-a4870c05f3ab\") " pod="openshift-marketplace/redhat-operators-4j49g"
Jan 22 13:11:25 crc kubenswrapper[4773]: I0122 13:11:25.291371 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzk7l\" (UniqueName: \"kubernetes.io/projected/ff7198f9-12e2-4d98-8e70-a4870c05f3ab-kube-api-access-kzk7l\") pod \"redhat-operators-4j49g\" (UID: \"ff7198f9-12e2-4d98-8e70-a4870c05f3ab\") " pod="openshift-marketplace/redhat-operators-4j49g"
Jan 22 13:11:25 crc kubenswrapper[4773]: I0122 13:11:25.291513 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff7198f9-12e2-4d98-8e70-a4870c05f3ab-catalog-content\") pod \"redhat-operators-4j49g\" (UID: \"ff7198f9-12e2-4d98-8e70-a4870c05f3ab\") " pod="openshift-marketplace/redhat-operators-4j49g"
Jan 22 13:11:25 crc kubenswrapper[4773]: I0122 13:11:25.291691 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff7198f9-12e2-4d98-8e70-a4870c05f3ab-utilities\") pod \"redhat-operators-4j49g\" (UID: \"ff7198f9-12e2-4d98-8e70-a4870c05f3ab\") " pod="openshift-marketplace/redhat-operators-4j49g"
Jan 22 13:11:25 crc kubenswrapper[4773]: I0122 13:11:25.292196 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff7198f9-12e2-4d98-8e70-a4870c05f3ab-catalog-content\") pod \"redhat-operators-4j49g\" (UID: \"ff7198f9-12e2-4d98-8e70-a4870c05f3ab\") " pod="openshift-marketplace/redhat-operators-4j49g"
Jan 22 13:11:25 crc kubenswrapper[4773]: I0122 13:11:25.292459 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff7198f9-12e2-4d98-8e70-a4870c05f3ab-utilities\") pod \"redhat-operators-4j49g\" (UID: \"ff7198f9-12e2-4d98-8e70-a4870c05f3ab\") " pod="openshift-marketplace/redhat-operators-4j49g"
Jan 22 13:11:25 crc kubenswrapper[4773]: I0122 13:11:25.321770 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzk7l\" (UniqueName: \"kubernetes.io/projected/ff7198f9-12e2-4d98-8e70-a4870c05f3ab-kube-api-access-kzk7l\") pod \"redhat-operators-4j49g\" (UID: \"ff7198f9-12e2-4d98-8e70-a4870c05f3ab\") " pod="openshift-marketplace/redhat-operators-4j49g"
Jan 22 13:11:25 crc kubenswrapper[4773]: I0122 13:11:25.458239 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4j49g"
Jan 22 13:11:25 crc kubenswrapper[4773]: I0122 13:11:25.990615 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4j49g"]
Jan 22 13:11:26 crc kubenswrapper[4773]: I0122 13:11:26.320157 4773 generic.go:334] "Generic (PLEG): container finished" podID="ff7198f9-12e2-4d98-8e70-a4870c05f3ab" containerID="9773776e88cd43ce3bd19c7789b427ecc683a6fed49d9947b565d2bc23c737f1" exitCode=0
Jan 22 13:11:26 crc kubenswrapper[4773]: I0122 13:11:26.320199 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4j49g" event={"ID":"ff7198f9-12e2-4d98-8e70-a4870c05f3ab","Type":"ContainerDied","Data":"9773776e88cd43ce3bd19c7789b427ecc683a6fed49d9947b565d2bc23c737f1"}
Jan 22 13:11:26 crc kubenswrapper[4773]: I0122 13:11:26.320226 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4j49g" event={"ID":"ff7198f9-12e2-4d98-8e70-a4870c05f3ab","Type":"ContainerStarted","Data":"9bbcb9053758d377688dc75c16691f9f6b8b268baab618dbc1602f78ace6a196"}
Jan 22 13:11:29 crc kubenswrapper[4773]: I0122 13:11:29.346796 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4j49g" event={"ID":"ff7198f9-12e2-4d98-8e70-a4870c05f3ab","Type":"ContainerStarted","Data":"debd449b62247887764dffca9058f8e74c8724a8b4a7159bf78ffe3d96225b11"}
Jan 22 13:11:30 crc kubenswrapper[4773]: I0122 13:11:30.358937 4773 generic.go:334] "Generic (PLEG): container finished" podID="ff7198f9-12e2-4d98-8e70-a4870c05f3ab" containerID="debd449b62247887764dffca9058f8e74c8724a8b4a7159bf78ffe3d96225b11" exitCode=0
Jan 22 13:11:30 crc kubenswrapper[4773]: I0122 13:11:30.358998 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4j49g" event={"ID":"ff7198f9-12e2-4d98-8e70-a4870c05f3ab","Type":"ContainerDied","Data":"debd449b62247887764dffca9058f8e74c8724a8b4a7159bf78ffe3d96225b11"}
Jan 22 13:11:31 crc kubenswrapper[4773]: I0122 13:11:31.369173 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4j49g" event={"ID":"ff7198f9-12e2-4d98-8e70-a4870c05f3ab","Type":"ContainerStarted","Data":"d1459cd828feee7e39b77960550fa70b8ee5401fc2ea6173dc3e5cc8b7563d71"}
Jan 22 13:11:31 crc kubenswrapper[4773]: I0122 13:11:31.399281 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4j49g" podStartSLOduration=1.8545677120000001 podStartE2EDuration="6.399259181s" podCreationTimestamp="2026-01-22 13:11:25 +0000 UTC" firstStartedPulling="2026-01-22 13:11:26.322274271 +0000 UTC m=+4593.900390096" lastFinishedPulling="2026-01-22 13:11:30.8669657 +0000 UTC m=+4598.445081565" observedRunningTime="2026-01-22 13:11:31.393474049 +0000 UTC m=+4598.971589894" watchObservedRunningTime="2026-01-22 13:11:31.399259181 +0000 UTC m=+4598.977374996"
Jan 22 13:11:31 crc kubenswrapper[4773]: I0122 13:11:31.658952 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:11:31 crc kubenswrapper[4773]: E0122 13:11:31.659364 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:11:35 crc kubenswrapper[4773]: I0122 13:11:35.458732 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4j49g"
Jan 22 13:11:35 crc kubenswrapper[4773]: I0122 13:11:35.459223 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4j49g"
Jan 22 13:11:36 crc kubenswrapper[4773]: I0122 13:11:36.507411 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4j49g" podUID="ff7198f9-12e2-4d98-8e70-a4870c05f3ab" containerName="registry-server" probeResult="failure" output=<
Jan 22 13:11:36 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s
Jan 22 13:11:36 crc kubenswrapper[4773]: >
Jan 22 13:11:45 crc kubenswrapper[4773]: I0122 13:11:45.501410 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4j49g"
Jan 22 13:11:45 crc kubenswrapper[4773]: I0122 13:11:45.561071 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4j49g"
Jan 22 13:11:45 crc kubenswrapper[4773]: I0122 13:11:45.814539 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4j49g"]
Jan 22 13:11:46 crc kubenswrapper[4773]: I0122 13:11:46.658469 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:11:46 crc kubenswrapper[4773]: E0122 13:11:46.659476 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:11:47 crc kubenswrapper[4773]: I0122 13:11:47.516035 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4j49g" podUID="ff7198f9-12e2-4d98-8e70-a4870c05f3ab" containerName="registry-server" containerID="cri-o://d1459cd828feee7e39b77960550fa70b8ee5401fc2ea6173dc3e5cc8b7563d71" gracePeriod=2
Jan 22 13:11:49 crc kubenswrapper[4773]: I0122 13:11:49.655879 4773 generic.go:334] "Generic (PLEG): container finished" podID="ff7198f9-12e2-4d98-8e70-a4870c05f3ab" containerID="d1459cd828feee7e39b77960550fa70b8ee5401fc2ea6173dc3e5cc8b7563d71" exitCode=0
Jan 22 13:11:49 crc kubenswrapper[4773]: I0122 13:11:49.655911 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4j49g" event={"ID":"ff7198f9-12e2-4d98-8e70-a4870c05f3ab","Type":"ContainerDied","Data":"d1459cd828feee7e39b77960550fa70b8ee5401fc2ea6173dc3e5cc8b7563d71"}
Jan 22 13:11:50 crc kubenswrapper[4773]: I0122 13:11:50.610687 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4j49g"
Jan 22 13:11:50 crc kubenswrapper[4773]: I0122 13:11:50.666467 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4j49g"
Jan 22 13:11:50 crc kubenswrapper[4773]: I0122 13:11:50.669483 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4j49g" event={"ID":"ff7198f9-12e2-4d98-8e70-a4870c05f3ab","Type":"ContainerDied","Data":"9bbcb9053758d377688dc75c16691f9f6b8b268baab618dbc1602f78ace6a196"}
Jan 22 13:11:50 crc kubenswrapper[4773]: I0122 13:11:50.669567 4773 scope.go:117] "RemoveContainer" containerID="d1459cd828feee7e39b77960550fa70b8ee5401fc2ea6173dc3e5cc8b7563d71"
Jan 22 13:11:50 crc kubenswrapper[4773]: I0122 13:11:50.687971 4773 scope.go:117] "RemoveContainer" containerID="debd449b62247887764dffca9058f8e74c8724a8b4a7159bf78ffe3d96225b11"
Jan 22 13:11:50 crc kubenswrapper[4773]: I0122 13:11:50.708653 4773 scope.go:117] "RemoveContainer" containerID="9773776e88cd43ce3bd19c7789b427ecc683a6fed49d9947b565d2bc23c737f1"
Jan 22 13:11:50 crc kubenswrapper[4773]: I0122 13:11:50.768956 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzk7l\" (UniqueName: \"kubernetes.io/projected/ff7198f9-12e2-4d98-8e70-a4870c05f3ab-kube-api-access-kzk7l\") pod \"ff7198f9-12e2-4d98-8e70-a4870c05f3ab\" (UID: \"ff7198f9-12e2-4d98-8e70-a4870c05f3ab\") "
Jan 22 13:11:50 crc kubenswrapper[4773]: I0122 13:11:50.769036 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff7198f9-12e2-4d98-8e70-a4870c05f3ab-utilities\") pod \"ff7198f9-12e2-4d98-8e70-a4870c05f3ab\" (UID: \"ff7198f9-12e2-4d98-8e70-a4870c05f3ab\") "
Jan 22 13:11:50 crc kubenswrapper[4773]: I0122 13:11:50.769090 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff7198f9-12e2-4d98-8e70-a4870c05f3ab-catalog-content\") pod \"ff7198f9-12e2-4d98-8e70-a4870c05f3ab\" (UID: \"ff7198f9-12e2-4d98-8e70-a4870c05f3ab\") "
Jan 22 13:11:50 crc kubenswrapper[4773]: I0122 13:11:50.770667 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff7198f9-12e2-4d98-8e70-a4870c05f3ab-utilities" (OuterVolumeSpecName: "utilities") pod "ff7198f9-12e2-4d98-8e70-a4870c05f3ab" (UID: "ff7198f9-12e2-4d98-8e70-a4870c05f3ab"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 13:11:50 crc kubenswrapper[4773]: I0122 13:11:50.774741 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff7198f9-12e2-4d98-8e70-a4870c05f3ab-kube-api-access-kzk7l" (OuterVolumeSpecName: "kube-api-access-kzk7l") pod "ff7198f9-12e2-4d98-8e70-a4870c05f3ab" (UID: "ff7198f9-12e2-4d98-8e70-a4870c05f3ab"). InnerVolumeSpecName "kube-api-access-kzk7l". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 13:11:50 crc kubenswrapper[4773]: I0122 13:11:50.870640 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff7198f9-12e2-4d98-8e70-a4870c05f3ab-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 13:11:50 crc kubenswrapper[4773]: I0122 13:11:50.870670 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzk7l\" (UniqueName: \"kubernetes.io/projected/ff7198f9-12e2-4d98-8e70-a4870c05f3ab-kube-api-access-kzk7l\") on node \"crc\" DevicePath \"\""
Jan 22 13:11:50 crc kubenswrapper[4773]: I0122 13:11:50.900467 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff7198f9-12e2-4d98-8e70-a4870c05f3ab-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ff7198f9-12e2-4d98-8e70-a4870c05f3ab" (UID: "ff7198f9-12e2-4d98-8e70-a4870c05f3ab"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 13:11:50 crc kubenswrapper[4773]: I0122 13:11:50.971338 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff7198f9-12e2-4d98-8e70-a4870c05f3ab-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 13:11:51 crc kubenswrapper[4773]: I0122 13:11:51.003253 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4j49g"]
Jan 22 13:11:51 crc kubenswrapper[4773]: I0122 13:11:51.010626 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4j49g"]
Jan 22 13:11:52 crc kubenswrapper[4773]: I0122 13:11:52.667837 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff7198f9-12e2-4d98-8e70-a4870c05f3ab" path="/var/lib/kubelet/pods/ff7198f9-12e2-4d98-8e70-a4870c05f3ab/volumes"
Jan 22 13:12:01 crc kubenswrapper[4773]: I0122 13:12:01.658863 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:12:01 crc kubenswrapper[4773]: E0122 13:12:01.659987 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:12:16 crc kubenswrapper[4773]: I0122 13:12:16.657891 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2"
Jan 22 13:12:16 crc kubenswrapper[4773]: I0122 13:12:16.900034 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"d4e2d921efb64680f5d5b106790c3c2cc804fe6e41b407d96c6141a303a48c40"}
Jan 22 13:13:51 crc kubenswrapper[4773]: I0122 13:13:51.449360 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-m2k2w"]
Jan 22 13:13:51 crc kubenswrapper[4773]: E0122 13:13:51.450177 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff7198f9-12e2-4d98-8e70-a4870c05f3ab" containerName="registry-server"
Jan 22 13:13:51 crc kubenswrapper[4773]: I0122 13:13:51.450188 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff7198f9-12e2-4d98-8e70-a4870c05f3ab" containerName="registry-server"
Jan 22 13:13:51 crc kubenswrapper[4773]: E0122 13:13:51.450216 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff7198f9-12e2-4d98-8e70-a4870c05f3ab" containerName="extract-content"
Jan 22 13:13:51 crc kubenswrapper[4773]: I0122 13:13:51.450225 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff7198f9-12e2-4d98-8e70-a4870c05f3ab" containerName="extract-content"
Jan 22 13:13:51 crc kubenswrapper[4773]: E0122 13:13:51.450235 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff7198f9-12e2-4d98-8e70-a4870c05f3ab" containerName="extract-utilities"
Jan 22 13:13:51 crc kubenswrapper[4773]: I0122 13:13:51.450243 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff7198f9-12e2-4d98-8e70-a4870c05f3ab" containerName="extract-utilities"
Jan 22 13:13:51 crc kubenswrapper[4773]: I0122 13:13:51.450421 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff7198f9-12e2-4d98-8e70-a4870c05f3ab" containerName="registry-server"
Jan 22 13:13:51 crc kubenswrapper[4773]: I0122 13:13:51.451513 4773 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m2k2w" Jan 22 13:13:51 crc kubenswrapper[4773]: I0122 13:13:51.514608 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m2k2w"] Jan 22 13:13:51 crc kubenswrapper[4773]: I0122 13:13:51.605730 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjhxv\" (UniqueName: \"kubernetes.io/projected/ea19c446-0aad-4830-9b37-2257dd6fbad9-kube-api-access-tjhxv\") pod \"redhat-marketplace-m2k2w\" (UID: \"ea19c446-0aad-4830-9b37-2257dd6fbad9\") " pod="openshift-marketplace/redhat-marketplace-m2k2w" Jan 22 13:13:51 crc kubenswrapper[4773]: I0122 13:13:51.605777 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea19c446-0aad-4830-9b37-2257dd6fbad9-utilities\") pod \"redhat-marketplace-m2k2w\" (UID: \"ea19c446-0aad-4830-9b37-2257dd6fbad9\") " pod="openshift-marketplace/redhat-marketplace-m2k2w" Jan 22 13:13:51 crc kubenswrapper[4773]: I0122 13:13:51.605814 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea19c446-0aad-4830-9b37-2257dd6fbad9-catalog-content\") pod \"redhat-marketplace-m2k2w\" (UID: \"ea19c446-0aad-4830-9b37-2257dd6fbad9\") " pod="openshift-marketplace/redhat-marketplace-m2k2w" Jan 22 13:13:51 crc kubenswrapper[4773]: I0122 13:13:51.707348 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea19c446-0aad-4830-9b37-2257dd6fbad9-catalog-content\") pod \"redhat-marketplace-m2k2w\" (UID: \"ea19c446-0aad-4830-9b37-2257dd6fbad9\") " pod="openshift-marketplace/redhat-marketplace-m2k2w" Jan 22 13:13:51 crc kubenswrapper[4773]: I0122 13:13:51.707489 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjhxv\" (UniqueName: \"kubernetes.io/projected/ea19c446-0aad-4830-9b37-2257dd6fbad9-kube-api-access-tjhxv\") pod \"redhat-marketplace-m2k2w\" (UID: \"ea19c446-0aad-4830-9b37-2257dd6fbad9\") " pod="openshift-marketplace/redhat-marketplace-m2k2w" Jan 22 13:13:51 crc kubenswrapper[4773]: I0122 13:13:51.707513 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea19c446-0aad-4830-9b37-2257dd6fbad9-utilities\") pod \"redhat-marketplace-m2k2w\" (UID: \"ea19c446-0aad-4830-9b37-2257dd6fbad9\") " pod="openshift-marketplace/redhat-marketplace-m2k2w" Jan 22 13:13:51 crc kubenswrapper[4773]: I0122 13:13:51.707948 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea19c446-0aad-4830-9b37-2257dd6fbad9-catalog-content\") pod \"redhat-marketplace-m2k2w\" (UID: \"ea19c446-0aad-4830-9b37-2257dd6fbad9\") " pod="openshift-marketplace/redhat-marketplace-m2k2w" Jan 22 13:13:51 crc kubenswrapper[4773]: I0122 13:13:51.708569 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea19c446-0aad-4830-9b37-2257dd6fbad9-utilities\") pod \"redhat-marketplace-m2k2w\" (UID: \"ea19c446-0aad-4830-9b37-2257dd6fbad9\") " pod="openshift-marketplace/redhat-marketplace-m2k2w" Jan 22 13:13:51 crc kubenswrapper[4773]: I0122 13:13:51.726811 4773 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-tjhxv\" (UniqueName: \"kubernetes.io/projected/ea19c446-0aad-4830-9b37-2257dd6fbad9-kube-api-access-tjhxv\") pod \"redhat-marketplace-m2k2w\" (UID: \"ea19c446-0aad-4830-9b37-2257dd6fbad9\") " pod="openshift-marketplace/redhat-marketplace-m2k2w" Jan 22 13:13:51 crc kubenswrapper[4773]: I0122 13:13:51.779707 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m2k2w" Jan 22 13:13:52 crc kubenswrapper[4773]: I0122 13:13:52.233421 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m2k2w"] Jan 22 13:13:52 crc kubenswrapper[4773]: I0122 13:13:52.678772 4773 generic.go:334] "Generic (PLEG): container finished" podID="ea19c446-0aad-4830-9b37-2257dd6fbad9" containerID="6cf1faec0c4510a2b9fecd001df1f07286fac19abe58d0c1b0e115422cae8781" exitCode=0 Jan 22 13:13:52 crc kubenswrapper[4773]: I0122 13:13:52.678814 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2k2w" event={"ID":"ea19c446-0aad-4830-9b37-2257dd6fbad9","Type":"ContainerDied","Data":"6cf1faec0c4510a2b9fecd001df1f07286fac19abe58d0c1b0e115422cae8781"} Jan 22 13:13:52 crc kubenswrapper[4773]: I0122 13:13:52.678840 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2k2w" event={"ID":"ea19c446-0aad-4830-9b37-2257dd6fbad9","Type":"ContainerStarted","Data":"618e04623ad0342bc2f184abc5c67bf59d47cc477f0cfa50ec61f49e7fa00249"} Jan 22 13:13:52 crc kubenswrapper[4773]: I0122 13:13:52.680766 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 13:13:53 crc kubenswrapper[4773]: I0122 13:13:53.687891 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2k2w" event={"ID":"ea19c446-0aad-4830-9b37-2257dd6fbad9","Type":"ContainerStarted","Data":"73fe6f5d01468a7faeb8e5dc2f116ec57a5cefbbd20bf15c7352b46bc1b664d6"} Jan 22 13:13:54 crc kubenswrapper[4773]: I0122 13:13:54.695604 4773 generic.go:334] "Generic (PLEG): container finished" podID="ea19c446-0aad-4830-9b37-2257dd6fbad9" containerID="73fe6f5d01468a7faeb8e5dc2f116ec57a5cefbbd20bf15c7352b46bc1b664d6" exitCode=0 Jan 22 13:13:54 crc kubenswrapper[4773]: I0122 13:13:54.695684 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2k2w" event={"ID":"ea19c446-0aad-4830-9b37-2257dd6fbad9","Type":"ContainerDied","Data":"73fe6f5d01468a7faeb8e5dc2f116ec57a5cefbbd20bf15c7352b46bc1b664d6"} Jan 22 13:13:54 crc kubenswrapper[4773]: I0122 13:13:54.695965 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2k2w" event={"ID":"ea19c446-0aad-4830-9b37-2257dd6fbad9","Type":"ContainerStarted","Data":"f17d1afeb588e8d9c2b5c079aa5f4e77093a29ced8b9d22ba3c090f5fc56ed33"} Jan 22 13:13:54 crc kubenswrapper[4773]: I0122 13:13:54.713908 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-m2k2w" podStartSLOduration=2.107384577 podStartE2EDuration="3.7138847s" podCreationTimestamp="2026-01-22 13:13:51 +0000 UTC" firstStartedPulling="2026-01-22 13:13:52.680512408 +0000 UTC m=+4740.258628223" lastFinishedPulling="2026-01-22 13:13:54.287012511 +0000 UTC m=+4741.865128346" observedRunningTime="2026-01-22 13:13:54.710818993 +0000 UTC m=+4742.288934818" watchObservedRunningTime="2026-01-22 13:13:54.7138847 +0000 UTC 
m=+4742.292000525" Jan 22 13:14:01 crc kubenswrapper[4773]: I0122 13:14:01.780907 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-m2k2w" Jan 22 13:14:01 crc kubenswrapper[4773]: I0122 13:14:01.781455 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-m2k2w" Jan 22 13:14:01 crc kubenswrapper[4773]: I0122 13:14:01.826972 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-m2k2w" Jan 22 13:14:02 crc kubenswrapper[4773]: I0122 13:14:02.808471 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-m2k2w" Jan 22 13:14:02 crc kubenswrapper[4773]: I0122 13:14:02.897170 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m2k2w"] Jan 22 13:14:04 crc kubenswrapper[4773]: I0122 13:14:04.772530 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-m2k2w" podUID="ea19c446-0aad-4830-9b37-2257dd6fbad9" containerName="registry-server" containerID="cri-o://f17d1afeb588e8d9c2b5c079aa5f4e77093a29ced8b9d22ba3c090f5fc56ed33" gracePeriod=2 Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.635519 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m2k2w" Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.721096 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea19c446-0aad-4830-9b37-2257dd6fbad9-catalog-content\") pod \"ea19c446-0aad-4830-9b37-2257dd6fbad9\" (UID: \"ea19c446-0aad-4830-9b37-2257dd6fbad9\") " Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.721252 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjhxv\" (UniqueName: \"kubernetes.io/projected/ea19c446-0aad-4830-9b37-2257dd6fbad9-kube-api-access-tjhxv\") pod \"ea19c446-0aad-4830-9b37-2257dd6fbad9\" (UID: \"ea19c446-0aad-4830-9b37-2257dd6fbad9\") " Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.721366 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea19c446-0aad-4830-9b37-2257dd6fbad9-utilities\") pod \"ea19c446-0aad-4830-9b37-2257dd6fbad9\" (UID: \"ea19c446-0aad-4830-9b37-2257dd6fbad9\") " Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.722694 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea19c446-0aad-4830-9b37-2257dd6fbad9-utilities" (OuterVolumeSpecName: "utilities") pod "ea19c446-0aad-4830-9b37-2257dd6fbad9" (UID: "ea19c446-0aad-4830-9b37-2257dd6fbad9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.726836 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea19c446-0aad-4830-9b37-2257dd6fbad9-kube-api-access-tjhxv" (OuterVolumeSpecName: "kube-api-access-tjhxv") pod "ea19c446-0aad-4830-9b37-2257dd6fbad9" (UID: "ea19c446-0aad-4830-9b37-2257dd6fbad9"). InnerVolumeSpecName "kube-api-access-tjhxv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.743249 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea19c446-0aad-4830-9b37-2257dd6fbad9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ea19c446-0aad-4830-9b37-2257dd6fbad9" (UID: "ea19c446-0aad-4830-9b37-2257dd6fbad9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.782278 4773 generic.go:334] "Generic (PLEG): container finished" podID="ea19c446-0aad-4830-9b37-2257dd6fbad9" containerID="f17d1afeb588e8d9c2b5c079aa5f4e77093a29ced8b9d22ba3c090f5fc56ed33" exitCode=0 Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.782353 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2k2w" event={"ID":"ea19c446-0aad-4830-9b37-2257dd6fbad9","Type":"ContainerDied","Data":"f17d1afeb588e8d9c2b5c079aa5f4e77093a29ced8b9d22ba3c090f5fc56ed33"} Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.782388 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2k2w" event={"ID":"ea19c446-0aad-4830-9b37-2257dd6fbad9","Type":"ContainerDied","Data":"618e04623ad0342bc2f184abc5c67bf59d47cc477f0cfa50ec61f49e7fa00249"} Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.782388 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m2k2w" Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.782412 4773 scope.go:117] "RemoveContainer" containerID="f17d1afeb588e8d9c2b5c079aa5f4e77093a29ced8b9d22ba3c090f5fc56ed33" Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.808611 4773 scope.go:117] "RemoveContainer" containerID="73fe6f5d01468a7faeb8e5dc2f116ec57a5cefbbd20bf15c7352b46bc1b664d6" Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.813040 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m2k2w"] Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.817848 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-m2k2w"] Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.823247 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjhxv\" (UniqueName: \"kubernetes.io/projected/ea19c446-0aad-4830-9b37-2257dd6fbad9-kube-api-access-tjhxv\") on node \"crc\" DevicePath \"\"" Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.823364 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea19c446-0aad-4830-9b37-2257dd6fbad9-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.823379 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea19c446-0aad-4830-9b37-2257dd6fbad9-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.840877 4773 scope.go:117] "RemoveContainer" containerID="6cf1faec0c4510a2b9fecd001df1f07286fac19abe58d0c1b0e115422cae8781" Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.856660 4773 scope.go:117] "RemoveContainer" containerID="f17d1afeb588e8d9c2b5c079aa5f4e77093a29ced8b9d22ba3c090f5fc56ed33" Jan 22 13:14:05 crc kubenswrapper[4773]: E0122 13:14:05.857131 4773 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f17d1afeb588e8d9c2b5c079aa5f4e77093a29ced8b9d22ba3c090f5fc56ed33\": container with ID starting with f17d1afeb588e8d9c2b5c079aa5f4e77093a29ced8b9d22ba3c090f5fc56ed33 not found: ID does not exist" containerID="f17d1afeb588e8d9c2b5c079aa5f4e77093a29ced8b9d22ba3c090f5fc56ed33" Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.857235 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f17d1afeb588e8d9c2b5c079aa5f4e77093a29ced8b9d22ba3c090f5fc56ed33"} err="failed to get container status \"f17d1afeb588e8d9c2b5c079aa5f4e77093a29ced8b9d22ba3c090f5fc56ed33\": rpc error: code = NotFound desc = could not find container \"f17d1afeb588e8d9c2b5c079aa5f4e77093a29ced8b9d22ba3c090f5fc56ed33\": container with ID starting with f17d1afeb588e8d9c2b5c079aa5f4e77093a29ced8b9d22ba3c090f5fc56ed33 not found: ID does not exist" Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.857269 4773 scope.go:117] "RemoveContainer" containerID="73fe6f5d01468a7faeb8e5dc2f116ec57a5cefbbd20bf15c7352b46bc1b664d6" Jan 22 13:14:05 crc kubenswrapper[4773]: E0122 13:14:05.857658 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73fe6f5d01468a7faeb8e5dc2f116ec57a5cefbbd20bf15c7352b46bc1b664d6\": container with ID starting with 73fe6f5d01468a7faeb8e5dc2f116ec57a5cefbbd20bf15c7352b46bc1b664d6 not found: ID does not exist" containerID="73fe6f5d01468a7faeb8e5dc2f116ec57a5cefbbd20bf15c7352b46bc1b664d6" Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.857710 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73fe6f5d01468a7faeb8e5dc2f116ec57a5cefbbd20bf15c7352b46bc1b664d6"} err="failed to get container status \"73fe6f5d01468a7faeb8e5dc2f116ec57a5cefbbd20bf15c7352b46bc1b664d6\": rpc error: code = NotFound desc = could not find container \"73fe6f5d01468a7faeb8e5dc2f116ec57a5cefbbd20bf15c7352b46bc1b664d6\": container with ID starting with 73fe6f5d01468a7faeb8e5dc2f116ec57a5cefbbd20bf15c7352b46bc1b664d6 not found: ID does not exist" Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.857746 4773 scope.go:117] "RemoveContainer" containerID="6cf1faec0c4510a2b9fecd001df1f07286fac19abe58d0c1b0e115422cae8781" Jan 22 13:14:05 crc kubenswrapper[4773]: E0122 13:14:05.858075 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cf1faec0c4510a2b9fecd001df1f07286fac19abe58d0c1b0e115422cae8781\": container with ID starting with 6cf1faec0c4510a2b9fecd001df1f07286fac19abe58d0c1b0e115422cae8781 not found: ID does not exist" containerID="6cf1faec0c4510a2b9fecd001df1f07286fac19abe58d0c1b0e115422cae8781" Jan 22 13:14:05 crc kubenswrapper[4773]: I0122 13:14:05.858093 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cf1faec0c4510a2b9fecd001df1f07286fac19abe58d0c1b0e115422cae8781"} err="failed to get container status \"6cf1faec0c4510a2b9fecd001df1f07286fac19abe58d0c1b0e115422cae8781\": rpc error: code = NotFound desc = could not find container \"6cf1faec0c4510a2b9fecd001df1f07286fac19abe58d0c1b0e115422cae8781\": container with ID starting with 6cf1faec0c4510a2b9fecd001df1f07286fac19abe58d0c1b0e115422cae8781 not found: ID does not exist" Jan 22 13:14:06 crc kubenswrapper[4773]: I0122 13:14:06.675755 4773 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="ea19c446-0aad-4830-9b37-2257dd6fbad9" path="/var/lib/kubelet/pods/ea19c446-0aad-4830-9b37-2257dd6fbad9/volumes" Jan 22 13:14:33 crc kubenswrapper[4773]: I0122 13:14:33.093722 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jd6fg"] Jan 22 13:14:33 crc kubenswrapper[4773]: E0122 13:14:33.097696 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea19c446-0aad-4830-9b37-2257dd6fbad9" containerName="registry-server" Jan 22 13:14:33 crc kubenswrapper[4773]: I0122 13:14:33.097987 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea19c446-0aad-4830-9b37-2257dd6fbad9" containerName="registry-server" Jan 22 13:14:33 crc kubenswrapper[4773]: E0122 13:14:33.098270 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea19c446-0aad-4830-9b37-2257dd6fbad9" containerName="extract-content" Jan 22 13:14:33 crc kubenswrapper[4773]: I0122 13:14:33.098518 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea19c446-0aad-4830-9b37-2257dd6fbad9" containerName="extract-content" Jan 22 13:14:33 crc kubenswrapper[4773]: E0122 13:14:33.098788 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea19c446-0aad-4830-9b37-2257dd6fbad9" containerName="extract-utilities" Jan 22 13:14:33 crc kubenswrapper[4773]: I0122 13:14:33.099001 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea19c446-0aad-4830-9b37-2257dd6fbad9" containerName="extract-utilities" Jan 22 13:14:33 crc kubenswrapper[4773]: I0122 13:14:33.099539 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea19c446-0aad-4830-9b37-2257dd6fbad9" containerName="registry-server" Jan 22 13:14:33 crc kubenswrapper[4773]: I0122 13:14:33.101495 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jd6fg" Jan 22 13:14:33 crc kubenswrapper[4773]: I0122 13:14:33.109389 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jd6fg"] Jan 22 13:14:33 crc kubenswrapper[4773]: I0122 13:14:33.195557 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00f7f9b6-3793-4ed1-8aee-44361d1b20b2-catalog-content\") pod \"certified-operators-jd6fg\" (UID: \"00f7f9b6-3793-4ed1-8aee-44361d1b20b2\") " pod="openshift-marketplace/certified-operators-jd6fg" Jan 22 13:14:33 crc kubenswrapper[4773]: I0122 13:14:33.195740 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n247h\" (UniqueName: \"kubernetes.io/projected/00f7f9b6-3793-4ed1-8aee-44361d1b20b2-kube-api-access-n247h\") pod \"certified-operators-jd6fg\" (UID: \"00f7f9b6-3793-4ed1-8aee-44361d1b20b2\") " pod="openshift-marketplace/certified-operators-jd6fg" Jan 22 13:14:33 crc kubenswrapper[4773]: I0122 13:14:33.195825 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00f7f9b6-3793-4ed1-8aee-44361d1b20b2-utilities\") pod \"certified-operators-jd6fg\" (UID: \"00f7f9b6-3793-4ed1-8aee-44361d1b20b2\") " pod="openshift-marketplace/certified-operators-jd6fg" Jan 22 13:14:33 crc kubenswrapper[4773]: I0122 13:14:33.297911 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n247h\" (UniqueName: \"kubernetes.io/projected/00f7f9b6-3793-4ed1-8aee-44361d1b20b2-kube-api-access-n247h\") pod \"certified-operators-jd6fg\" (UID: \"00f7f9b6-3793-4ed1-8aee-44361d1b20b2\") " pod="openshift-marketplace/certified-operators-jd6fg" Jan 22 13:14:33 crc kubenswrapper[4773]: I0122 13:14:33.298013 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00f7f9b6-3793-4ed1-8aee-44361d1b20b2-utilities\") pod \"certified-operators-jd6fg\" (UID: \"00f7f9b6-3793-4ed1-8aee-44361d1b20b2\") " pod="openshift-marketplace/certified-operators-jd6fg" Jan 22 13:14:33 crc kubenswrapper[4773]: I0122 13:14:33.298071 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00f7f9b6-3793-4ed1-8aee-44361d1b20b2-catalog-content\") pod \"certified-operators-jd6fg\" (UID: \"00f7f9b6-3793-4ed1-8aee-44361d1b20b2\") " pod="openshift-marketplace/certified-operators-jd6fg" Jan 22 13:14:33 crc kubenswrapper[4773]: I0122 13:14:33.298959 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00f7f9b6-3793-4ed1-8aee-44361d1b20b2-utilities\") pod \"certified-operators-jd6fg\" (UID: \"00f7f9b6-3793-4ed1-8aee-44361d1b20b2\") " pod="openshift-marketplace/certified-operators-jd6fg" Jan 22 13:14:33 crc kubenswrapper[4773]: I0122 13:14:33.299058 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00f7f9b6-3793-4ed1-8aee-44361d1b20b2-catalog-content\") pod \"certified-operators-jd6fg\" (UID: \"00f7f9b6-3793-4ed1-8aee-44361d1b20b2\") " pod="openshift-marketplace/certified-operators-jd6fg" Jan 22 13:14:33 crc kubenswrapper[4773]: I0122 13:14:33.318069 4773 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-n247h\" (UniqueName: \"kubernetes.io/projected/00f7f9b6-3793-4ed1-8aee-44361d1b20b2-kube-api-access-n247h\") pod \"certified-operators-jd6fg\" (UID: \"00f7f9b6-3793-4ed1-8aee-44361d1b20b2\") " pod="openshift-marketplace/certified-operators-jd6fg" Jan 22 13:14:33 crc kubenswrapper[4773]: I0122 13:14:33.426630 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jd6fg" Jan 22 13:14:33 crc kubenswrapper[4773]: I0122 13:14:33.995248 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jd6fg"] Jan 22 13:14:34 crc kubenswrapper[4773]: I0122 13:14:34.024222 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jd6fg" event={"ID":"00f7f9b6-3793-4ed1-8aee-44361d1b20b2","Type":"ContainerStarted","Data":"67d2180bdc9e3cd9001b429a79ecd52f0e2bf21a7bb1a5f78632324bcfe76010"} Jan 22 13:14:34 crc kubenswrapper[4773]: I0122 13:14:34.074041 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:14:34 crc kubenswrapper[4773]: I0122 13:14:34.074375 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:14:35 crc kubenswrapper[4773]: I0122 13:14:35.038756 4773 generic.go:334] "Generic (PLEG): container finished" podID="00f7f9b6-3793-4ed1-8aee-44361d1b20b2" containerID="28fb9c2dea7a55523f0fbc1de2a5e7bfae3bf34935f545916ea4f3b4675b6bcc" exitCode=0 Jan 22 13:14:35 crc kubenswrapper[4773]: I0122 13:14:35.038840 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jd6fg" event={"ID":"00f7f9b6-3793-4ed1-8aee-44361d1b20b2","Type":"ContainerDied","Data":"28fb9c2dea7a55523f0fbc1de2a5e7bfae3bf34935f545916ea4f3b4675b6bcc"} Jan 22 13:14:37 crc kubenswrapper[4773]: I0122 13:14:37.054052 4773 generic.go:334] "Generic (PLEG): container finished" podID="00f7f9b6-3793-4ed1-8aee-44361d1b20b2" containerID="3856b5fe4ff64fccadaec769a6ca9c78d2871a4f30b0d9319fc6c9cda5c89cb7" exitCode=0 Jan 22 13:14:37 crc kubenswrapper[4773]: I0122 13:14:37.054113 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jd6fg" event={"ID":"00f7f9b6-3793-4ed1-8aee-44361d1b20b2","Type":"ContainerDied","Data":"3856b5fe4ff64fccadaec769a6ca9c78d2871a4f30b0d9319fc6c9cda5c89cb7"} Jan 22 13:14:38 crc kubenswrapper[4773]: I0122 13:14:38.062917 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jd6fg" event={"ID":"00f7f9b6-3793-4ed1-8aee-44361d1b20b2","Type":"ContainerStarted","Data":"d088d08dc0396e8c04e744eb14ca837864d1247c19184a67b0595ce06d732b9e"} Jan 22 13:14:43 crc kubenswrapper[4773]: I0122 13:14:43.427725 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jd6fg" Jan 22 13:14:43 crc kubenswrapper[4773]: I0122 13:14:43.428901 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/certified-operators-jd6fg" Jan 22 13:14:43 crc kubenswrapper[4773]: I0122 13:14:43.471046 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jd6fg" Jan 22 13:14:43 crc kubenswrapper[4773]: I0122 13:14:43.494462 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jd6fg" podStartSLOduration=7.934491393 podStartE2EDuration="10.494424116s" podCreationTimestamp="2026-01-22 13:14:33 +0000 UTC" firstStartedPulling="2026-01-22 13:14:35.041417441 +0000 UTC m=+4782.619533266" lastFinishedPulling="2026-01-22 13:14:37.601350154 +0000 UTC m=+4785.179465989" observedRunningTime="2026-01-22 13:14:38.082213847 +0000 UTC m=+4785.660329672" watchObservedRunningTime="2026-01-22 13:14:43.494424116 +0000 UTC m=+4791.072539971" Jan 22 13:14:44 crc kubenswrapper[4773]: I0122 13:14:44.158459 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jd6fg" Jan 22 13:14:44 crc kubenswrapper[4773]: I0122 13:14:44.209644 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jd6fg"] Jan 22 13:14:46 crc kubenswrapper[4773]: I0122 13:14:46.136232 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jd6fg" podUID="00f7f9b6-3793-4ed1-8aee-44361d1b20b2" containerName="registry-server" containerID="cri-o://d088d08dc0396e8c04e744eb14ca837864d1247c19184a67b0595ce06d732b9e" gracePeriod=2 Jan 22 13:14:46 crc kubenswrapper[4773]: I0122 13:14:46.528517 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jd6fg" Jan 22 13:14:46 crc kubenswrapper[4773]: I0122 13:14:46.533281 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00f7f9b6-3793-4ed1-8aee-44361d1b20b2-utilities\") pod \"00f7f9b6-3793-4ed1-8aee-44361d1b20b2\" (UID: \"00f7f9b6-3793-4ed1-8aee-44361d1b20b2\") " Jan 22 13:14:46 crc kubenswrapper[4773]: I0122 13:14:46.533422 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00f7f9b6-3793-4ed1-8aee-44361d1b20b2-catalog-content\") pod \"00f7f9b6-3793-4ed1-8aee-44361d1b20b2\" (UID: \"00f7f9b6-3793-4ed1-8aee-44361d1b20b2\") " Jan 22 13:14:46 crc kubenswrapper[4773]: I0122 13:14:46.533491 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n247h\" (UniqueName: \"kubernetes.io/projected/00f7f9b6-3793-4ed1-8aee-44361d1b20b2-kube-api-access-n247h\") pod \"00f7f9b6-3793-4ed1-8aee-44361d1b20b2\" (UID: \"00f7f9b6-3793-4ed1-8aee-44361d1b20b2\") " Jan 22 13:14:46 crc kubenswrapper[4773]: I0122 13:14:46.536122 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00f7f9b6-3793-4ed1-8aee-44361d1b20b2-utilities" (OuterVolumeSpecName: "utilities") pod "00f7f9b6-3793-4ed1-8aee-44361d1b20b2" (UID: "00f7f9b6-3793-4ed1-8aee-44361d1b20b2"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:14:46 crc kubenswrapper[4773]: I0122 13:14:46.542895 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00f7f9b6-3793-4ed1-8aee-44361d1b20b2-kube-api-access-n247h" (OuterVolumeSpecName: "kube-api-access-n247h") pod "00f7f9b6-3793-4ed1-8aee-44361d1b20b2" (UID: "00f7f9b6-3793-4ed1-8aee-44361d1b20b2"). InnerVolumeSpecName "kube-api-access-n247h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:14:46 crc kubenswrapper[4773]: I0122 13:14:46.630858 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00f7f9b6-3793-4ed1-8aee-44361d1b20b2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "00f7f9b6-3793-4ed1-8aee-44361d1b20b2" (UID: "00f7f9b6-3793-4ed1-8aee-44361d1b20b2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:14:46 crc kubenswrapper[4773]: I0122 13:14:46.634767 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/00f7f9b6-3793-4ed1-8aee-44361d1b20b2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 13:14:46 crc kubenswrapper[4773]: I0122 13:14:46.634805 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n247h\" (UniqueName: \"kubernetes.io/projected/00f7f9b6-3793-4ed1-8aee-44361d1b20b2-kube-api-access-n247h\") on node \"crc\" DevicePath \"\"" Jan 22 13:14:46 crc kubenswrapper[4773]: I0122 13:14:46.634822 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/00f7f9b6-3793-4ed1-8aee-44361d1b20b2-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 13:14:47 crc kubenswrapper[4773]: I0122 13:14:47.148018 4773 generic.go:334] "Generic (PLEG): container finished" podID="00f7f9b6-3793-4ed1-8aee-44361d1b20b2" containerID="d088d08dc0396e8c04e744eb14ca837864d1247c19184a67b0595ce06d732b9e" exitCode=0 Jan 22 13:14:47 crc kubenswrapper[4773]: I0122 13:14:47.148085 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jd6fg" event={"ID":"00f7f9b6-3793-4ed1-8aee-44361d1b20b2","Type":"ContainerDied","Data":"d088d08dc0396e8c04e744eb14ca837864d1247c19184a67b0595ce06d732b9e"} Jan 22 13:14:47 crc kubenswrapper[4773]: I0122 13:14:47.148342 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jd6fg" event={"ID":"00f7f9b6-3793-4ed1-8aee-44361d1b20b2","Type":"ContainerDied","Data":"67d2180bdc9e3cd9001b429a79ecd52f0e2bf21a7bb1a5f78632324bcfe76010"} Jan 22 13:14:47 crc kubenswrapper[4773]: I0122 13:14:47.148205 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jd6fg" Jan 22 13:14:47 crc kubenswrapper[4773]: I0122 13:14:47.148388 4773 scope.go:117] "RemoveContainer" containerID="d088d08dc0396e8c04e744eb14ca837864d1247c19184a67b0595ce06d732b9e" Jan 22 13:14:47 crc kubenswrapper[4773]: I0122 13:14:47.179801 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jd6fg"] Jan 22 13:14:47 crc kubenswrapper[4773]: I0122 13:14:47.183884 4773 scope.go:117] "RemoveContainer" containerID="3856b5fe4ff64fccadaec769a6ca9c78d2871a4f30b0d9319fc6c9cda5c89cb7" Jan 22 13:14:47 crc kubenswrapper[4773]: I0122 13:14:47.191892 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jd6fg"] Jan 22 13:14:47 crc kubenswrapper[4773]: I0122 13:14:47.207611 4773 scope.go:117] "RemoveContainer" containerID="28fb9c2dea7a55523f0fbc1de2a5e7bfae3bf34935f545916ea4f3b4675b6bcc" Jan 22 13:14:47 crc kubenswrapper[4773]: I0122 13:14:47.254095 4773 scope.go:117] "RemoveContainer" containerID="d088d08dc0396e8c04e744eb14ca837864d1247c19184a67b0595ce06d732b9e" Jan 22 13:14:47 crc kubenswrapper[4773]: E0122 13:14:47.254685 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d088d08dc0396e8c04e744eb14ca837864d1247c19184a67b0595ce06d732b9e\": container with ID starting with d088d08dc0396e8c04e744eb14ca837864d1247c19184a67b0595ce06d732b9e not found: ID does not exist" containerID="d088d08dc0396e8c04e744eb14ca837864d1247c19184a67b0595ce06d732b9e" Jan 22 13:14:47 crc kubenswrapper[4773]: I0122 13:14:47.254726 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d088d08dc0396e8c04e744eb14ca837864d1247c19184a67b0595ce06d732b9e"} err="failed to get container status \"d088d08dc0396e8c04e744eb14ca837864d1247c19184a67b0595ce06d732b9e\": rpc error: code = NotFound desc = could not find container \"d088d08dc0396e8c04e744eb14ca837864d1247c19184a67b0595ce06d732b9e\": container with ID starting with d088d08dc0396e8c04e744eb14ca837864d1247c19184a67b0595ce06d732b9e not found: ID does not exist" Jan 22 13:14:47 crc kubenswrapper[4773]: I0122 13:14:47.254748 4773 scope.go:117] "RemoveContainer" containerID="3856b5fe4ff64fccadaec769a6ca9c78d2871a4f30b0d9319fc6c9cda5c89cb7" Jan 22 13:14:47 crc kubenswrapper[4773]: E0122 13:14:47.255308 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3856b5fe4ff64fccadaec769a6ca9c78d2871a4f30b0d9319fc6c9cda5c89cb7\": container with ID starting with 3856b5fe4ff64fccadaec769a6ca9c78d2871a4f30b0d9319fc6c9cda5c89cb7 not found: ID does not exist" containerID="3856b5fe4ff64fccadaec769a6ca9c78d2871a4f30b0d9319fc6c9cda5c89cb7" Jan 22 13:14:47 crc kubenswrapper[4773]: I0122 13:14:47.255330 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3856b5fe4ff64fccadaec769a6ca9c78d2871a4f30b0d9319fc6c9cda5c89cb7"} err="failed to get container status \"3856b5fe4ff64fccadaec769a6ca9c78d2871a4f30b0d9319fc6c9cda5c89cb7\": rpc error: code = NotFound desc = could not find container \"3856b5fe4ff64fccadaec769a6ca9c78d2871a4f30b0d9319fc6c9cda5c89cb7\": container with ID starting with 3856b5fe4ff64fccadaec769a6ca9c78d2871a4f30b0d9319fc6c9cda5c89cb7 not found: ID does not exist" Jan 22 13:14:47 crc kubenswrapper[4773]: I0122 13:14:47.255343 4773 scope.go:117] "RemoveContainer" 
containerID="28fb9c2dea7a55523f0fbc1de2a5e7bfae3bf34935f545916ea4f3b4675b6bcc" Jan 22 13:14:47 crc kubenswrapper[4773]: E0122 13:14:47.255686 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28fb9c2dea7a55523f0fbc1de2a5e7bfae3bf34935f545916ea4f3b4675b6bcc\": container with ID starting with 28fb9c2dea7a55523f0fbc1de2a5e7bfae3bf34935f545916ea4f3b4675b6bcc not found: ID does not exist" containerID="28fb9c2dea7a55523f0fbc1de2a5e7bfae3bf34935f545916ea4f3b4675b6bcc" Jan 22 13:14:47 crc kubenswrapper[4773]: I0122 13:14:47.255706 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28fb9c2dea7a55523f0fbc1de2a5e7bfae3bf34935f545916ea4f3b4675b6bcc"} err="failed to get container status \"28fb9c2dea7a55523f0fbc1de2a5e7bfae3bf34935f545916ea4f3b4675b6bcc\": rpc error: code = NotFound desc = could not find container \"28fb9c2dea7a55523f0fbc1de2a5e7bfae3bf34935f545916ea4f3b4675b6bcc\": container with ID starting with 28fb9c2dea7a55523f0fbc1de2a5e7bfae3bf34935f545916ea4f3b4675b6bcc not found: ID does not exist" Jan 22 13:14:48 crc kubenswrapper[4773]: I0122 13:14:48.684600 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00f7f9b6-3793-4ed1-8aee-44361d1b20b2" path="/var/lib/kubelet/pods/00f7f9b6-3793-4ed1-8aee-44361d1b20b2/volumes" Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.162825 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs"] Jan 22 13:15:00 crc kubenswrapper[4773]: E0122 13:15:00.163916 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00f7f9b6-3793-4ed1-8aee-44361d1b20b2" containerName="extract-content" Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.163938 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="00f7f9b6-3793-4ed1-8aee-44361d1b20b2" containerName="extract-content" Jan 22 13:15:00 crc kubenswrapper[4773]: E0122 13:15:00.163961 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00f7f9b6-3793-4ed1-8aee-44361d1b20b2" containerName="extract-utilities" Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.163970 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="00f7f9b6-3793-4ed1-8aee-44361d1b20b2" containerName="extract-utilities" Jan 22 13:15:00 crc kubenswrapper[4773]: E0122 13:15:00.163990 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00f7f9b6-3793-4ed1-8aee-44361d1b20b2" containerName="registry-server" Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.163998 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="00f7f9b6-3793-4ed1-8aee-44361d1b20b2" containerName="registry-server" Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.164200 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="00f7f9b6-3793-4ed1-8aee-44361d1b20b2" containerName="registry-server" Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.164977 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs" Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.167988 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.169472 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.183053 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs"] Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.254103 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eaa1bb3f-ee17-49fc-b936-2059f38308a1-config-volume\") pod \"collect-profiles-29484795-p5vcs\" (UID: \"eaa1bb3f-ee17-49fc-b936-2059f38308a1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs" Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.254258 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdr8d\" (UniqueName: \"kubernetes.io/projected/eaa1bb3f-ee17-49fc-b936-2059f38308a1-kube-api-access-kdr8d\") pod \"collect-profiles-29484795-p5vcs\" (UID: \"eaa1bb3f-ee17-49fc-b936-2059f38308a1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs" Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.254364 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eaa1bb3f-ee17-49fc-b936-2059f38308a1-secret-volume\") pod \"collect-profiles-29484795-p5vcs\" (UID: \"eaa1bb3f-ee17-49fc-b936-2059f38308a1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs" Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.355709 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdr8d\" (UniqueName: \"kubernetes.io/projected/eaa1bb3f-ee17-49fc-b936-2059f38308a1-kube-api-access-kdr8d\") pod \"collect-profiles-29484795-p5vcs\" (UID: \"eaa1bb3f-ee17-49fc-b936-2059f38308a1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs" Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.355829 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eaa1bb3f-ee17-49fc-b936-2059f38308a1-secret-volume\") pod \"collect-profiles-29484795-p5vcs\" (UID: \"eaa1bb3f-ee17-49fc-b936-2059f38308a1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs" Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.355888 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eaa1bb3f-ee17-49fc-b936-2059f38308a1-config-volume\") pod \"collect-profiles-29484795-p5vcs\" (UID: \"eaa1bb3f-ee17-49fc-b936-2059f38308a1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs" Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.356803 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eaa1bb3f-ee17-49fc-b936-2059f38308a1-config-volume\") pod 
\"collect-profiles-29484795-p5vcs\" (UID: \"eaa1bb3f-ee17-49fc-b936-2059f38308a1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs" Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.375358 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eaa1bb3f-ee17-49fc-b936-2059f38308a1-secret-volume\") pod \"collect-profiles-29484795-p5vcs\" (UID: \"eaa1bb3f-ee17-49fc-b936-2059f38308a1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs" Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.377937 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdr8d\" (UniqueName: \"kubernetes.io/projected/eaa1bb3f-ee17-49fc-b936-2059f38308a1-kube-api-access-kdr8d\") pod \"collect-profiles-29484795-p5vcs\" (UID: \"eaa1bb3f-ee17-49fc-b936-2059f38308a1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs" Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.494762 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs" Jan 22 13:15:00 crc kubenswrapper[4773]: I0122 13:15:00.737602 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs"] Jan 22 13:15:01 crc kubenswrapper[4773]: I0122 13:15:01.263236 4773 generic.go:334] "Generic (PLEG): container finished" podID="eaa1bb3f-ee17-49fc-b936-2059f38308a1" containerID="7e5a6cbfd6311a46afa3f8a28578f2cb4da026ca6b84322a25210a3dcd9f8b37" exitCode=0 Jan 22 13:15:01 crc kubenswrapper[4773]: I0122 13:15:01.263277 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs" event={"ID":"eaa1bb3f-ee17-49fc-b936-2059f38308a1","Type":"ContainerDied","Data":"7e5a6cbfd6311a46afa3f8a28578f2cb4da026ca6b84322a25210a3dcd9f8b37"} Jan 22 13:15:01 crc kubenswrapper[4773]: I0122 13:15:01.263320 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs" event={"ID":"eaa1bb3f-ee17-49fc-b936-2059f38308a1","Type":"ContainerStarted","Data":"a3930d70b1a653d6827c1f2b361ce16b493ced025298507788f59e909b02778c"} Jan 22 13:15:02 crc kubenswrapper[4773]: I0122 13:15:02.544089 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs" Jan 22 13:15:02 crc kubenswrapper[4773]: I0122 13:15:02.761917 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eaa1bb3f-ee17-49fc-b936-2059f38308a1-secret-volume\") pod \"eaa1bb3f-ee17-49fc-b936-2059f38308a1\" (UID: \"eaa1bb3f-ee17-49fc-b936-2059f38308a1\") " Jan 22 13:15:02 crc kubenswrapper[4773]: I0122 13:15:02.793558 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eaa1bb3f-ee17-49fc-b936-2059f38308a1-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "eaa1bb3f-ee17-49fc-b936-2059f38308a1" (UID: "eaa1bb3f-ee17-49fc-b936-2059f38308a1"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:15:02 crc kubenswrapper[4773]: I0122 13:15:02.872852 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eaa1bb3f-ee17-49fc-b936-2059f38308a1-config-volume\") pod \"eaa1bb3f-ee17-49fc-b936-2059f38308a1\" (UID: \"eaa1bb3f-ee17-49fc-b936-2059f38308a1\") " Jan 22 13:15:02 crc kubenswrapper[4773]: I0122 13:15:02.873215 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kdr8d\" (UniqueName: \"kubernetes.io/projected/eaa1bb3f-ee17-49fc-b936-2059f38308a1-kube-api-access-kdr8d\") pod \"eaa1bb3f-ee17-49fc-b936-2059f38308a1\" (UID: \"eaa1bb3f-ee17-49fc-b936-2059f38308a1\") " Jan 22 13:15:02 crc kubenswrapper[4773]: I0122 13:15:02.873680 4773 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eaa1bb3f-ee17-49fc-b936-2059f38308a1-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 13:15:02 crc kubenswrapper[4773]: I0122 13:15:02.873742 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eaa1bb3f-ee17-49fc-b936-2059f38308a1-config-volume" (OuterVolumeSpecName: "config-volume") pod "eaa1bb3f-ee17-49fc-b936-2059f38308a1" (UID: "eaa1bb3f-ee17-49fc-b936-2059f38308a1"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:15:02 crc kubenswrapper[4773]: I0122 13:15:02.875934 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eaa1bb3f-ee17-49fc-b936-2059f38308a1-kube-api-access-kdr8d" (OuterVolumeSpecName: "kube-api-access-kdr8d") pod "eaa1bb3f-ee17-49fc-b936-2059f38308a1" (UID: "eaa1bb3f-ee17-49fc-b936-2059f38308a1"). InnerVolumeSpecName "kube-api-access-kdr8d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:15:02 crc kubenswrapper[4773]: I0122 13:15:02.975796 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdr8d\" (UniqueName: \"kubernetes.io/projected/eaa1bb3f-ee17-49fc-b936-2059f38308a1-kube-api-access-kdr8d\") on node \"crc\" DevicePath \"\"" Jan 22 13:15:02 crc kubenswrapper[4773]: I0122 13:15:02.975862 4773 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eaa1bb3f-ee17-49fc-b936-2059f38308a1-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 13:15:03 crc kubenswrapper[4773]: I0122 13:15:03.283137 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs" event={"ID":"eaa1bb3f-ee17-49fc-b936-2059f38308a1","Type":"ContainerDied","Data":"a3930d70b1a653d6827c1f2b361ce16b493ced025298507788f59e909b02778c"} Jan 22 13:15:03 crc kubenswrapper[4773]: I0122 13:15:03.283521 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a3930d70b1a653d6827c1f2b361ce16b493ced025298507788f59e909b02778c" Jan 22 13:15:03 crc kubenswrapper[4773]: I0122 13:15:03.283199 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs" Jan 22 13:15:03 crc kubenswrapper[4773]: I0122 13:15:03.869839 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc"] Jan 22 13:15:03 crc kubenswrapper[4773]: I0122 13:15:03.876241 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484750-trskc"] Jan 22 13:15:04 crc kubenswrapper[4773]: I0122 13:15:04.073925 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:15:04 crc kubenswrapper[4773]: I0122 13:15:04.074021 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:15:04 crc kubenswrapper[4773]: I0122 13:15:04.673014 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b997907-7bc8-4d67-96a0-eacb8edc2bbd" path="/var/lib/kubelet/pods/5b997907-7bc8-4d67-96a0-eacb8edc2bbd/volumes" Jan 22 13:15:34 crc kubenswrapper[4773]: I0122 13:15:34.074538 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:15:34 crc kubenswrapper[4773]: I0122 13:15:34.075054 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:15:34 crc kubenswrapper[4773]: I0122 13:15:34.075101 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 13:15:34 crc kubenswrapper[4773]: I0122 13:15:34.075663 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d4e2d921efb64680f5d5b106790c3c2cc804fe6e41b407d96c6141a303a48c40"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 13:15:34 crc kubenswrapper[4773]: I0122 13:15:34.075723 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://d4e2d921efb64680f5d5b106790c3c2cc804fe6e41b407d96c6141a303a48c40" gracePeriod=600 Jan 22 13:15:34 crc kubenswrapper[4773]: I0122 13:15:34.588986 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="d4e2d921efb64680f5d5b106790c3c2cc804fe6e41b407d96c6141a303a48c40" exitCode=0 Jan 22 13:15:34 crc 
kubenswrapper[4773]: I0122 13:15:34.589141 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"d4e2d921efb64680f5d5b106790c3c2cc804fe6e41b407d96c6141a303a48c40"} Jan 22 13:15:34 crc kubenswrapper[4773]: I0122 13:15:34.589269 4773 scope.go:117] "RemoveContainer" containerID="b3e2e9bcdf5963ae702a937f226d11ad6df3ede6ce78f71594f2bb17a93ba2d2" Jan 22 13:15:35 crc kubenswrapper[4773]: I0122 13:15:35.597106 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47"} Jan 22 13:16:04 crc kubenswrapper[4773]: I0122 13:16:04.102929 4773 scope.go:117] "RemoveContainer" containerID="f9a9743f0629d424353b0e303711e967d587c657f37d582b22e16b065aedc0ed" Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.483303 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-xtlsk"] Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.489309 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-xtlsk"] Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.625976 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-ljnpb"] Jan 22 13:16:22 crc kubenswrapper[4773]: E0122 13:16:22.626848 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaa1bb3f-ee17-49fc-b936-2059f38308a1" containerName="collect-profiles" Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.626988 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaa1bb3f-ee17-49fc-b936-2059f38308a1" containerName="collect-profiles" Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.627414 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="eaa1bb3f-ee17-49fc-b936-2059f38308a1" containerName="collect-profiles" Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.628264 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-ljnpb" Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.630336 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.630812 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.631653 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.631803 4773 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-cb2wx" Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.642154 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-ljnpb"] Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.669272 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03cd6a75-2bfa-4fcd-b6fe-658685d87f4b" path="/var/lib/kubelet/pods/03cd6a75-2bfa-4fcd-b6fe-658685d87f4b/volumes" Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.703480 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9c59d362-e892-4a72-8efa-4ecafc26a40e-node-mnt\") pod \"crc-storage-crc-ljnpb\" (UID: \"9c59d362-e892-4a72-8efa-4ecafc26a40e\") " pod="crc-storage/crc-storage-crc-ljnpb" Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.703565 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9c59d362-e892-4a72-8efa-4ecafc26a40e-crc-storage\") pod \"crc-storage-crc-ljnpb\" (UID: \"9c59d362-e892-4a72-8efa-4ecafc26a40e\") " pod="crc-storage/crc-storage-crc-ljnpb" Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.703673 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxqzx\" (UniqueName: \"kubernetes.io/projected/9c59d362-e892-4a72-8efa-4ecafc26a40e-kube-api-access-fxqzx\") pod \"crc-storage-crc-ljnpb\" (UID: \"9c59d362-e892-4a72-8efa-4ecafc26a40e\") " pod="crc-storage/crc-storage-crc-ljnpb" Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.805025 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxqzx\" (UniqueName: \"kubernetes.io/projected/9c59d362-e892-4a72-8efa-4ecafc26a40e-kube-api-access-fxqzx\") pod \"crc-storage-crc-ljnpb\" (UID: \"9c59d362-e892-4a72-8efa-4ecafc26a40e\") " pod="crc-storage/crc-storage-crc-ljnpb" Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.805099 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9c59d362-e892-4a72-8efa-4ecafc26a40e-node-mnt\") pod \"crc-storage-crc-ljnpb\" (UID: \"9c59d362-e892-4a72-8efa-4ecafc26a40e\") " pod="crc-storage/crc-storage-crc-ljnpb" Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.805160 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9c59d362-e892-4a72-8efa-4ecafc26a40e-crc-storage\") pod \"crc-storage-crc-ljnpb\" (UID: \"9c59d362-e892-4a72-8efa-4ecafc26a40e\") " pod="crc-storage/crc-storage-crc-ljnpb" Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.805590 4773 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9c59d362-e892-4a72-8efa-4ecafc26a40e-node-mnt\") pod \"crc-storage-crc-ljnpb\" (UID: \"9c59d362-e892-4a72-8efa-4ecafc26a40e\") " pod="crc-storage/crc-storage-crc-ljnpb" Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.806143 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9c59d362-e892-4a72-8efa-4ecafc26a40e-crc-storage\") pod \"crc-storage-crc-ljnpb\" (UID: \"9c59d362-e892-4a72-8efa-4ecafc26a40e\") " pod="crc-storage/crc-storage-crc-ljnpb" Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.830771 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxqzx\" (UniqueName: \"kubernetes.io/projected/9c59d362-e892-4a72-8efa-4ecafc26a40e-kube-api-access-fxqzx\") pod \"crc-storage-crc-ljnpb\" (UID: \"9c59d362-e892-4a72-8efa-4ecafc26a40e\") " pod="crc-storage/crc-storage-crc-ljnpb" Jan 22 13:16:22 crc kubenswrapper[4773]: I0122 13:16:22.945877 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-ljnpb" Jan 22 13:16:23 crc kubenswrapper[4773]: I0122 13:16:23.389056 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-ljnpb"] Jan 22 13:16:23 crc kubenswrapper[4773]: I0122 13:16:23.975853 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-ljnpb" event={"ID":"9c59d362-e892-4a72-8efa-4ecafc26a40e","Type":"ContainerStarted","Data":"30809079fbceac9cd6ea3e1f8d2c5ed835247e42b6f9df6373d2ac928b89e1da"} Jan 22 13:16:24 crc kubenswrapper[4773]: I0122 13:16:24.986506 4773 generic.go:334] "Generic (PLEG): container finished" podID="9c59d362-e892-4a72-8efa-4ecafc26a40e" containerID="d6c91b13c29c2b1118f0f5ddd5107ac35938d9aa5641d2eaeb612edac937dc71" exitCode=0 Jan 22 13:16:24 crc kubenswrapper[4773]: I0122 13:16:24.986614 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-ljnpb" event={"ID":"9c59d362-e892-4a72-8efa-4ecafc26a40e","Type":"ContainerDied","Data":"d6c91b13c29c2b1118f0f5ddd5107ac35938d9aa5641d2eaeb612edac937dc71"} Jan 22 13:16:26 crc kubenswrapper[4773]: I0122 13:16:26.290193 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-ljnpb" Jan 22 13:16:26 crc kubenswrapper[4773]: I0122 13:16:26.362641 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9c59d362-e892-4a72-8efa-4ecafc26a40e-node-mnt\") pod \"9c59d362-e892-4a72-8efa-4ecafc26a40e\" (UID: \"9c59d362-e892-4a72-8efa-4ecafc26a40e\") " Jan 22 13:16:26 crc kubenswrapper[4773]: I0122 13:16:26.362798 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxqzx\" (UniqueName: \"kubernetes.io/projected/9c59d362-e892-4a72-8efa-4ecafc26a40e-kube-api-access-fxqzx\") pod \"9c59d362-e892-4a72-8efa-4ecafc26a40e\" (UID: \"9c59d362-e892-4a72-8efa-4ecafc26a40e\") " Jan 22 13:16:26 crc kubenswrapper[4773]: I0122 13:16:26.362795 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9c59d362-e892-4a72-8efa-4ecafc26a40e-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "9c59d362-e892-4a72-8efa-4ecafc26a40e" (UID: "9c59d362-e892-4a72-8efa-4ecafc26a40e"). InnerVolumeSpecName "node-mnt". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 13:16:26 crc kubenswrapper[4773]: I0122 13:16:26.363626 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9c59d362-e892-4a72-8efa-4ecafc26a40e-crc-storage\") pod \"9c59d362-e892-4a72-8efa-4ecafc26a40e\" (UID: \"9c59d362-e892-4a72-8efa-4ecafc26a40e\") " Jan 22 13:16:26 crc kubenswrapper[4773]: I0122 13:16:26.364124 4773 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9c59d362-e892-4a72-8efa-4ecafc26a40e-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 22 13:16:26 crc kubenswrapper[4773]: I0122 13:16:26.373081 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c59d362-e892-4a72-8efa-4ecafc26a40e-kube-api-access-fxqzx" (OuterVolumeSpecName: "kube-api-access-fxqzx") pod "9c59d362-e892-4a72-8efa-4ecafc26a40e" (UID: "9c59d362-e892-4a72-8efa-4ecafc26a40e"). InnerVolumeSpecName "kube-api-access-fxqzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:16:26 crc kubenswrapper[4773]: I0122 13:16:26.389678 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c59d362-e892-4a72-8efa-4ecafc26a40e-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "9c59d362-e892-4a72-8efa-4ecafc26a40e" (UID: "9c59d362-e892-4a72-8efa-4ecafc26a40e"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:16:26 crc kubenswrapper[4773]: I0122 13:16:26.465798 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxqzx\" (UniqueName: \"kubernetes.io/projected/9c59d362-e892-4a72-8efa-4ecafc26a40e-kube-api-access-fxqzx\") on node \"crc\" DevicePath \"\"" Jan 22 13:16:26 crc kubenswrapper[4773]: I0122 13:16:26.466391 4773 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9c59d362-e892-4a72-8efa-4ecafc26a40e-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 22 13:16:27 crc kubenswrapper[4773]: I0122 13:16:27.004971 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-ljnpb" event={"ID":"9c59d362-e892-4a72-8efa-4ecafc26a40e","Type":"ContainerDied","Data":"30809079fbceac9cd6ea3e1f8d2c5ed835247e42b6f9df6373d2ac928b89e1da"} Jan 22 13:16:27 crc kubenswrapper[4773]: I0122 13:16:27.005009 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30809079fbceac9cd6ea3e1f8d2c5ed835247e42b6f9df6373d2ac928b89e1da" Jan 22 13:16:27 crc kubenswrapper[4773]: I0122 13:16:27.005382 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-ljnpb" Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.598831 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-ljnpb"] Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.604037 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-ljnpb"] Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.669618 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c59d362-e892-4a72-8efa-4ecafc26a40e" path="/var/lib/kubelet/pods/9c59d362-e892-4a72-8efa-4ecafc26a40e/volumes" Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.728402 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-sc566"] Jan 22 13:16:28 crc kubenswrapper[4773]: E0122 13:16:28.728738 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c59d362-e892-4a72-8efa-4ecafc26a40e" containerName="storage" Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.728751 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c59d362-e892-4a72-8efa-4ecafc26a40e" containerName="storage" Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.728905 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c59d362-e892-4a72-8efa-4ecafc26a40e" containerName="storage" Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.730817 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sc566" Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.737036 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.737320 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.737556 4773 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-cb2wx" Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.737705 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.743123 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-sc566"] Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.801362 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqg4n\" (UniqueName: \"kubernetes.io/projected/9fa49ba8-29af-46e1-a44f-116ce0ccb74d-kube-api-access-rqg4n\") pod \"crc-storage-crc-sc566\" (UID: \"9fa49ba8-29af-46e1-a44f-116ce0ccb74d\") " pod="crc-storage/crc-storage-crc-sc566" Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.801463 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9fa49ba8-29af-46e1-a44f-116ce0ccb74d-node-mnt\") pod \"crc-storage-crc-sc566\" (UID: \"9fa49ba8-29af-46e1-a44f-116ce0ccb74d\") " pod="crc-storage/crc-storage-crc-sc566" Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.801497 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9fa49ba8-29af-46e1-a44f-116ce0ccb74d-crc-storage\") pod \"crc-storage-crc-sc566\" (UID: \"9fa49ba8-29af-46e1-a44f-116ce0ccb74d\") " 
pod="crc-storage/crc-storage-crc-sc566" Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.902651 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9fa49ba8-29af-46e1-a44f-116ce0ccb74d-node-mnt\") pod \"crc-storage-crc-sc566\" (UID: \"9fa49ba8-29af-46e1-a44f-116ce0ccb74d\") " pod="crc-storage/crc-storage-crc-sc566" Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.902697 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9fa49ba8-29af-46e1-a44f-116ce0ccb74d-crc-storage\") pod \"crc-storage-crc-sc566\" (UID: \"9fa49ba8-29af-46e1-a44f-116ce0ccb74d\") " pod="crc-storage/crc-storage-crc-sc566" Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.902775 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqg4n\" (UniqueName: \"kubernetes.io/projected/9fa49ba8-29af-46e1-a44f-116ce0ccb74d-kube-api-access-rqg4n\") pod \"crc-storage-crc-sc566\" (UID: \"9fa49ba8-29af-46e1-a44f-116ce0ccb74d\") " pod="crc-storage/crc-storage-crc-sc566" Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.902994 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9fa49ba8-29af-46e1-a44f-116ce0ccb74d-node-mnt\") pod \"crc-storage-crc-sc566\" (UID: \"9fa49ba8-29af-46e1-a44f-116ce0ccb74d\") " pod="crc-storage/crc-storage-crc-sc566" Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.903607 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9fa49ba8-29af-46e1-a44f-116ce0ccb74d-crc-storage\") pod \"crc-storage-crc-sc566\" (UID: \"9fa49ba8-29af-46e1-a44f-116ce0ccb74d\") " pod="crc-storage/crc-storage-crc-sc566" Jan 22 13:16:28 crc kubenswrapper[4773]: I0122 13:16:28.923156 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqg4n\" (UniqueName: \"kubernetes.io/projected/9fa49ba8-29af-46e1-a44f-116ce0ccb74d-kube-api-access-rqg4n\") pod \"crc-storage-crc-sc566\" (UID: \"9fa49ba8-29af-46e1-a44f-116ce0ccb74d\") " pod="crc-storage/crc-storage-crc-sc566" Jan 22 13:16:29 crc kubenswrapper[4773]: I0122 13:16:29.062785 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-sc566" Jan 22 13:16:29 crc kubenswrapper[4773]: I0122 13:16:29.525834 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-sc566"] Jan 22 13:16:29 crc kubenswrapper[4773]: W0122 13:16:29.535327 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9fa49ba8_29af_46e1_a44f_116ce0ccb74d.slice/crio-93bdb72f682023f88581e5c961e6aacb2cdb988dcb1649502ce6ba02c21dd403 WatchSource:0}: Error finding container 93bdb72f682023f88581e5c961e6aacb2cdb988dcb1649502ce6ba02c21dd403: Status 404 returned error can't find the container with id 93bdb72f682023f88581e5c961e6aacb2cdb988dcb1649502ce6ba02c21dd403 Jan 22 13:16:30 crc kubenswrapper[4773]: I0122 13:16:30.029169 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-sc566" event={"ID":"9fa49ba8-29af-46e1-a44f-116ce0ccb74d","Type":"ContainerStarted","Data":"93bdb72f682023f88581e5c961e6aacb2cdb988dcb1649502ce6ba02c21dd403"} Jan 22 13:16:31 crc kubenswrapper[4773]: I0122 13:16:31.038528 4773 generic.go:334] "Generic (PLEG): container finished" podID="9fa49ba8-29af-46e1-a44f-116ce0ccb74d" containerID="0c05b856e7d00e13e3028bc939eedc9cd6011a359a9fca966dd4d37ba92da248" exitCode=0 Jan 22 13:16:31 crc kubenswrapper[4773]: I0122 13:16:31.038586 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-sc566" event={"ID":"9fa49ba8-29af-46e1-a44f-116ce0ccb74d","Type":"ContainerDied","Data":"0c05b856e7d00e13e3028bc939eedc9cd6011a359a9fca966dd4d37ba92da248"} Jan 22 13:16:32 crc kubenswrapper[4773]: I0122 13:16:32.338951 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sc566" Jan 22 13:16:32 crc kubenswrapper[4773]: I0122 13:16:32.457515 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqg4n\" (UniqueName: \"kubernetes.io/projected/9fa49ba8-29af-46e1-a44f-116ce0ccb74d-kube-api-access-rqg4n\") pod \"9fa49ba8-29af-46e1-a44f-116ce0ccb74d\" (UID: \"9fa49ba8-29af-46e1-a44f-116ce0ccb74d\") " Jan 22 13:16:32 crc kubenswrapper[4773]: I0122 13:16:32.457599 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9fa49ba8-29af-46e1-a44f-116ce0ccb74d-crc-storage\") pod \"9fa49ba8-29af-46e1-a44f-116ce0ccb74d\" (UID: \"9fa49ba8-29af-46e1-a44f-116ce0ccb74d\") " Jan 22 13:16:32 crc kubenswrapper[4773]: I0122 13:16:32.457620 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9fa49ba8-29af-46e1-a44f-116ce0ccb74d-node-mnt\") pod \"9fa49ba8-29af-46e1-a44f-116ce0ccb74d\" (UID: \"9fa49ba8-29af-46e1-a44f-116ce0ccb74d\") " Jan 22 13:16:32 crc kubenswrapper[4773]: I0122 13:16:32.458022 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9fa49ba8-29af-46e1-a44f-116ce0ccb74d-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "9fa49ba8-29af-46e1-a44f-116ce0ccb74d" (UID: "9fa49ba8-29af-46e1-a44f-116ce0ccb74d"). InnerVolumeSpecName "node-mnt". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 13:16:32 crc kubenswrapper[4773]: I0122 13:16:32.466957 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fa49ba8-29af-46e1-a44f-116ce0ccb74d-kube-api-access-rqg4n" (OuterVolumeSpecName: "kube-api-access-rqg4n") pod "9fa49ba8-29af-46e1-a44f-116ce0ccb74d" (UID: "9fa49ba8-29af-46e1-a44f-116ce0ccb74d"). InnerVolumeSpecName "kube-api-access-rqg4n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:16:32 crc kubenswrapper[4773]: I0122 13:16:32.495000 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9fa49ba8-29af-46e1-a44f-116ce0ccb74d-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "9fa49ba8-29af-46e1-a44f-116ce0ccb74d" (UID: "9fa49ba8-29af-46e1-a44f-116ce0ccb74d"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:16:32 crc kubenswrapper[4773]: I0122 13:16:32.560561 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqg4n\" (UniqueName: \"kubernetes.io/projected/9fa49ba8-29af-46e1-a44f-116ce0ccb74d-kube-api-access-rqg4n\") on node \"crc\" DevicePath \"\"" Jan 22 13:16:32 crc kubenswrapper[4773]: I0122 13:16:32.560640 4773 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9fa49ba8-29af-46e1-a44f-116ce0ccb74d-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 22 13:16:32 crc kubenswrapper[4773]: I0122 13:16:32.560673 4773 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9fa49ba8-29af-46e1-a44f-116ce0ccb74d-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 22 13:16:33 crc kubenswrapper[4773]: I0122 13:16:33.069967 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-sc566" event={"ID":"9fa49ba8-29af-46e1-a44f-116ce0ccb74d","Type":"ContainerDied","Data":"93bdb72f682023f88581e5c961e6aacb2cdb988dcb1649502ce6ba02c21dd403"} Jan 22 13:16:33 crc kubenswrapper[4773]: I0122 13:16:33.070315 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93bdb72f682023f88581e5c961e6aacb2cdb988dcb1649502ce6ba02c21dd403" Jan 22 13:16:33 crc kubenswrapper[4773]: I0122 13:16:33.070106 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-sc566" Jan 22 13:17:04 crc kubenswrapper[4773]: I0122 13:17:04.155484 4773 scope.go:117] "RemoveContainer" containerID="7e506d06cf8f20eb5b3b83dfa4bc8179f298ee6aeee03f6e83ffacf95c015a0d" Jan 22 13:17:34 crc kubenswrapper[4773]: I0122 13:17:34.074601 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:17:34 crc kubenswrapper[4773]: I0122 13:17:34.075250 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:18:04 crc kubenswrapper[4773]: I0122 13:18:04.075060 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:18:04 crc kubenswrapper[4773]: I0122 13:18:04.077907 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:18:33 crc kubenswrapper[4773]: I0122 13:18:33.958696 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5986db9b4f-6kffd"] Jan 22 13:18:33 crc kubenswrapper[4773]: E0122 13:18:33.959447 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fa49ba8-29af-46e1-a44f-116ce0ccb74d" containerName="storage" Jan 22 13:18:33 crc kubenswrapper[4773]: I0122 13:18:33.959460 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fa49ba8-29af-46e1-a44f-116ce0ccb74d" containerName="storage" Jan 22 13:18:33 crc kubenswrapper[4773]: I0122 13:18:33.959608 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fa49ba8-29af-46e1-a44f-116ce0ccb74d" containerName="storage" Jan 22 13:18:33 crc kubenswrapper[4773]: I0122 13:18:33.960364 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5986db9b4f-6kffd" Jan 22 13:18:33 crc kubenswrapper[4773]: I0122 13:18:33.963121 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Jan 22 13:18:33 crc kubenswrapper[4773]: I0122 13:18:33.963364 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Jan 22 13:18:33 crc kubenswrapper[4773]: I0122 13:18:33.963493 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-jfj8x" Jan 22 13:18:33 crc kubenswrapper[4773]: I0122 13:18:33.963605 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Jan 22 13:18:33 crc kubenswrapper[4773]: I0122 13:18:33.970723 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56bbd59dc5-bm6cs"] Jan 22 13:18:33 crc kubenswrapper[4773]: I0122 13:18:33.972119 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56bbd59dc5-bm6cs" Jan 22 13:18:33 crc kubenswrapper[4773]: I0122 13:18:33.974174 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Jan 22 13:18:33 crc kubenswrapper[4773]: I0122 13:18:33.982174 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5986db9b4f-6kffd"] Jan 22 13:18:33 crc kubenswrapper[4773]: I0122 13:18:33.991647 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56bbd59dc5-bm6cs"] Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.074822 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.074890 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.074941 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.075619 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.075698 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47" gracePeriod=600 Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.128411 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/d2db4b36-5041-41b6-9311-24e6b251b290-dns-svc\") pod \"dnsmasq-dns-56bbd59dc5-bm6cs\" (UID: \"d2db4b36-5041-41b6-9311-24e6b251b290\") " pod="openstack/dnsmasq-dns-56bbd59dc5-bm6cs" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.128517 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqrjv\" (UniqueName: \"kubernetes.io/projected/6249a7d4-1404-4fc0-a9b5-207fd96ca0c0-kube-api-access-jqrjv\") pod \"dnsmasq-dns-5986db9b4f-6kffd\" (UID: \"6249a7d4-1404-4fc0-a9b5-207fd96ca0c0\") " pod="openstack/dnsmasq-dns-5986db9b4f-6kffd" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.128667 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sm9gv\" (UniqueName: \"kubernetes.io/projected/d2db4b36-5041-41b6-9311-24e6b251b290-kube-api-access-sm9gv\") pod \"dnsmasq-dns-56bbd59dc5-bm6cs\" (UID: \"d2db4b36-5041-41b6-9311-24e6b251b290\") " pod="openstack/dnsmasq-dns-56bbd59dc5-bm6cs" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.128693 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2db4b36-5041-41b6-9311-24e6b251b290-config\") pod \"dnsmasq-dns-56bbd59dc5-bm6cs\" (UID: \"d2db4b36-5041-41b6-9311-24e6b251b290\") " pod="openstack/dnsmasq-dns-56bbd59dc5-bm6cs" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.128876 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6249a7d4-1404-4fc0-a9b5-207fd96ca0c0-config\") pod \"dnsmasq-dns-5986db9b4f-6kffd\" (UID: \"6249a7d4-1404-4fc0-a9b5-207fd96ca0c0\") " pod="openstack/dnsmasq-dns-5986db9b4f-6kffd" Jan 22 13:18:34 crc kubenswrapper[4773]: E0122 13:18:34.200325 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.229942 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6249a7d4-1404-4fc0-a9b5-207fd96ca0c0-config\") pod \"dnsmasq-dns-5986db9b4f-6kffd\" (UID: \"6249a7d4-1404-4fc0-a9b5-207fd96ca0c0\") " pod="openstack/dnsmasq-dns-5986db9b4f-6kffd" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.230031 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d2db4b36-5041-41b6-9311-24e6b251b290-dns-svc\") pod \"dnsmasq-dns-56bbd59dc5-bm6cs\" (UID: \"d2db4b36-5041-41b6-9311-24e6b251b290\") " pod="openstack/dnsmasq-dns-56bbd59dc5-bm6cs" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.230080 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqrjv\" (UniqueName: \"kubernetes.io/projected/6249a7d4-1404-4fc0-a9b5-207fd96ca0c0-kube-api-access-jqrjv\") pod \"dnsmasq-dns-5986db9b4f-6kffd\" (UID: \"6249a7d4-1404-4fc0-a9b5-207fd96ca0c0\") " pod="openstack/dnsmasq-dns-5986db9b4f-6kffd" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.230164 4773 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sm9gv\" (UniqueName: \"kubernetes.io/projected/d2db4b36-5041-41b6-9311-24e6b251b290-kube-api-access-sm9gv\") pod \"dnsmasq-dns-56bbd59dc5-bm6cs\" (UID: \"d2db4b36-5041-41b6-9311-24e6b251b290\") " pod="openstack/dnsmasq-dns-56bbd59dc5-bm6cs" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.230186 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2db4b36-5041-41b6-9311-24e6b251b290-config\") pod \"dnsmasq-dns-56bbd59dc5-bm6cs\" (UID: \"d2db4b36-5041-41b6-9311-24e6b251b290\") " pod="openstack/dnsmasq-dns-56bbd59dc5-bm6cs" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.230905 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d2db4b36-5041-41b6-9311-24e6b251b290-dns-svc\") pod \"dnsmasq-dns-56bbd59dc5-bm6cs\" (UID: \"d2db4b36-5041-41b6-9311-24e6b251b290\") " pod="openstack/dnsmasq-dns-56bbd59dc5-bm6cs" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.231198 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2db4b36-5041-41b6-9311-24e6b251b290-config\") pod \"dnsmasq-dns-56bbd59dc5-bm6cs\" (UID: \"d2db4b36-5041-41b6-9311-24e6b251b290\") " pod="openstack/dnsmasq-dns-56bbd59dc5-bm6cs" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.231470 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6249a7d4-1404-4fc0-a9b5-207fd96ca0c0-config\") pod \"dnsmasq-dns-5986db9b4f-6kffd\" (UID: \"6249a7d4-1404-4fc0-a9b5-207fd96ca0c0\") " pod="openstack/dnsmasq-dns-5986db9b4f-6kffd" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.251079 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sm9gv\" (UniqueName: \"kubernetes.io/projected/d2db4b36-5041-41b6-9311-24e6b251b290-kube-api-access-sm9gv\") pod \"dnsmasq-dns-56bbd59dc5-bm6cs\" (UID: \"d2db4b36-5041-41b6-9311-24e6b251b290\") " pod="openstack/dnsmasq-dns-56bbd59dc5-bm6cs" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.256491 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqrjv\" (UniqueName: \"kubernetes.io/projected/6249a7d4-1404-4fc0-a9b5-207fd96ca0c0-kube-api-access-jqrjv\") pod \"dnsmasq-dns-5986db9b4f-6kffd\" (UID: \"6249a7d4-1404-4fc0-a9b5-207fd96ca0c0\") " pod="openstack/dnsmasq-dns-5986db9b4f-6kffd" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.290194 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5986db9b4f-6kffd" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.299020 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56bbd59dc5-bm6cs" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.383891 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56bbd59dc5-bm6cs"] Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.423817 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-865d9b578f-85nxb"] Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.430754 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-865d9b578f-85nxb" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.432605 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjlrf\" (UniqueName: \"kubernetes.io/projected/5877eb56-c6f0-4bac-b65e-acfba6e0e515-kube-api-access-qjlrf\") pod \"dnsmasq-dns-865d9b578f-85nxb\" (UID: \"5877eb56-c6f0-4bac-b65e-acfba6e0e515\") " pod="openstack/dnsmasq-dns-865d9b578f-85nxb" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.432655 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5877eb56-c6f0-4bac-b65e-acfba6e0e515-config\") pod \"dnsmasq-dns-865d9b578f-85nxb\" (UID: \"5877eb56-c6f0-4bac-b65e-acfba6e0e515\") " pod="openstack/dnsmasq-dns-865d9b578f-85nxb" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.432682 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5877eb56-c6f0-4bac-b65e-acfba6e0e515-dns-svc\") pod \"dnsmasq-dns-865d9b578f-85nxb\" (UID: \"5877eb56-c6f0-4bac-b65e-acfba6e0e515\") " pod="openstack/dnsmasq-dns-865d9b578f-85nxb" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.452489 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865d9b578f-85nxb"] Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.537316 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjlrf\" (UniqueName: \"kubernetes.io/projected/5877eb56-c6f0-4bac-b65e-acfba6e0e515-kube-api-access-qjlrf\") pod \"dnsmasq-dns-865d9b578f-85nxb\" (UID: \"5877eb56-c6f0-4bac-b65e-acfba6e0e515\") " pod="openstack/dnsmasq-dns-865d9b578f-85nxb" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.537421 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5877eb56-c6f0-4bac-b65e-acfba6e0e515-config\") pod \"dnsmasq-dns-865d9b578f-85nxb\" (UID: \"5877eb56-c6f0-4bac-b65e-acfba6e0e515\") " pod="openstack/dnsmasq-dns-865d9b578f-85nxb" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.537512 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5877eb56-c6f0-4bac-b65e-acfba6e0e515-dns-svc\") pod \"dnsmasq-dns-865d9b578f-85nxb\" (UID: \"5877eb56-c6f0-4bac-b65e-acfba6e0e515\") " pod="openstack/dnsmasq-dns-865d9b578f-85nxb" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.538997 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5877eb56-c6f0-4bac-b65e-acfba6e0e515-config\") pod \"dnsmasq-dns-865d9b578f-85nxb\" (UID: \"5877eb56-c6f0-4bac-b65e-acfba6e0e515\") " pod="openstack/dnsmasq-dns-865d9b578f-85nxb" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.541310 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5877eb56-c6f0-4bac-b65e-acfba6e0e515-dns-svc\") pod \"dnsmasq-dns-865d9b578f-85nxb\" (UID: \"5877eb56-c6f0-4bac-b65e-acfba6e0e515\") " pod="openstack/dnsmasq-dns-865d9b578f-85nxb" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.567173 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjlrf\" (UniqueName: 
\"kubernetes.io/projected/5877eb56-c6f0-4bac-b65e-acfba6e0e515-kube-api-access-qjlrf\") pod \"dnsmasq-dns-865d9b578f-85nxb\" (UID: \"5877eb56-c6f0-4bac-b65e-acfba6e0e515\") " pod="openstack/dnsmasq-dns-865d9b578f-85nxb" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.754711 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865d9b578f-85nxb" Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.883199 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56bbd59dc5-bm6cs"] Jan 22 13:18:34 crc kubenswrapper[4773]: I0122 13:18:34.998182 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5986db9b4f-6kffd"] Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.017014 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5986db9b4f-6kffd"] Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.043795 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-c256q"] Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.045304 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d79f765b5-c256q" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.054657 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-c256q"] Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.076610 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/090e1ede-1784-49b2-9f35-400070db2926-dns-svc\") pod \"dnsmasq-dns-5d79f765b5-c256q\" (UID: \"090e1ede-1784-49b2-9f35-400070db2926\") " pod="openstack/dnsmasq-dns-5d79f765b5-c256q" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.076674 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/090e1ede-1784-49b2-9f35-400070db2926-config\") pod \"dnsmasq-dns-5d79f765b5-c256q\" (UID: \"090e1ede-1784-49b2-9f35-400070db2926\") " pod="openstack/dnsmasq-dns-5d79f765b5-c256q" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.076757 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgqd7\" (UniqueName: \"kubernetes.io/projected/090e1ede-1784-49b2-9f35-400070db2926-kube-api-access-hgqd7\") pod \"dnsmasq-dns-5d79f765b5-c256q\" (UID: \"090e1ede-1784-49b2-9f35-400070db2926\") " pod="openstack/dnsmasq-dns-5d79f765b5-c256q" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.123355 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865d9b578f-85nxb"] Jan 22 13:18:35 crc kubenswrapper[4773]: W0122 13:18:35.140210 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5877eb56_c6f0_4bac_b65e_acfba6e0e515.slice/crio-01d3ffb1c0929b1bb0d8c545a21d0c777954ca1e164e16225423a90bc7884578 WatchSource:0}: Error finding container 01d3ffb1c0929b1bb0d8c545a21d0c777954ca1e164e16225423a90bc7884578: Status 404 returned error can't find the container with id 01d3ffb1c0929b1bb0d8c545a21d0c777954ca1e164e16225423a90bc7884578 Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.183433 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/090e1ede-1784-49b2-9f35-400070db2926-dns-svc\") pod \"dnsmasq-dns-5d79f765b5-c256q\" (UID: \"090e1ede-1784-49b2-9f35-400070db2926\") " pod="openstack/dnsmasq-dns-5d79f765b5-c256q" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.183947 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/090e1ede-1784-49b2-9f35-400070db2926-config\") pod \"dnsmasq-dns-5d79f765b5-c256q\" (UID: \"090e1ede-1784-49b2-9f35-400070db2926\") " pod="openstack/dnsmasq-dns-5d79f765b5-c256q" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.184153 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgqd7\" (UniqueName: \"kubernetes.io/projected/090e1ede-1784-49b2-9f35-400070db2926-kube-api-access-hgqd7\") pod \"dnsmasq-dns-5d79f765b5-c256q\" (UID: \"090e1ede-1784-49b2-9f35-400070db2926\") " pod="openstack/dnsmasq-dns-5d79f765b5-c256q" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.185450 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/090e1ede-1784-49b2-9f35-400070db2926-dns-svc\") pod \"dnsmasq-dns-5d79f765b5-c256q\" (UID: \"090e1ede-1784-49b2-9f35-400070db2926\") " pod="openstack/dnsmasq-dns-5d79f765b5-c256q" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.187192 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/090e1ede-1784-49b2-9f35-400070db2926-config\") pod \"dnsmasq-dns-5d79f765b5-c256q\" (UID: \"090e1ede-1784-49b2-9f35-400070db2926\") " pod="openstack/dnsmasq-dns-5d79f765b5-c256q" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.198805 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5986db9b4f-6kffd" event={"ID":"6249a7d4-1404-4fc0-a9b5-207fd96ca0c0","Type":"ContainerStarted","Data":"07bad35e1b343e70ffcac57db12e986ad431a5b3c6fb6300a61d6571ff6efe80"} Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.210490 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865d9b578f-85nxb" event={"ID":"5877eb56-c6f0-4bac-b65e-acfba6e0e515","Type":"ContainerStarted","Data":"01d3ffb1c0929b1bb0d8c545a21d0c777954ca1e164e16225423a90bc7884578"} Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.218332 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgqd7\" (UniqueName: \"kubernetes.io/projected/090e1ede-1784-49b2-9f35-400070db2926-kube-api-access-hgqd7\") pod \"dnsmasq-dns-5d79f765b5-c256q\" (UID: \"090e1ede-1784-49b2-9f35-400070db2926\") " pod="openstack/dnsmasq-dns-5d79f765b5-c256q" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.220509 4773 generic.go:334] "Generic (PLEG): container finished" podID="d2db4b36-5041-41b6-9311-24e6b251b290" containerID="36a14fadf930809aedf22b8646e6e34dbc02e00ca1e8da9791096da670d2087b" exitCode=0 Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.220774 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56bbd59dc5-bm6cs" event={"ID":"d2db4b36-5041-41b6-9311-24e6b251b290","Type":"ContainerDied","Data":"36a14fadf930809aedf22b8646e6e34dbc02e00ca1e8da9791096da670d2087b"} Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.220810 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56bbd59dc5-bm6cs" 
event={"ID":"d2db4b36-5041-41b6-9311-24e6b251b290","Type":"ContainerStarted","Data":"6da3e8e605e6970572f276ec768c1d5854d15edf22adf75b10f69ee262e6304c"} Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.273645 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47" exitCode=0 Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.273696 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47"} Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.273751 4773 scope.go:117] "RemoveContainer" containerID="d4e2d921efb64680f5d5b106790c3c2cc804fe6e41b407d96c6141a303a48c40" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.274479 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47" Jan 22 13:18:35 crc kubenswrapper[4773]: E0122 13:18:35.275082 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.460394 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d79f765b5-c256q" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.584018 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.585251 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.594412 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-qxm6s" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.594664 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.595106 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.597953 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.598578 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.598775 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.599218 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.620110 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.673088 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56bbd59dc5-bm6cs" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.694878 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.694967 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5k86b\" (UniqueName: \"kubernetes.io/projected/4af4d4e7-d821-46cb-8609-c96c8521308b-kube-api-access-5k86b\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.695007 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4af4d4e7-d821-46cb-8609-c96c8521308b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.695031 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4af4d4e7-d821-46cb-8609-c96c8521308b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.695094 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4af4d4e7-d821-46cb-8609-c96c8521308b-erlang-cookie-secret\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.695117 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.695149 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.695168 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.695189 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4af4d4e7-d821-46cb-8609-c96c8521308b-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.695212 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-057d2589-ccf8-41c4-833a-de31f025f26b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-057d2589-ccf8-41c4-833a-de31f025f26b\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.695236 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4af4d4e7-d821-46cb-8609-c96c8521308b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.803782 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sm9gv\" (UniqueName: \"kubernetes.io/projected/d2db4b36-5041-41b6-9311-24e6b251b290-kube-api-access-sm9gv\") pod \"d2db4b36-5041-41b6-9311-24e6b251b290\" (UID: \"d2db4b36-5041-41b6-9311-24e6b251b290\") " Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.803847 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d2db4b36-5041-41b6-9311-24e6b251b290-dns-svc\") pod \"d2db4b36-5041-41b6-9311-24e6b251b290\" (UID: \"d2db4b36-5041-41b6-9311-24e6b251b290\") " Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.803917 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2db4b36-5041-41b6-9311-24e6b251b290-config\") pod \"d2db4b36-5041-41b6-9311-24e6b251b290\" (UID: 
\"d2db4b36-5041-41b6-9311-24e6b251b290\") " Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.804072 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4af4d4e7-d821-46cb-8609-c96c8521308b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.804108 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4af4d4e7-d821-46cb-8609-c96c8521308b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.804130 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.804168 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.804184 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.804203 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4af4d4e7-d821-46cb-8609-c96c8521308b-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.804226 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-057d2589-ccf8-41c4-833a-de31f025f26b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-057d2589-ccf8-41c4-833a-de31f025f26b\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.804241 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4af4d4e7-d821-46cb-8609-c96c8521308b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.804259 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 
13:18:35.804320 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5k86b\" (UniqueName: \"kubernetes.io/projected/4af4d4e7-d821-46cb-8609-c96c8521308b-kube-api-access-5k86b\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.804360 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4af4d4e7-d821-46cb-8609-c96c8521308b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.804797 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.809043 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2db4b36-5041-41b6-9311-24e6b251b290-kube-api-access-sm9gv" (OuterVolumeSpecName: "kube-api-access-sm9gv") pod "d2db4b36-5041-41b6-9311-24e6b251b290" (UID: "d2db4b36-5041-41b6-9311-24e6b251b290"). InnerVolumeSpecName "kube-api-access-sm9gv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.810672 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4af4d4e7-d821-46cb-8609-c96c8521308b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.811411 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4af4d4e7-d821-46cb-8609-c96c8521308b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.811888 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4af4d4e7-d821-46cb-8609-c96c8521308b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.812206 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.813178 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4af4d4e7-d821-46cb-8609-c96c8521308b-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.817312 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" 
(UniqueName: \"kubernetes.io/projected/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.839034 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2db4b36-5041-41b6-9311-24e6b251b290-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d2db4b36-5041-41b6-9311-24e6b251b290" (UID: "d2db4b36-5041-41b6-9311-24e6b251b290"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.847103 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4af4d4e7-d821-46cb-8609-c96c8521308b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.855991 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.856490 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2db4b36-5041-41b6-9311-24e6b251b290-config" (OuterVolumeSpecName: "config") pod "d2db4b36-5041-41b6-9311-24e6b251b290" (UID: "d2db4b36-5041-41b6-9311-24e6b251b290"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.859264 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-c256q"] Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.865796 4773 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.865834 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-057d2589-ccf8-41c4-833a-de31f025f26b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-057d2589-ccf8-41c4-833a-de31f025f26b\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6b48504644dee6ae822cbe8bfe746e4b3fd51178841c3f8235344fb47eab87d0/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.871989 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5k86b\" (UniqueName: \"kubernetes.io/projected/4af4d4e7-d821-46cb-8609-c96c8521308b-kube-api-access-5k86b\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.906193 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d2db4b36-5041-41b6-9311-24e6b251b290-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.906220 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2db4b36-5041-41b6-9311-24e6b251b290-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.906230 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sm9gv\" (UniqueName: \"kubernetes.io/projected/d2db4b36-5041-41b6-9311-24e6b251b290-kube-api-access-sm9gv\") on node \"crc\" DevicePath \"\"" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.942174 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-057d2589-ccf8-41c4-833a-de31f025f26b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-057d2589-ccf8-41c4-833a-de31f025f26b\") pod \"rabbitmq-cell1-server-0\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:35 crc kubenswrapper[4773]: I0122 13:18:35.992030 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.190157 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 13:18:36 crc kubenswrapper[4773]: E0122 13:18:36.190806 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2db4b36-5041-41b6-9311-24e6b251b290" containerName="init" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.190823 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2db4b36-5041-41b6-9311-24e6b251b290" containerName="init" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.190985 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2db4b36-5041-41b6-9311-24e6b251b290" containerName="init" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.191896 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.193802 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.205998 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.206170 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-2l49c" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.206366 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.206452 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.207069 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.210548 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.211343 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.286487 4773 generic.go:334] "Generic (PLEG): container finished" podID="090e1ede-1784-49b2-9f35-400070db2926" containerID="5acb4ea8fa4b2f0bfa3ebc61da4fb84325c9ac899c8ca92de25ce470758bf1e7" exitCode=0 Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.286539 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d79f765b5-c256q" event={"ID":"090e1ede-1784-49b2-9f35-400070db2926","Type":"ContainerDied","Data":"5acb4ea8fa4b2f0bfa3ebc61da4fb84325c9ac899c8ca92de25ce470758bf1e7"} Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.286664 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d79f765b5-c256q" event={"ID":"090e1ede-1784-49b2-9f35-400070db2926","Type":"ContainerStarted","Data":"b0044ffc18606d16b60601653287fe4d0f98fc86b96a4bb557eb0716bbb14200"} Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.288003 4773 generic.go:334] "Generic (PLEG): container finished" podID="6249a7d4-1404-4fc0-a9b5-207fd96ca0c0" containerID="09c345192a74e11710178c32cae2661ae8b7ba7bc0ace7fe0bc3062b873b1666" exitCode=0 Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.288067 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5986db9b4f-6kffd" event={"ID":"6249a7d4-1404-4fc0-a9b5-207fd96ca0c0","Type":"ContainerDied","Data":"09c345192a74e11710178c32cae2661ae8b7ba7bc0ace7fe0bc3062b873b1666"} Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.290266 4773 generic.go:334] "Generic (PLEG): container finished" podID="5877eb56-c6f0-4bac-b65e-acfba6e0e515" containerID="03a4cc5934be2587f17525d0f60cbdffebf7201193efcf50c3129efa928bb9d8" exitCode=0 Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.290349 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865d9b578f-85nxb" event={"ID":"5877eb56-c6f0-4bac-b65e-acfba6e0e515","Type":"ContainerDied","Data":"03a4cc5934be2587f17525d0f60cbdffebf7201193efcf50c3129efa928bb9d8"} Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.292498 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-56bbd59dc5-bm6cs" event={"ID":"d2db4b36-5041-41b6-9311-24e6b251b290","Type":"ContainerDied","Data":"6da3e8e605e6970572f276ec768c1d5854d15edf22adf75b10f69ee262e6304c"} Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.292536 4773 scope.go:117] "RemoveContainer" containerID="36a14fadf930809aedf22b8646e6e34dbc02e00ca1e8da9791096da670d2087b" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.292633 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56bbd59dc5-bm6cs" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.312650 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.312728 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cfc01903-fce2-4ce5-a669-804e05bafa95-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.312759 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.312799 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cfc01903-fce2-4ce5-a669-804e05bafa95-pod-info\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.312838 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cfc01903-fce2-4ce5-a669-804e05bafa95-config-data\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.312857 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cfc01903-fce2-4ce5-a669-804e05bafa95-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.312875 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cfc01903-fce2-4ce5-a669-804e05bafa95-server-conf\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.312891 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-plugins\") pod \"rabbitmq-server-0\" 
(UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.312911 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlqjw\" (UniqueName: \"kubernetes.io/projected/cfc01903-fce2-4ce5-a669-804e05bafa95-kube-api-access-nlqjw\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.312936 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.312961 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.414268 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.414335 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cfc01903-fce2-4ce5-a669-804e05bafa95-server-conf\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.414395 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlqjw\" (UniqueName: \"kubernetes.io/projected/cfc01903-fce2-4ce5-a669-804e05bafa95-kube-api-access-nlqjw\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.414427 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.414469 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.414514 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " 
pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.414553 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cfc01903-fce2-4ce5-a669-804e05bafa95-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.414571 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.414587 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cfc01903-fce2-4ce5-a669-804e05bafa95-pod-info\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.414660 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cfc01903-fce2-4ce5-a669-804e05bafa95-config-data\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.414684 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cfc01903-fce2-4ce5-a669-804e05bafa95-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.415696 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cfc01903-fce2-4ce5-a669-804e05bafa95-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.415831 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.416829 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.418453 4773 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.418653 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/4ed1801c2b645a90d9988960310f81c674791f1ebaa9a5a8d5281bd95a824772/globalmount\"" pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.419026 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cfc01903-fce2-4ce5-a669-804e05bafa95-server-conf\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.419163 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cfc01903-fce2-4ce5-a669-804e05bafa95-config-data\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.423659 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.428020 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cfc01903-fce2-4ce5-a669-804e05bafa95-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.429219 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cfc01903-fce2-4ce5-a669-804e05bafa95-pod-info\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.441809 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.442086 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.448207 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlqjw\" (UniqueName: \"kubernetes.io/projected/cfc01903-fce2-4ce5-a669-804e05bafa95-kube-api-access-nlqjw\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.460214 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56bbd59dc5-bm6cs"] Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.472017 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/dnsmasq-dns-56bbd59dc5-bm6cs"] Jan 22 13:18:36 crc kubenswrapper[4773]: W0122 13:18:36.472622 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4af4d4e7_d821_46cb_8609_c96c8521308b.slice/crio-407679c7233bc47fccffc61ef2da34098d9058d6b039b705180a1b76fa8c8276 WatchSource:0}: Error finding container 407679c7233bc47fccffc61ef2da34098d9058d6b039b705180a1b76fa8c8276: Status 404 returned error can't find the container with id 407679c7233bc47fccffc61ef2da34098d9058d6b039b705180a1b76fa8c8276 Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.475355 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\") pod \"rabbitmq-server-0\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " pod="openstack/rabbitmq-server-0" Jan 22 13:18:36 crc kubenswrapper[4773]: I0122 13:18:36.564188 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 22 13:18:38 crc kubenswrapper[4773]: E0122 13:18:36.566917 4773 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Jan 22 13:18:38 crc kubenswrapper[4773]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/5877eb56-c6f0-4bac-b65e-acfba6e0e515/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Jan 22 13:18:38 crc kubenswrapper[4773]: > podSandboxID="01d3ffb1c0929b1bb0d8c545a21d0c777954ca1e164e16225423a90bc7884578" Jan 22 13:18:38 crc kubenswrapper[4773]: E0122 13:18:36.567149 4773 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 22 13:18:38 crc kubenswrapper[4773]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nb6hc5h68h68h594h659hdbh679h65ch5f6hdch6h5b9h8fh55hfhf8h57fhc7h56ch687h669h559h678h5dhc7hf7h697h5d6h9ch669h54fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qjlrf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 
},Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-865d9b578f-85nxb_openstack(5877eb56-c6f0-4bac-b65e-acfba6e0e515): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/5877eb56-c6f0-4bac-b65e-acfba6e0e515/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Jan 22 13:18:38 crc kubenswrapper[4773]: > logger="UnhandledError" Jan 22 13:18:38 crc kubenswrapper[4773]: E0122 13:18:36.568306 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/5877eb56-c6f0-4bac-b65e-acfba6e0e515/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-865d9b578f-85nxb" podUID="5877eb56-c6f0-4bac-b65e-acfba6e0e515" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.645742 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5986db9b4f-6kffd" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.670416 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2db4b36-5041-41b6-9311-24e6b251b290" path="/var/lib/kubelet/pods/d2db4b36-5041-41b6-9311-24e6b251b290/volumes" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.719560 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqrjv\" (UniqueName: \"kubernetes.io/projected/6249a7d4-1404-4fc0-a9b5-207fd96ca0c0-kube-api-access-jqrjv\") pod \"6249a7d4-1404-4fc0-a9b5-207fd96ca0c0\" (UID: \"6249a7d4-1404-4fc0-a9b5-207fd96ca0c0\") " Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.719978 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6249a7d4-1404-4fc0-a9b5-207fd96ca0c0-config\") pod \"6249a7d4-1404-4fc0-a9b5-207fd96ca0c0\" (UID: \"6249a7d4-1404-4fc0-a9b5-207fd96ca0c0\") " Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.724318 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6249a7d4-1404-4fc0-a9b5-207fd96ca0c0-kube-api-access-jqrjv" (OuterVolumeSpecName: "kube-api-access-jqrjv") pod "6249a7d4-1404-4fc0-a9b5-207fd96ca0c0" (UID: "6249a7d4-1404-4fc0-a9b5-207fd96ca0c0"). InnerVolumeSpecName "kube-api-access-jqrjv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.740294 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Jan 22 13:18:38 crc kubenswrapper[4773]: E0122 13:18:36.740618 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6249a7d4-1404-4fc0-a9b5-207fd96ca0c0" containerName="init" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.740630 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6249a7d4-1404-4fc0-a9b5-207fd96ca0c0" containerName="init" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.740761 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6249a7d4-1404-4fc0-a9b5-207fd96ca0c0" containerName="init" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.741480 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.744811 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-9lnqm" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.749134 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6249a7d4-1404-4fc0-a9b5-207fd96ca0c0-config" (OuterVolumeSpecName: "config") pod "6249a7d4-1404-4fc0-a9b5-207fd96ca0c0" (UID: "6249a7d4-1404-4fc0-a9b5-207fd96ca0c0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.753381 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.753388 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.753482 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.756369 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.760788 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.821446 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/812c3f46-cf97-4dc8-9c19-68477c9f8eca-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.821523 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/812c3f46-cf97-4dc8-9c19-68477c9f8eca-config-data-generated\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.821552 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/812c3f46-cf97-4dc8-9c19-68477c9f8eca-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 
13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.821576 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/812c3f46-cf97-4dc8-9c19-68477c9f8eca-config-data-default\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.821772 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xk967\" (UniqueName: \"kubernetes.io/projected/812c3f46-cf97-4dc8-9c19-68477c9f8eca-kube-api-access-xk967\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.821830 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8898954f-2073-4cfe-9a0f-15d028ec3ea2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8898954f-2073-4cfe-9a0f-15d028ec3ea2\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.821863 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/812c3f46-cf97-4dc8-9c19-68477c9f8eca-kolla-config\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.821896 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/812c3f46-cf97-4dc8-9c19-68477c9f8eca-operator-scripts\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.822065 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqrjv\" (UniqueName: \"kubernetes.io/projected/6249a7d4-1404-4fc0-a9b5-207fd96ca0c0-kube-api-access-jqrjv\") on node \"crc\" DevicePath \"\"" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.822083 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6249a7d4-1404-4fc0-a9b5-207fd96ca0c0-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.923220 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/812c3f46-cf97-4dc8-9c19-68477c9f8eca-config-data-generated\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.923294 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/812c3f46-cf97-4dc8-9c19-68477c9f8eca-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.923326 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/812c3f46-cf97-4dc8-9c19-68477c9f8eca-config-data-default\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.923355 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xk967\" (UniqueName: \"kubernetes.io/projected/812c3f46-cf97-4dc8-9c19-68477c9f8eca-kube-api-access-xk967\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.923375 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8898954f-2073-4cfe-9a0f-15d028ec3ea2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8898954f-2073-4cfe-9a0f-15d028ec3ea2\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.923393 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/812c3f46-cf97-4dc8-9c19-68477c9f8eca-kolla-config\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.923412 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/812c3f46-cf97-4dc8-9c19-68477c9f8eca-operator-scripts\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.923455 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/812c3f46-cf97-4dc8-9c19-68477c9f8eca-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.924645 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/812c3f46-cf97-4dc8-9c19-68477c9f8eca-kolla-config\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.925124 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/812c3f46-cf97-4dc8-9c19-68477c9f8eca-config-data-generated\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.925554 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/812c3f46-cf97-4dc8-9c19-68477c9f8eca-operator-scripts\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.926078 4773 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.926099 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8898954f-2073-4cfe-9a0f-15d028ec3ea2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8898954f-2073-4cfe-9a0f-15d028ec3ea2\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/ba33f090525062e6346b049cd0c8eddef4cd09e7b09ea822eb6bef80727ae7c2/globalmount\"" pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.926528 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/812c3f46-cf97-4dc8-9c19-68477c9f8eca-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.927762 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/812c3f46-cf97-4dc8-9c19-68477c9f8eca-config-data-default\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.927850 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/812c3f46-cf97-4dc8-9c19-68477c9f8eca-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.950813 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xk967\" (UniqueName: \"kubernetes.io/projected/812c3f46-cf97-4dc8-9c19-68477c9f8eca-kube-api-access-xk967\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:36.964257 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8898954f-2073-4cfe-9a0f-15d028ec3ea2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8898954f-2073-4cfe-9a0f-15d028ec3ea2\") pod \"openstack-galera-0\" (UID: \"812c3f46-cf97-4dc8-9c19-68477c9f8eca\") " pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:37.067733 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:37.300984 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4af4d4e7-d821-46cb-8609-c96c8521308b","Type":"ContainerStarted","Data":"407679c7233bc47fccffc61ef2da34098d9058d6b039b705180a1b76fa8c8276"} Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:37.303211 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d79f765b5-c256q" event={"ID":"090e1ede-1784-49b2-9f35-400070db2926","Type":"ContainerStarted","Data":"3be9d14748b93a50eefd07a1320d027a21f364a3ecadadacd9efb9857c72a76d"} Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:37.303344 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d79f765b5-c256q" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:37.304906 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5986db9b4f-6kffd" event={"ID":"6249a7d4-1404-4fc0-a9b5-207fd96ca0c0","Type":"ContainerDied","Data":"07bad35e1b343e70ffcac57db12e986ad431a5b3c6fb6300a61d6571ff6efe80"} Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:37.304950 4773 scope.go:117] "RemoveContainer" containerID="09c345192a74e11710178c32cae2661ae8b7ba7bc0ace7fe0bc3062b873b1666" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:37.304969 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5986db9b4f-6kffd" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:37.331065 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d79f765b5-c256q" podStartSLOduration=2.330861739 podStartE2EDuration="2.330861739s" podCreationTimestamp="2026-01-22 13:18:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:18:37.327986617 +0000 UTC m=+5024.906102452" watchObservedRunningTime="2026-01-22 13:18:37.330861739 +0000 UTC m=+5024.908977564" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:37.426224 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5986db9b4f-6kffd"] Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:37.431664 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5986db9b4f-6kffd"] Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.140005 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.141239 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.145493 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.145797 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-hc72b" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.145986 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.146671 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.153118 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.245268 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6l88\" (UniqueName: \"kubernetes.io/projected/344a28a5-4b8d-4f67-8165-5cff172af873-kube-api-access-z6l88\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.245348 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/344a28a5-4b8d-4f67-8165-5cff172af873-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.245381 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/344a28a5-4b8d-4f67-8165-5cff172af873-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.245472 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-aea16601-4ef0-4d1d-b441-22a70ccad9a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aea16601-4ef0-4d1d-b441-22a70ccad9a8\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.245501 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/344a28a5-4b8d-4f67-8165-5cff172af873-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.245554 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/344a28a5-4b8d-4f67-8165-5cff172af873-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.245604 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/344a28a5-4b8d-4f67-8165-5cff172af873-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.245683 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/344a28a5-4b8d-4f67-8165-5cff172af873-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.347272 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/344a28a5-4b8d-4f67-8165-5cff172af873-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.347375 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/344a28a5-4b8d-4f67-8165-5cff172af873-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.347405 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/344a28a5-4b8d-4f67-8165-5cff172af873-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.347456 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6l88\" (UniqueName: \"kubernetes.io/projected/344a28a5-4b8d-4f67-8165-5cff172af873-kube-api-access-z6l88\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.347495 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/344a28a5-4b8d-4f67-8165-5cff172af873-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.347519 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/344a28a5-4b8d-4f67-8165-5cff172af873-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.347572 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-aea16601-4ef0-4d1d-b441-22a70ccad9a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aea16601-4ef0-4d1d-b441-22a70ccad9a8\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.347622 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data-default\" (UniqueName: \"kubernetes.io/configmap/344a28a5-4b8d-4f67-8165-5cff172af873-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.348039 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/344a28a5-4b8d-4f67-8165-5cff172af873-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.350247 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/344a28a5-4b8d-4f67-8165-5cff172af873-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.350757 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/344a28a5-4b8d-4f67-8165-5cff172af873-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.357232 4773 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.357277 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-aea16601-4ef0-4d1d-b441-22a70ccad9a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aea16601-4ef0-4d1d-b441-22a70ccad9a8\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2f4f18192f1398f5dac9c0b77c65d98b6b738d855457be19995694ee7ca3c0e2/globalmount\"" pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.361593 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/344a28a5-4b8d-4f67-8165-5cff172af873-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.363416 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/344a28a5-4b8d-4f67-8165-5cff172af873-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.364525 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/344a28a5-4b8d-4f67-8165-5cff172af873-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.369693 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6l88\" (UniqueName: 
\"kubernetes.io/projected/344a28a5-4b8d-4f67-8165-5cff172af873-kube-api-access-z6l88\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.390794 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-aea16601-4ef0-4d1d-b441-22a70ccad9a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-aea16601-4ef0-4d1d-b441-22a70ccad9a8\") pod \"openstack-cell1-galera-0\" (UID: \"344a28a5-4b8d-4f67-8165-5cff172af873\") " pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.464652 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.512749 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.514077 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.518865 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-kcv4c" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.519147 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.519239 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.534116 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.653651 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/83b59e90-0bfd-47a7-8087-6c4689bbb0bd-memcached-tls-certs\") pod \"memcached-0\" (UID: \"83b59e90-0bfd-47a7-8087-6c4689bbb0bd\") " pod="openstack/memcached-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.653781 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83b59e90-0bfd-47a7-8087-6c4689bbb0bd-combined-ca-bundle\") pod \"memcached-0\" (UID: \"83b59e90-0bfd-47a7-8087-6c4689bbb0bd\") " pod="openstack/memcached-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.653813 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/83b59e90-0bfd-47a7-8087-6c4689bbb0bd-kolla-config\") pod \"memcached-0\" (UID: \"83b59e90-0bfd-47a7-8087-6c4689bbb0bd\") " pod="openstack/memcached-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.653853 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29pz6\" (UniqueName: \"kubernetes.io/projected/83b59e90-0bfd-47a7-8087-6c4689bbb0bd-kube-api-access-29pz6\") pod \"memcached-0\" (UID: \"83b59e90-0bfd-47a7-8087-6c4689bbb0bd\") " pod="openstack/memcached-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.653916 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/83b59e90-0bfd-47a7-8087-6c4689bbb0bd-config-data\") pod \"memcached-0\" (UID: \"83b59e90-0bfd-47a7-8087-6c4689bbb0bd\") " pod="openstack/memcached-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.670000 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6249a7d4-1404-4fc0-a9b5-207fd96ca0c0" path="/var/lib/kubelet/pods/6249a7d4-1404-4fc0-a9b5-207fd96ca0c0/volumes" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.754961 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83b59e90-0bfd-47a7-8087-6c4689bbb0bd-combined-ca-bundle\") pod \"memcached-0\" (UID: \"83b59e90-0bfd-47a7-8087-6c4689bbb0bd\") " pod="openstack/memcached-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.755328 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/83b59e90-0bfd-47a7-8087-6c4689bbb0bd-kolla-config\") pod \"memcached-0\" (UID: \"83b59e90-0bfd-47a7-8087-6c4689bbb0bd\") " pod="openstack/memcached-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.755374 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29pz6\" (UniqueName: \"kubernetes.io/projected/83b59e90-0bfd-47a7-8087-6c4689bbb0bd-kube-api-access-29pz6\") pod \"memcached-0\" (UID: \"83b59e90-0bfd-47a7-8087-6c4689bbb0bd\") " pod="openstack/memcached-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.755429 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/83b59e90-0bfd-47a7-8087-6c4689bbb0bd-config-data\") pod \"memcached-0\" (UID: \"83b59e90-0bfd-47a7-8087-6c4689bbb0bd\") " pod="openstack/memcached-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.755544 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/83b59e90-0bfd-47a7-8087-6c4689bbb0bd-memcached-tls-certs\") pod \"memcached-0\" (UID: \"83b59e90-0bfd-47a7-8087-6c4689bbb0bd\") " pod="openstack/memcached-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.756241 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/83b59e90-0bfd-47a7-8087-6c4689bbb0bd-kolla-config\") pod \"memcached-0\" (UID: \"83b59e90-0bfd-47a7-8087-6c4689bbb0bd\") " pod="openstack/memcached-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.756390 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/83b59e90-0bfd-47a7-8087-6c4689bbb0bd-config-data\") pod \"memcached-0\" (UID: \"83b59e90-0bfd-47a7-8087-6c4689bbb0bd\") " pod="openstack/memcached-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.764029 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83b59e90-0bfd-47a7-8087-6c4689bbb0bd-combined-ca-bundle\") pod \"memcached-0\" (UID: \"83b59e90-0bfd-47a7-8087-6c4689bbb0bd\") " pod="openstack/memcached-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.769095 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/83b59e90-0bfd-47a7-8087-6c4689bbb0bd-memcached-tls-certs\") pod \"memcached-0\" (UID: 
\"83b59e90-0bfd-47a7-8087-6c4689bbb0bd\") " pod="openstack/memcached-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.772016 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29pz6\" (UniqueName: \"kubernetes.io/projected/83b59e90-0bfd-47a7-8087-6c4689bbb0bd-kube-api-access-29pz6\") pod \"memcached-0\" (UID: \"83b59e90-0bfd-47a7-8087-6c4689bbb0bd\") " pod="openstack/memcached-0" Jan 22 13:18:38 crc kubenswrapper[4773]: I0122 13:18:38.864670 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 22 13:18:39 crc kubenswrapper[4773]: I0122 13:18:39.128922 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 13:18:39 crc kubenswrapper[4773]: I0122 13:18:39.135116 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 22 13:18:39 crc kubenswrapper[4773]: I0122 13:18:39.140348 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 22 13:18:39 crc kubenswrapper[4773]: I0122 13:18:39.290539 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 22 13:18:39 crc kubenswrapper[4773]: W0122 13:18:39.298533 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod83b59e90_0bfd_47a7_8087_6c4689bbb0bd.slice/crio-fbb38f06faa617193c62213fdea8acc66f97a315c669f185944ecfc8c09482ad WatchSource:0}: Error finding container fbb38f06faa617193c62213fdea8acc66f97a315c669f185944ecfc8c09482ad: Status 404 returned error can't find the container with id fbb38f06faa617193c62213fdea8acc66f97a315c669f185944ecfc8c09482ad Jan 22 13:18:39 crc kubenswrapper[4773]: I0122 13:18:39.322930 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865d9b578f-85nxb" event={"ID":"5877eb56-c6f0-4bac-b65e-acfba6e0e515","Type":"ContainerStarted","Data":"ceb9e73c335096c2e9fd8e28a87e03a3bca26252329fb7d51487a6e0717b836f"} Jan 22 13:18:39 crc kubenswrapper[4773]: I0122 13:18:39.323146 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-865d9b578f-85nxb" Jan 22 13:18:39 crc kubenswrapper[4773]: I0122 13:18:39.324393 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"344a28a5-4b8d-4f67-8165-5cff172af873","Type":"ContainerStarted","Data":"a82de3338122bf11bb5468e219383273c244f61f1f47e50f55e366549969d744"} Jan 22 13:18:39 crc kubenswrapper[4773]: I0122 13:18:39.324423 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"344a28a5-4b8d-4f67-8165-5cff172af873","Type":"ContainerStarted","Data":"c1cd8517355d2b3430851634e43e5efbc6d603a253f030ecdf683c7f1a00caa4"} Jan 22 13:18:39 crc kubenswrapper[4773]: I0122 13:18:39.326219 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cfc01903-fce2-4ce5-a669-804e05bafa95","Type":"ContainerStarted","Data":"424c3566acb93c2ab0643d6063736d239efd5a305a3cca0bb4f978c052d70ca5"} Jan 22 13:18:39 crc kubenswrapper[4773]: I0122 13:18:39.328267 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4af4d4e7-d821-46cb-8609-c96c8521308b","Type":"ContainerStarted","Data":"1f6071ed6573416f9a0afb8ac9a69995e48c0bef9503f90c3e1502a753aac874"} Jan 22 13:18:39 crc kubenswrapper[4773]: I0122 13:18:39.329891 4773 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"812c3f46-cf97-4dc8-9c19-68477c9f8eca","Type":"ContainerStarted","Data":"4fc8ba309d48be565bc36e94d0ea93ec1ddef81442fcbf83530cafdcc7d01b74"} Jan 22 13:18:39 crc kubenswrapper[4773]: I0122 13:18:39.329926 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"812c3f46-cf97-4dc8-9c19-68477c9f8eca","Type":"ContainerStarted","Data":"2d65bdb643175d838d206d54707eb6c58a3591a17b1e07d240139fa9b675fa94"} Jan 22 13:18:39 crc kubenswrapper[4773]: I0122 13:18:39.331032 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"83b59e90-0bfd-47a7-8087-6c4689bbb0bd","Type":"ContainerStarted","Data":"fbb38f06faa617193c62213fdea8acc66f97a315c669f185944ecfc8c09482ad"} Jan 22 13:18:39 crc kubenswrapper[4773]: I0122 13:18:39.347552 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-865d9b578f-85nxb" podStartSLOduration=5.34753385 podStartE2EDuration="5.34753385s" podCreationTimestamp="2026-01-22 13:18:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:18:39.342197659 +0000 UTC m=+5026.920313494" watchObservedRunningTime="2026-01-22 13:18:39.34753385 +0000 UTC m=+5026.925649675" Jan 22 13:18:40 crc kubenswrapper[4773]: I0122 13:18:40.341331 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cfc01903-fce2-4ce5-a669-804e05bafa95","Type":"ContainerStarted","Data":"538e32fa72f826b6e67be54e198a25927b8ca9ae8d797c21776a9a5b4f0e6505"} Jan 22 13:18:40 crc kubenswrapper[4773]: I0122 13:18:40.343047 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"83b59e90-0bfd-47a7-8087-6c4689bbb0bd","Type":"ContainerStarted","Data":"1760f4e6e70a708fe2766c194dd4b44e8d84788d1c2b2c87c8bd6a2455d195ae"} Jan 22 13:18:40 crc kubenswrapper[4773]: I0122 13:18:40.404419 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.40439915 podStartE2EDuration="2.40439915s" podCreationTimestamp="2026-01-22 13:18:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:18:40.393675148 +0000 UTC m=+5027.971791013" watchObservedRunningTime="2026-01-22 13:18:40.40439915 +0000 UTC m=+5027.982514965" Jan 22 13:18:41 crc kubenswrapper[4773]: I0122 13:18:41.351706 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Jan 22 13:18:43 crc kubenswrapper[4773]: I0122 13:18:43.366107 4773 generic.go:334] "Generic (PLEG): container finished" podID="344a28a5-4b8d-4f67-8165-5cff172af873" containerID="a82de3338122bf11bb5468e219383273c244f61f1f47e50f55e366549969d744" exitCode=0 Jan 22 13:18:43 crc kubenswrapper[4773]: I0122 13:18:43.366207 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"344a28a5-4b8d-4f67-8165-5cff172af873","Type":"ContainerDied","Data":"a82de3338122bf11bb5468e219383273c244f61f1f47e50f55e366549969d744"} Jan 22 13:18:43 crc kubenswrapper[4773]: I0122 13:18:43.370095 4773 generic.go:334] "Generic (PLEG): container finished" podID="812c3f46-cf97-4dc8-9c19-68477c9f8eca" containerID="4fc8ba309d48be565bc36e94d0ea93ec1ddef81442fcbf83530cafdcc7d01b74" exitCode=0 Jan 22 13:18:43 
crc kubenswrapper[4773]: I0122 13:18:43.370137 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"812c3f46-cf97-4dc8-9c19-68477c9f8eca","Type":"ContainerDied","Data":"4fc8ba309d48be565bc36e94d0ea93ec1ddef81442fcbf83530cafdcc7d01b74"} Jan 22 13:18:44 crc kubenswrapper[4773]: I0122 13:18:44.381732 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"812c3f46-cf97-4dc8-9c19-68477c9f8eca","Type":"ContainerStarted","Data":"8a5ea82d7c38ec24f57c6f414245548d9a5f23b6b091e7002b56dc7d21501822"} Jan 22 13:18:44 crc kubenswrapper[4773]: I0122 13:18:44.383712 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"344a28a5-4b8d-4f67-8165-5cff172af873","Type":"ContainerStarted","Data":"db29ed6ac1a8f2c1653717753c1ab23fb94a6db2e6cddcab6252fc2b9b4484fa"} Jan 22 13:18:44 crc kubenswrapper[4773]: I0122 13:18:44.405357 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=9.405334536 podStartE2EDuration="9.405334536s" podCreationTimestamp="2026-01-22 13:18:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:18:44.400768537 +0000 UTC m=+5031.978884362" watchObservedRunningTime="2026-01-22 13:18:44.405334536 +0000 UTC m=+5031.983450361" Jan 22 13:18:44 crc kubenswrapper[4773]: I0122 13:18:44.430085 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=7.430066864 podStartE2EDuration="7.430066864s" podCreationTimestamp="2026-01-22 13:18:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:18:44.427850751 +0000 UTC m=+5032.005966586" watchObservedRunningTime="2026-01-22 13:18:44.430066864 +0000 UTC m=+5032.008182689" Jan 22 13:18:44 crc kubenswrapper[4773]: I0122 13:18:44.757116 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-865d9b578f-85nxb" Jan 22 13:18:45 crc kubenswrapper[4773]: I0122 13:18:45.461517 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d79f765b5-c256q" Jan 22 13:18:45 crc kubenswrapper[4773]: I0122 13:18:45.477937 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6w2zd"] Jan 22 13:18:45 crc kubenswrapper[4773]: I0122 13:18:45.480168 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6w2zd" Jan 22 13:18:45 crc kubenswrapper[4773]: I0122 13:18:45.491514 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6w2zd"] Jan 22 13:18:45 crc kubenswrapper[4773]: I0122 13:18:45.511090 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865d9b578f-85nxb"] Jan 22 13:18:45 crc kubenswrapper[4773]: I0122 13:18:45.511335 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-865d9b578f-85nxb" podUID="5877eb56-c6f0-4bac-b65e-acfba6e0e515" containerName="dnsmasq-dns" containerID="cri-o://ceb9e73c335096c2e9fd8e28a87e03a3bca26252329fb7d51487a6e0717b836f" gracePeriod=10 Jan 22 13:18:45 crc kubenswrapper[4773]: I0122 13:18:45.572235 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6g4t\" (UniqueName: \"kubernetes.io/projected/4845cf42-06af-4263-99e7-3f46d106d6ce-kube-api-access-v6g4t\") pod \"community-operators-6w2zd\" (UID: \"4845cf42-06af-4263-99e7-3f46d106d6ce\") " pod="openshift-marketplace/community-operators-6w2zd" Jan 22 13:18:45 crc kubenswrapper[4773]: I0122 13:18:45.572283 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4845cf42-06af-4263-99e7-3f46d106d6ce-utilities\") pod \"community-operators-6w2zd\" (UID: \"4845cf42-06af-4263-99e7-3f46d106d6ce\") " pod="openshift-marketplace/community-operators-6w2zd" Jan 22 13:18:45 crc kubenswrapper[4773]: I0122 13:18:45.572350 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4845cf42-06af-4263-99e7-3f46d106d6ce-catalog-content\") pod \"community-operators-6w2zd\" (UID: \"4845cf42-06af-4263-99e7-3f46d106d6ce\") " pod="openshift-marketplace/community-operators-6w2zd" Jan 22 13:18:45 crc kubenswrapper[4773]: I0122 13:18:45.680687 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4845cf42-06af-4263-99e7-3f46d106d6ce-utilities\") pod \"community-operators-6w2zd\" (UID: \"4845cf42-06af-4263-99e7-3f46d106d6ce\") " pod="openshift-marketplace/community-operators-6w2zd" Jan 22 13:18:45 crc kubenswrapper[4773]: I0122 13:18:45.680757 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4845cf42-06af-4263-99e7-3f46d106d6ce-catalog-content\") pod \"community-operators-6w2zd\" (UID: \"4845cf42-06af-4263-99e7-3f46d106d6ce\") " pod="openshift-marketplace/community-operators-6w2zd" Jan 22 13:18:45 crc kubenswrapper[4773]: I0122 13:18:45.680852 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6g4t\" (UniqueName: \"kubernetes.io/projected/4845cf42-06af-4263-99e7-3f46d106d6ce-kube-api-access-v6g4t\") pod \"community-operators-6w2zd\" (UID: \"4845cf42-06af-4263-99e7-3f46d106d6ce\") " pod="openshift-marketplace/community-operators-6w2zd" Jan 22 13:18:45 crc kubenswrapper[4773]: I0122 13:18:45.681934 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4845cf42-06af-4263-99e7-3f46d106d6ce-utilities\") pod \"community-operators-6w2zd\" (UID: \"4845cf42-06af-4263-99e7-3f46d106d6ce\") " 
pod="openshift-marketplace/community-operators-6w2zd" Jan 22 13:18:45 crc kubenswrapper[4773]: I0122 13:18:45.682220 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4845cf42-06af-4263-99e7-3f46d106d6ce-catalog-content\") pod \"community-operators-6w2zd\" (UID: \"4845cf42-06af-4263-99e7-3f46d106d6ce\") " pod="openshift-marketplace/community-operators-6w2zd" Jan 22 13:18:45 crc kubenswrapper[4773]: I0122 13:18:45.701387 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6g4t\" (UniqueName: \"kubernetes.io/projected/4845cf42-06af-4263-99e7-3f46d106d6ce-kube-api-access-v6g4t\") pod \"community-operators-6w2zd\" (UID: \"4845cf42-06af-4263-99e7-3f46d106d6ce\") " pod="openshift-marketplace/community-operators-6w2zd" Jan 22 13:18:45 crc kubenswrapper[4773]: I0122 13:18:45.858702 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6w2zd" Jan 22 13:18:45 crc kubenswrapper[4773]: I0122 13:18:45.993993 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865d9b578f-85nxb" Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.087794 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5877eb56-c6f0-4bac-b65e-acfba6e0e515-dns-svc\") pod \"5877eb56-c6f0-4bac-b65e-acfba6e0e515\" (UID: \"5877eb56-c6f0-4bac-b65e-acfba6e0e515\") " Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.088048 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjlrf\" (UniqueName: \"kubernetes.io/projected/5877eb56-c6f0-4bac-b65e-acfba6e0e515-kube-api-access-qjlrf\") pod \"5877eb56-c6f0-4bac-b65e-acfba6e0e515\" (UID: \"5877eb56-c6f0-4bac-b65e-acfba6e0e515\") " Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.088193 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5877eb56-c6f0-4bac-b65e-acfba6e0e515-config\") pod \"5877eb56-c6f0-4bac-b65e-acfba6e0e515\" (UID: \"5877eb56-c6f0-4bac-b65e-acfba6e0e515\") " Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.096425 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5877eb56-c6f0-4bac-b65e-acfba6e0e515-kube-api-access-qjlrf" (OuterVolumeSpecName: "kube-api-access-qjlrf") pod "5877eb56-c6f0-4bac-b65e-acfba6e0e515" (UID: "5877eb56-c6f0-4bac-b65e-acfba6e0e515"). InnerVolumeSpecName "kube-api-access-qjlrf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.148557 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5877eb56-c6f0-4bac-b65e-acfba6e0e515-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5877eb56-c6f0-4bac-b65e-acfba6e0e515" (UID: "5877eb56-c6f0-4bac-b65e-acfba6e0e515"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.170586 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5877eb56-c6f0-4bac-b65e-acfba6e0e515-config" (OuterVolumeSpecName: "config") pod "5877eb56-c6f0-4bac-b65e-acfba6e0e515" (UID: "5877eb56-c6f0-4bac-b65e-acfba6e0e515"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.190111 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5877eb56-c6f0-4bac-b65e-acfba6e0e515-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.190149 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5877eb56-c6f0-4bac-b65e-acfba6e0e515-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.190169 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjlrf\" (UniqueName: \"kubernetes.io/projected/5877eb56-c6f0-4bac-b65e-acfba6e0e515-kube-api-access-qjlrf\") on node \"crc\" DevicePath \"\"" Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.353804 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6w2zd"] Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.398492 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6w2zd" event={"ID":"4845cf42-06af-4263-99e7-3f46d106d6ce","Type":"ContainerStarted","Data":"11a65d149440de9b4ab26273efe28decc2f5add1d291bd5c58d1f7bd8318719b"} Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.400512 4773 generic.go:334] "Generic (PLEG): container finished" podID="5877eb56-c6f0-4bac-b65e-acfba6e0e515" containerID="ceb9e73c335096c2e9fd8e28a87e03a3bca26252329fb7d51487a6e0717b836f" exitCode=0 Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.400562 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865d9b578f-85nxb" event={"ID":"5877eb56-c6f0-4bac-b65e-acfba6e0e515","Type":"ContainerDied","Data":"ceb9e73c335096c2e9fd8e28a87e03a3bca26252329fb7d51487a6e0717b836f"} Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.400609 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865d9b578f-85nxb" event={"ID":"5877eb56-c6f0-4bac-b65e-acfba6e0e515","Type":"ContainerDied","Data":"01d3ffb1c0929b1bb0d8c545a21d0c777954ca1e164e16225423a90bc7884578"} Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.400602 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-865d9b578f-85nxb" Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.400624 4773 scope.go:117] "RemoveContainer" containerID="ceb9e73c335096c2e9fd8e28a87e03a3bca26252329fb7d51487a6e0717b836f" Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.423573 4773 scope.go:117] "RemoveContainer" containerID="03a4cc5934be2587f17525d0f60cbdffebf7201193efcf50c3129efa928bb9d8" Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.441191 4773 scope.go:117] "RemoveContainer" containerID="ceb9e73c335096c2e9fd8e28a87e03a3bca26252329fb7d51487a6e0717b836f" Jan 22 13:18:46 crc kubenswrapper[4773]: E0122 13:18:46.442853 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ceb9e73c335096c2e9fd8e28a87e03a3bca26252329fb7d51487a6e0717b836f\": container with ID starting with ceb9e73c335096c2e9fd8e28a87e03a3bca26252329fb7d51487a6e0717b836f not found: ID does not exist" containerID="ceb9e73c335096c2e9fd8e28a87e03a3bca26252329fb7d51487a6e0717b836f" Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.442928 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceb9e73c335096c2e9fd8e28a87e03a3bca26252329fb7d51487a6e0717b836f"} err="failed to get container status \"ceb9e73c335096c2e9fd8e28a87e03a3bca26252329fb7d51487a6e0717b836f\": rpc error: code = NotFound desc = could not find container \"ceb9e73c335096c2e9fd8e28a87e03a3bca26252329fb7d51487a6e0717b836f\": container with ID starting with ceb9e73c335096c2e9fd8e28a87e03a3bca26252329fb7d51487a6e0717b836f not found: ID does not exist" Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.442972 4773 scope.go:117] "RemoveContainer" containerID="03a4cc5934be2587f17525d0f60cbdffebf7201193efcf50c3129efa928bb9d8" Jan 22 13:18:46 crc kubenswrapper[4773]: E0122 13:18:46.443396 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03a4cc5934be2587f17525d0f60cbdffebf7201193efcf50c3129efa928bb9d8\": container with ID starting with 03a4cc5934be2587f17525d0f60cbdffebf7201193efcf50c3129efa928bb9d8 not found: ID does not exist" containerID="03a4cc5934be2587f17525d0f60cbdffebf7201193efcf50c3129efa928bb9d8" Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.443447 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03a4cc5934be2587f17525d0f60cbdffebf7201193efcf50c3129efa928bb9d8"} err="failed to get container status \"03a4cc5934be2587f17525d0f60cbdffebf7201193efcf50c3129efa928bb9d8\": rpc error: code = NotFound desc = could not find container \"03a4cc5934be2587f17525d0f60cbdffebf7201193efcf50c3129efa928bb9d8\": container with ID starting with 03a4cc5934be2587f17525d0f60cbdffebf7201193efcf50c3129efa928bb9d8 not found: ID does not exist" Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.457592 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865d9b578f-85nxb"] Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.464119 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-865d9b578f-85nxb"] Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.658350 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47" Jan 22 13:18:46 crc kubenswrapper[4773]: E0122 13:18:46.658854 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:18:46 crc kubenswrapper[4773]: I0122 13:18:46.670298 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5877eb56-c6f0-4bac-b65e-acfba6e0e515" path="/var/lib/kubelet/pods/5877eb56-c6f0-4bac-b65e-acfba6e0e515/volumes" Jan 22 13:18:47 crc kubenswrapper[4773]: I0122 13:18:47.068401 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Jan 22 13:18:47 crc kubenswrapper[4773]: I0122 13:18:47.068453 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Jan 22 13:18:47 crc kubenswrapper[4773]: I0122 13:18:47.411419 4773 generic.go:334] "Generic (PLEG): container finished" podID="4845cf42-06af-4263-99e7-3f46d106d6ce" containerID="14f192df553db38eab6a100634de38cc6a338e6c263354972586556bffcb00af" exitCode=0 Jan 22 13:18:47 crc kubenswrapper[4773]: I0122 13:18:47.411522 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6w2zd" event={"ID":"4845cf42-06af-4263-99e7-3f46d106d6ce","Type":"ContainerDied","Data":"14f192df553db38eab6a100634de38cc6a338e6c263354972586556bffcb00af"} Jan 22 13:18:48 crc kubenswrapper[4773]: I0122 13:18:48.466100 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:48 crc kubenswrapper[4773]: I0122 13:18:48.467687 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:48 crc kubenswrapper[4773]: I0122 13:18:48.865768 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Jan 22 13:18:49 crc kubenswrapper[4773]: I0122 13:18:49.333036 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Jan 22 13:18:49 crc kubenswrapper[4773]: I0122 13:18:49.408996 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Jan 22 13:18:49 crc kubenswrapper[4773]: I0122 13:18:49.429563 4773 generic.go:334] "Generic (PLEG): container finished" podID="4845cf42-06af-4263-99e7-3f46d106d6ce" containerID="4e2376eac03a7f1be81af64f0db08e3134e3617d0ae396f20ee8969b7e4bfae1" exitCode=0 Jan 22 13:18:49 crc kubenswrapper[4773]: I0122 13:18:49.430985 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6w2zd" event={"ID":"4845cf42-06af-4263-99e7-3f46d106d6ce","Type":"ContainerDied","Data":"4e2376eac03a7f1be81af64f0db08e3134e3617d0ae396f20ee8969b7e4bfae1"} Jan 22 13:18:50 crc kubenswrapper[4773]: I0122 13:18:50.736605 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:50 crc kubenswrapper[4773]: I0122 13:18:50.818815 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Jan 22 13:18:55 crc kubenswrapper[4773]: I0122 13:18:55.476051 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6w2zd" 
event={"ID":"4845cf42-06af-4263-99e7-3f46d106d6ce","Type":"ContainerStarted","Data":"e8b7765963df9c2dc296c42af3f840a7e43f7d3e51b1c7d06860ffdb8f3fb040"} Jan 22 13:18:55 crc kubenswrapper[4773]: I0122 13:18:55.860865 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6w2zd" Jan 22 13:18:55 crc kubenswrapper[4773]: I0122 13:18:55.861300 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6w2zd" Jan 22 13:18:55 crc kubenswrapper[4773]: I0122 13:18:55.899274 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6w2zd" podStartSLOduration=3.713140084 podStartE2EDuration="10.899249532s" podCreationTimestamp="2026-01-22 13:18:45 +0000 UTC" firstStartedPulling="2026-01-22 13:18:47.415854759 +0000 UTC m=+5034.993970584" lastFinishedPulling="2026-01-22 13:18:54.601964207 +0000 UTC m=+5042.180080032" observedRunningTime="2026-01-22 13:18:55.86303463 +0000 UTC m=+5043.441150475" watchObservedRunningTime="2026-01-22 13:18:55.899249532 +0000 UTC m=+5043.477365357" Jan 22 13:18:55 crc kubenswrapper[4773]: I0122 13:18:55.918269 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-8d2ls"] Jan 22 13:18:55 crc kubenswrapper[4773]: E0122 13:18:55.918848 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5877eb56-c6f0-4bac-b65e-acfba6e0e515" containerName="dnsmasq-dns" Jan 22 13:18:55 crc kubenswrapper[4773]: I0122 13:18:55.918926 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5877eb56-c6f0-4bac-b65e-acfba6e0e515" containerName="dnsmasq-dns" Jan 22 13:18:55 crc kubenswrapper[4773]: E0122 13:18:55.919026 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5877eb56-c6f0-4bac-b65e-acfba6e0e515" containerName="init" Jan 22 13:18:55 crc kubenswrapper[4773]: I0122 13:18:55.919043 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5877eb56-c6f0-4bac-b65e-acfba6e0e515" containerName="init" Jan 22 13:18:55 crc kubenswrapper[4773]: I0122 13:18:55.919313 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5877eb56-c6f0-4bac-b65e-acfba6e0e515" containerName="dnsmasq-dns" Jan 22 13:18:55 crc kubenswrapper[4773]: I0122 13:18:55.920191 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-8d2ls" Jan 22 13:18:55 crc kubenswrapper[4773]: I0122 13:18:55.924457 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Jan 22 13:18:55 crc kubenswrapper[4773]: I0122 13:18:55.926905 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-8d2ls"] Jan 22 13:18:55 crc kubenswrapper[4773]: I0122 13:18:55.946578 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2647cf5d-245f-46c4-81a1-f5c1b7fd0426-operator-scripts\") pod \"root-account-create-update-8d2ls\" (UID: \"2647cf5d-245f-46c4-81a1-f5c1b7fd0426\") " pod="openstack/root-account-create-update-8d2ls" Jan 22 13:18:55 crc kubenswrapper[4773]: I0122 13:18:55.946943 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5p7hm\" (UniqueName: \"kubernetes.io/projected/2647cf5d-245f-46c4-81a1-f5c1b7fd0426-kube-api-access-5p7hm\") pod \"root-account-create-update-8d2ls\" (UID: \"2647cf5d-245f-46c4-81a1-f5c1b7fd0426\") " pod="openstack/root-account-create-update-8d2ls" Jan 22 13:18:56 crc kubenswrapper[4773]: I0122 13:18:56.048942 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2647cf5d-245f-46c4-81a1-f5c1b7fd0426-operator-scripts\") pod \"root-account-create-update-8d2ls\" (UID: \"2647cf5d-245f-46c4-81a1-f5c1b7fd0426\") " pod="openstack/root-account-create-update-8d2ls" Jan 22 13:18:56 crc kubenswrapper[4773]: I0122 13:18:56.049071 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5p7hm\" (UniqueName: \"kubernetes.io/projected/2647cf5d-245f-46c4-81a1-f5c1b7fd0426-kube-api-access-5p7hm\") pod \"root-account-create-update-8d2ls\" (UID: \"2647cf5d-245f-46c4-81a1-f5c1b7fd0426\") " pod="openstack/root-account-create-update-8d2ls" Jan 22 13:18:56 crc kubenswrapper[4773]: I0122 13:18:56.049749 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2647cf5d-245f-46c4-81a1-f5c1b7fd0426-operator-scripts\") pod \"root-account-create-update-8d2ls\" (UID: \"2647cf5d-245f-46c4-81a1-f5c1b7fd0426\") " pod="openstack/root-account-create-update-8d2ls" Jan 22 13:18:56 crc kubenswrapper[4773]: I0122 13:18:56.069903 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5p7hm\" (UniqueName: \"kubernetes.io/projected/2647cf5d-245f-46c4-81a1-f5c1b7fd0426-kube-api-access-5p7hm\") pod \"root-account-create-update-8d2ls\" (UID: \"2647cf5d-245f-46c4-81a1-f5c1b7fd0426\") " pod="openstack/root-account-create-update-8d2ls" Jan 22 13:18:56 crc kubenswrapper[4773]: I0122 13:18:56.242728 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-8d2ls" Jan 22 13:18:56 crc kubenswrapper[4773]: I0122 13:18:56.679967 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-8d2ls"] Jan 22 13:18:56 crc kubenswrapper[4773]: W0122 13:18:56.686785 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2647cf5d_245f_46c4_81a1_f5c1b7fd0426.slice/crio-646b5c7cd3a0dbd4b5e78d277b9c98f80d78f6e7d86f2ab856851d36fa8bad68 WatchSource:0}: Error finding container 646b5c7cd3a0dbd4b5e78d277b9c98f80d78f6e7d86f2ab856851d36fa8bad68: Status 404 returned error can't find the container with id 646b5c7cd3a0dbd4b5e78d277b9c98f80d78f6e7d86f2ab856851d36fa8bad68 Jan 22 13:18:56 crc kubenswrapper[4773]: I0122 13:18:56.914031 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-6w2zd" podUID="4845cf42-06af-4263-99e7-3f46d106d6ce" containerName="registry-server" probeResult="failure" output=< Jan 22 13:18:56 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 13:18:56 crc kubenswrapper[4773]: > Jan 22 13:18:57 crc kubenswrapper[4773]: I0122 13:18:57.493453 4773 generic.go:334] "Generic (PLEG): container finished" podID="2647cf5d-245f-46c4-81a1-f5c1b7fd0426" containerID="308484e91deabb32171d7af8a675788142922e220d52aad526a35cce3cce6946" exitCode=0 Jan 22 13:18:57 crc kubenswrapper[4773]: I0122 13:18:57.493542 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-8d2ls" event={"ID":"2647cf5d-245f-46c4-81a1-f5c1b7fd0426","Type":"ContainerDied","Data":"308484e91deabb32171d7af8a675788142922e220d52aad526a35cce3cce6946"} Jan 22 13:18:57 crc kubenswrapper[4773]: I0122 13:18:57.493841 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-8d2ls" event={"ID":"2647cf5d-245f-46c4-81a1-f5c1b7fd0426","Type":"ContainerStarted","Data":"646b5c7cd3a0dbd4b5e78d277b9c98f80d78f6e7d86f2ab856851d36fa8bad68"} Jan 22 13:18:58 crc kubenswrapper[4773]: I0122 13:18:58.883110 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-8d2ls" Jan 22 13:18:59 crc kubenswrapper[4773]: I0122 13:18:59.006063 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2647cf5d-245f-46c4-81a1-f5c1b7fd0426-operator-scripts\") pod \"2647cf5d-245f-46c4-81a1-f5c1b7fd0426\" (UID: \"2647cf5d-245f-46c4-81a1-f5c1b7fd0426\") " Jan 22 13:18:59 crc kubenswrapper[4773]: I0122 13:18:59.006152 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5p7hm\" (UniqueName: \"kubernetes.io/projected/2647cf5d-245f-46c4-81a1-f5c1b7fd0426-kube-api-access-5p7hm\") pod \"2647cf5d-245f-46c4-81a1-f5c1b7fd0426\" (UID: \"2647cf5d-245f-46c4-81a1-f5c1b7fd0426\") " Jan 22 13:18:59 crc kubenswrapper[4773]: I0122 13:18:59.006862 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2647cf5d-245f-46c4-81a1-f5c1b7fd0426-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2647cf5d-245f-46c4-81a1-f5c1b7fd0426" (UID: "2647cf5d-245f-46c4-81a1-f5c1b7fd0426"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:18:59 crc kubenswrapper[4773]: I0122 13:18:59.012596 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2647cf5d-245f-46c4-81a1-f5c1b7fd0426-kube-api-access-5p7hm" (OuterVolumeSpecName: "kube-api-access-5p7hm") pod "2647cf5d-245f-46c4-81a1-f5c1b7fd0426" (UID: "2647cf5d-245f-46c4-81a1-f5c1b7fd0426"). InnerVolumeSpecName "kube-api-access-5p7hm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:18:59 crc kubenswrapper[4773]: I0122 13:18:59.107900 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2647cf5d-245f-46c4-81a1-f5c1b7fd0426-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:18:59 crc kubenswrapper[4773]: I0122 13:18:59.107949 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5p7hm\" (UniqueName: \"kubernetes.io/projected/2647cf5d-245f-46c4-81a1-f5c1b7fd0426-kube-api-access-5p7hm\") on node \"crc\" DevicePath \"\"" Jan 22 13:18:59 crc kubenswrapper[4773]: I0122 13:18:59.513106 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-8d2ls" event={"ID":"2647cf5d-245f-46c4-81a1-f5c1b7fd0426","Type":"ContainerDied","Data":"646b5c7cd3a0dbd4b5e78d277b9c98f80d78f6e7d86f2ab856851d36fa8bad68"} Jan 22 13:18:59 crc kubenswrapper[4773]: I0122 13:18:59.513204 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="646b5c7cd3a0dbd4b5e78d277b9c98f80d78f6e7d86f2ab856851d36fa8bad68" Jan 22 13:18:59 crc kubenswrapper[4773]: I0122 13:18:59.513261 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-8d2ls" Jan 22 13:18:59 crc kubenswrapper[4773]: I0122 13:18:59.658543 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47" Jan 22 13:18:59 crc kubenswrapper[4773]: E0122 13:18:59.658898 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:19:02 crc kubenswrapper[4773]: I0122 13:19:02.132414 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-8d2ls"] Jan 22 13:19:02 crc kubenswrapper[4773]: I0122 13:19:02.139144 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-8d2ls"] Jan 22 13:19:02 crc kubenswrapper[4773]: I0122 13:19:02.668221 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2647cf5d-245f-46c4-81a1-f5c1b7fd0426" path="/var/lib/kubelet/pods/2647cf5d-245f-46c4-81a1-f5c1b7fd0426/volumes" Jan 22 13:19:05 crc kubenswrapper[4773]: I0122 13:19:05.907899 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6w2zd" Jan 22 13:19:05 crc kubenswrapper[4773]: I0122 13:19:05.956086 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6w2zd" Jan 22 13:19:07 crc kubenswrapper[4773]: I0122 13:19:07.151497 4773 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/root-account-create-update-rcpnr"] Jan 22 13:19:07 crc kubenswrapper[4773]: E0122 13:19:07.151825 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2647cf5d-245f-46c4-81a1-f5c1b7fd0426" containerName="mariadb-account-create-update" Jan 22 13:19:07 crc kubenswrapper[4773]: I0122 13:19:07.151842 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2647cf5d-245f-46c4-81a1-f5c1b7fd0426" containerName="mariadb-account-create-update" Jan 22 13:19:07 crc kubenswrapper[4773]: I0122 13:19:07.151999 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="2647cf5d-245f-46c4-81a1-f5c1b7fd0426" containerName="mariadb-account-create-update" Jan 22 13:19:07 crc kubenswrapper[4773]: I0122 13:19:07.152536 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rcpnr" Jan 22 13:19:07 crc kubenswrapper[4773]: I0122 13:19:07.155224 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Jan 22 13:19:07 crc kubenswrapper[4773]: I0122 13:19:07.196557 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-rcpnr"] Jan 22 13:19:07 crc kubenswrapper[4773]: I0122 13:19:07.253059 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/074aa48d-e0e1-4a84-9861-3a5b207868f8-operator-scripts\") pod \"root-account-create-update-rcpnr\" (UID: \"074aa48d-e0e1-4a84-9861-3a5b207868f8\") " pod="openstack/root-account-create-update-rcpnr" Jan 22 13:19:07 crc kubenswrapper[4773]: I0122 13:19:07.253138 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-km62b\" (UniqueName: \"kubernetes.io/projected/074aa48d-e0e1-4a84-9861-3a5b207868f8-kube-api-access-km62b\") pod \"root-account-create-update-rcpnr\" (UID: \"074aa48d-e0e1-4a84-9861-3a5b207868f8\") " pod="openstack/root-account-create-update-rcpnr" Jan 22 13:19:07 crc kubenswrapper[4773]: I0122 13:19:07.355145 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-km62b\" (UniqueName: \"kubernetes.io/projected/074aa48d-e0e1-4a84-9861-3a5b207868f8-kube-api-access-km62b\") pod \"root-account-create-update-rcpnr\" (UID: \"074aa48d-e0e1-4a84-9861-3a5b207868f8\") " pod="openstack/root-account-create-update-rcpnr" Jan 22 13:19:07 crc kubenswrapper[4773]: I0122 13:19:07.355392 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/074aa48d-e0e1-4a84-9861-3a5b207868f8-operator-scripts\") pod \"root-account-create-update-rcpnr\" (UID: \"074aa48d-e0e1-4a84-9861-3a5b207868f8\") " pod="openstack/root-account-create-update-rcpnr" Jan 22 13:19:07 crc kubenswrapper[4773]: I0122 13:19:07.356588 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/074aa48d-e0e1-4a84-9861-3a5b207868f8-operator-scripts\") pod \"root-account-create-update-rcpnr\" (UID: \"074aa48d-e0e1-4a84-9861-3a5b207868f8\") " pod="openstack/root-account-create-update-rcpnr" Jan 22 13:19:07 crc kubenswrapper[4773]: I0122 13:19:07.384946 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-km62b\" (UniqueName: \"kubernetes.io/projected/074aa48d-e0e1-4a84-9861-3a5b207868f8-kube-api-access-km62b\") pod 
\"root-account-create-update-rcpnr\" (UID: \"074aa48d-e0e1-4a84-9861-3a5b207868f8\") " pod="openstack/root-account-create-update-rcpnr" Jan 22 13:19:07 crc kubenswrapper[4773]: I0122 13:19:07.471187 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rcpnr" Jan 22 13:19:07 crc kubenswrapper[4773]: I0122 13:19:07.702443 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-rcpnr"] Jan 22 13:19:08 crc kubenswrapper[4773]: I0122 13:19:08.583004 4773 generic.go:334] "Generic (PLEG): container finished" podID="074aa48d-e0e1-4a84-9861-3a5b207868f8" containerID="ab459075581e81285a4568c9e59b5306c474c0a753ed56a273e932a886a197a8" exitCode=0 Jan 22 13:19:08 crc kubenswrapper[4773]: I0122 13:19:08.583175 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rcpnr" event={"ID":"074aa48d-e0e1-4a84-9861-3a5b207868f8","Type":"ContainerDied","Data":"ab459075581e81285a4568c9e59b5306c474c0a753ed56a273e932a886a197a8"} Jan 22 13:19:08 crc kubenswrapper[4773]: I0122 13:19:08.584332 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rcpnr" event={"ID":"074aa48d-e0e1-4a84-9861-3a5b207868f8","Type":"ContainerStarted","Data":"926af2c90edc1559ebacd26c1f6ac420d22b3ed88947450a04bd0c38b5a7765c"} Jan 22 13:19:09 crc kubenswrapper[4773]: I0122 13:19:09.399827 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6w2zd"] Jan 22 13:19:09 crc kubenswrapper[4773]: I0122 13:19:09.400113 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6w2zd" podUID="4845cf42-06af-4263-99e7-3f46d106d6ce" containerName="registry-server" containerID="cri-o://e8b7765963df9c2dc296c42af3f840a7e43f7d3e51b1c7d06860ffdb8f3fb040" gracePeriod=2 Jan 22 13:19:09 crc kubenswrapper[4773]: I0122 13:19:09.599691 4773 generic.go:334] "Generic (PLEG): container finished" podID="4845cf42-06af-4263-99e7-3f46d106d6ce" containerID="e8b7765963df9c2dc296c42af3f840a7e43f7d3e51b1c7d06860ffdb8f3fb040" exitCode=0 Jan 22 13:19:09 crc kubenswrapper[4773]: I0122 13:19:09.600161 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6w2zd" event={"ID":"4845cf42-06af-4263-99e7-3f46d106d6ce","Type":"ContainerDied","Data":"e8b7765963df9c2dc296c42af3f840a7e43f7d3e51b1c7d06860ffdb8f3fb040"} Jan 22 13:19:09 crc kubenswrapper[4773]: I0122 13:19:09.970372 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rcpnr" Jan 22 13:19:09 crc kubenswrapper[4773]: I0122 13:19:09.978251 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6w2zd" Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.112956 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4845cf42-06af-4263-99e7-3f46d106d6ce-catalog-content\") pod \"4845cf42-06af-4263-99e7-3f46d106d6ce\" (UID: \"4845cf42-06af-4263-99e7-3f46d106d6ce\") " Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.113024 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6g4t\" (UniqueName: \"kubernetes.io/projected/4845cf42-06af-4263-99e7-3f46d106d6ce-kube-api-access-v6g4t\") pod \"4845cf42-06af-4263-99e7-3f46d106d6ce\" (UID: \"4845cf42-06af-4263-99e7-3f46d106d6ce\") " Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.113071 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-km62b\" (UniqueName: \"kubernetes.io/projected/074aa48d-e0e1-4a84-9861-3a5b207868f8-kube-api-access-km62b\") pod \"074aa48d-e0e1-4a84-9861-3a5b207868f8\" (UID: \"074aa48d-e0e1-4a84-9861-3a5b207868f8\") " Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.113115 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/074aa48d-e0e1-4a84-9861-3a5b207868f8-operator-scripts\") pod \"074aa48d-e0e1-4a84-9861-3a5b207868f8\" (UID: \"074aa48d-e0e1-4a84-9861-3a5b207868f8\") " Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.113203 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4845cf42-06af-4263-99e7-3f46d106d6ce-utilities\") pod \"4845cf42-06af-4263-99e7-3f46d106d6ce\" (UID: \"4845cf42-06af-4263-99e7-3f46d106d6ce\") " Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.116245 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4845cf42-06af-4263-99e7-3f46d106d6ce-utilities" (OuterVolumeSpecName: "utilities") pod "4845cf42-06af-4263-99e7-3f46d106d6ce" (UID: "4845cf42-06af-4263-99e7-3f46d106d6ce"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.117117 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/074aa48d-e0e1-4a84-9861-3a5b207868f8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "074aa48d-e0e1-4a84-9861-3a5b207868f8" (UID: "074aa48d-e0e1-4a84-9861-3a5b207868f8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.118965 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/074aa48d-e0e1-4a84-9861-3a5b207868f8-kube-api-access-km62b" (OuterVolumeSpecName: "kube-api-access-km62b") pod "074aa48d-e0e1-4a84-9861-3a5b207868f8" (UID: "074aa48d-e0e1-4a84-9861-3a5b207868f8"). InnerVolumeSpecName "kube-api-access-km62b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.119110 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4845cf42-06af-4263-99e7-3f46d106d6ce-kube-api-access-v6g4t" (OuterVolumeSpecName: "kube-api-access-v6g4t") pod "4845cf42-06af-4263-99e7-3f46d106d6ce" (UID: "4845cf42-06af-4263-99e7-3f46d106d6ce"). InnerVolumeSpecName "kube-api-access-v6g4t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.169594 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4845cf42-06af-4263-99e7-3f46d106d6ce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4845cf42-06af-4263-99e7-3f46d106d6ce" (UID: "4845cf42-06af-4263-99e7-3f46d106d6ce"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.214719 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4845cf42-06af-4263-99e7-3f46d106d6ce-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.214750 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6g4t\" (UniqueName: \"kubernetes.io/projected/4845cf42-06af-4263-99e7-3f46d106d6ce-kube-api-access-v6g4t\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.214761 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-km62b\" (UniqueName: \"kubernetes.io/projected/074aa48d-e0e1-4a84-9861-3a5b207868f8-kube-api-access-km62b\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.214770 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/074aa48d-e0e1-4a84-9861-3a5b207868f8-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.214780 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4845cf42-06af-4263-99e7-3f46d106d6ce-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.610764 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6w2zd" event={"ID":"4845cf42-06af-4263-99e7-3f46d106d6ce","Type":"ContainerDied","Data":"11a65d149440de9b4ab26273efe28decc2f5add1d291bd5c58d1f7bd8318719b"} Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.610796 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6w2zd" Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.610832 4773 scope.go:117] "RemoveContainer" containerID="e8b7765963df9c2dc296c42af3f840a7e43f7d3e51b1c7d06860ffdb8f3fb040" Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.612789 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-rcpnr" Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.612888 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rcpnr" event={"ID":"074aa48d-e0e1-4a84-9861-3a5b207868f8","Type":"ContainerDied","Data":"926af2c90edc1559ebacd26c1f6ac420d22b3ed88947450a04bd0c38b5a7765c"} Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.612918 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="926af2c90edc1559ebacd26c1f6ac420d22b3ed88947450a04bd0c38b5a7765c" Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.614985 4773 generic.go:334] "Generic (PLEG): container finished" podID="4af4d4e7-d821-46cb-8609-c96c8521308b" containerID="1f6071ed6573416f9a0afb8ac9a69995e48c0bef9503f90c3e1502a753aac874" exitCode=0 Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.615065 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4af4d4e7-d821-46cb-8609-c96c8521308b","Type":"ContainerDied","Data":"1f6071ed6573416f9a0afb8ac9a69995e48c0bef9503f90c3e1502a753aac874"} Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.631458 4773 scope.go:117] "RemoveContainer" containerID="4e2376eac03a7f1be81af64f0db08e3134e3617d0ae396f20ee8969b7e4bfae1" Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.773036 4773 scope.go:117] "RemoveContainer" containerID="14f192df553db38eab6a100634de38cc6a338e6c263354972586556bffcb00af" Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.794665 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6w2zd"] Jan 22 13:19:10 crc kubenswrapper[4773]: I0122 13:19:10.802057 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6w2zd"] Jan 22 13:19:11 crc kubenswrapper[4773]: I0122 13:19:11.626386 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4af4d4e7-d821-46cb-8609-c96c8521308b","Type":"ContainerStarted","Data":"58d59290253ce6fa99d03ae9a4571d58c97f3d4238863368118310c2eac12293"} Jan 22 13:19:11 crc kubenswrapper[4773]: I0122 13:19:11.626987 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:11 crc kubenswrapper[4773]: I0122 13:19:11.655882 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.655859113 podStartE2EDuration="37.655859113s" podCreationTimestamp="2026-01-22 13:18:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:19:11.652592221 +0000 UTC m=+5059.230708036" watchObservedRunningTime="2026-01-22 13:19:11.655859113 +0000 UTC m=+5059.233974938" Jan 22 13:19:12 crc kubenswrapper[4773]: I0122 13:19:12.636118 4773 generic.go:334] "Generic (PLEG): container finished" podID="cfc01903-fce2-4ce5-a669-804e05bafa95" containerID="538e32fa72f826b6e67be54e198a25927b8ca9ae8d797c21776a9a5b4f0e6505" exitCode=0 Jan 22 13:19:12 crc kubenswrapper[4773]: I0122 13:19:12.636203 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cfc01903-fce2-4ce5-a669-804e05bafa95","Type":"ContainerDied","Data":"538e32fa72f826b6e67be54e198a25927b8ca9ae8d797c21776a9a5b4f0e6505"} Jan 22 13:19:12 crc kubenswrapper[4773]: I0122 13:19:12.671961 4773 
scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47" Jan 22 13:19:12 crc kubenswrapper[4773]: E0122 13:19:12.672439 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:19:12 crc kubenswrapper[4773]: I0122 13:19:12.684362 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4845cf42-06af-4263-99e7-3f46d106d6ce" path="/var/lib/kubelet/pods/4845cf42-06af-4263-99e7-3f46d106d6ce/volumes" Jan 22 13:19:13 crc kubenswrapper[4773]: I0122 13:19:13.645736 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cfc01903-fce2-4ce5-a669-804e05bafa95","Type":"ContainerStarted","Data":"440893ba46b0adfb68ff350df88a7c5b7ce675a5eb6ac1e05f4d17b5a1cd1178"} Jan 22 13:19:13 crc kubenswrapper[4773]: I0122 13:19:13.646299 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jan 22 13:19:13 crc kubenswrapper[4773]: I0122 13:19:13.672807 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.67277979 podStartE2EDuration="38.67277979s" podCreationTimestamp="2026-01-22 13:18:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:19:13.667421859 +0000 UTC m=+5061.245537704" watchObservedRunningTime="2026-01-22 13:19:13.67277979 +0000 UTC m=+5061.250895625" Jan 22 13:19:25 crc kubenswrapper[4773]: I0122 13:19:25.995772 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:26 crc kubenswrapper[4773]: I0122 13:19:26.569594 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Jan 22 13:19:27 crc kubenswrapper[4773]: I0122 13:19:27.658056 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47" Jan 22 13:19:27 crc kubenswrapper[4773]: E0122 13:19:27.658310 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:19:30 crc kubenswrapper[4773]: I0122 13:19:30.386225 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-699964fbc-msmv4"] Jan 22 13:19:30 crc kubenswrapper[4773]: E0122 13:19:30.386824 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="074aa48d-e0e1-4a84-9861-3a5b207868f8" containerName="mariadb-account-create-update" Jan 22 13:19:30 crc kubenswrapper[4773]: I0122 13:19:30.386839 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="074aa48d-e0e1-4a84-9861-3a5b207868f8" containerName="mariadb-account-create-update" Jan 22 13:19:30 crc kubenswrapper[4773]: E0122 13:19:30.386857 4773 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4845cf42-06af-4263-99e7-3f46d106d6ce" containerName="extract-content" Jan 22 13:19:30 crc kubenswrapper[4773]: I0122 13:19:30.386863 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="4845cf42-06af-4263-99e7-3f46d106d6ce" containerName="extract-content" Jan 22 13:19:30 crc kubenswrapper[4773]: E0122 13:19:30.386877 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4845cf42-06af-4263-99e7-3f46d106d6ce" containerName="extract-utilities" Jan 22 13:19:30 crc kubenswrapper[4773]: I0122 13:19:30.386884 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="4845cf42-06af-4263-99e7-3f46d106d6ce" containerName="extract-utilities" Jan 22 13:19:30 crc kubenswrapper[4773]: E0122 13:19:30.386896 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4845cf42-06af-4263-99e7-3f46d106d6ce" containerName="registry-server" Jan 22 13:19:30 crc kubenswrapper[4773]: I0122 13:19:30.386902 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="4845cf42-06af-4263-99e7-3f46d106d6ce" containerName="registry-server" Jan 22 13:19:30 crc kubenswrapper[4773]: I0122 13:19:30.387058 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="074aa48d-e0e1-4a84-9861-3a5b207868f8" containerName="mariadb-account-create-update" Jan 22 13:19:30 crc kubenswrapper[4773]: I0122 13:19:30.387078 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="4845cf42-06af-4263-99e7-3f46d106d6ce" containerName="registry-server" Jan 22 13:19:30 crc kubenswrapper[4773]: I0122 13:19:30.387945 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-699964fbc-msmv4" Jan 22 13:19:30 crc kubenswrapper[4773]: I0122 13:19:30.405061 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-msmv4"] Jan 22 13:19:30 crc kubenswrapper[4773]: I0122 13:19:30.527103 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c8803480-c4a7-4eee-ac77-a6d9838af931-dns-svc\") pod \"dnsmasq-dns-699964fbc-msmv4\" (UID: \"c8803480-c4a7-4eee-ac77-a6d9838af931\") " pod="openstack/dnsmasq-dns-699964fbc-msmv4" Jan 22 13:19:30 crc kubenswrapper[4773]: I0122 13:19:30.527173 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8803480-c4a7-4eee-ac77-a6d9838af931-config\") pod \"dnsmasq-dns-699964fbc-msmv4\" (UID: \"c8803480-c4a7-4eee-ac77-a6d9838af931\") " pod="openstack/dnsmasq-dns-699964fbc-msmv4" Jan 22 13:19:30 crc kubenswrapper[4773]: I0122 13:19:30.528912 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8bgm\" (UniqueName: \"kubernetes.io/projected/c8803480-c4a7-4eee-ac77-a6d9838af931-kube-api-access-f8bgm\") pod \"dnsmasq-dns-699964fbc-msmv4\" (UID: \"c8803480-c4a7-4eee-ac77-a6d9838af931\") " pod="openstack/dnsmasq-dns-699964fbc-msmv4" Jan 22 13:19:30 crc kubenswrapper[4773]: I0122 13:19:30.630228 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c8803480-c4a7-4eee-ac77-a6d9838af931-dns-svc\") pod \"dnsmasq-dns-699964fbc-msmv4\" (UID: \"c8803480-c4a7-4eee-ac77-a6d9838af931\") " pod="openstack/dnsmasq-dns-699964fbc-msmv4" Jan 22 13:19:30 crc kubenswrapper[4773]: I0122 13:19:30.630370 4773 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8803480-c4a7-4eee-ac77-a6d9838af931-config\") pod \"dnsmasq-dns-699964fbc-msmv4\" (UID: \"c8803480-c4a7-4eee-ac77-a6d9838af931\") " pod="openstack/dnsmasq-dns-699964fbc-msmv4" Jan 22 13:19:30 crc kubenswrapper[4773]: I0122 13:19:30.630490 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8bgm\" (UniqueName: \"kubernetes.io/projected/c8803480-c4a7-4eee-ac77-a6d9838af931-kube-api-access-f8bgm\") pod \"dnsmasq-dns-699964fbc-msmv4\" (UID: \"c8803480-c4a7-4eee-ac77-a6d9838af931\") " pod="openstack/dnsmasq-dns-699964fbc-msmv4" Jan 22 13:19:30 crc kubenswrapper[4773]: I0122 13:19:30.631224 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c8803480-c4a7-4eee-ac77-a6d9838af931-dns-svc\") pod \"dnsmasq-dns-699964fbc-msmv4\" (UID: \"c8803480-c4a7-4eee-ac77-a6d9838af931\") " pod="openstack/dnsmasq-dns-699964fbc-msmv4" Jan 22 13:19:30 crc kubenswrapper[4773]: I0122 13:19:30.631372 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8803480-c4a7-4eee-ac77-a6d9838af931-config\") pod \"dnsmasq-dns-699964fbc-msmv4\" (UID: \"c8803480-c4a7-4eee-ac77-a6d9838af931\") " pod="openstack/dnsmasq-dns-699964fbc-msmv4" Jan 22 13:19:30 crc kubenswrapper[4773]: I0122 13:19:30.655313 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8bgm\" (UniqueName: \"kubernetes.io/projected/c8803480-c4a7-4eee-ac77-a6d9838af931-kube-api-access-f8bgm\") pod \"dnsmasq-dns-699964fbc-msmv4\" (UID: \"c8803480-c4a7-4eee-ac77-a6d9838af931\") " pod="openstack/dnsmasq-dns-699964fbc-msmv4" Jan 22 13:19:30 crc kubenswrapper[4773]: I0122 13:19:30.711955 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-699964fbc-msmv4" Jan 22 13:19:31 crc kubenswrapper[4773]: I0122 13:19:31.114876 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 13:19:31 crc kubenswrapper[4773]: I0122 13:19:31.244013 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-msmv4"] Jan 22 13:19:31 crc kubenswrapper[4773]: I0122 13:19:31.751621 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 22 13:19:31 crc kubenswrapper[4773]: I0122 13:19:31.798181 4773 generic.go:334] "Generic (PLEG): container finished" podID="c8803480-c4a7-4eee-ac77-a6d9838af931" containerID="fb052a5190d910ebc6abffeb7f24307f0c8744122b6dd1c08f6892358f98385b" exitCode=0 Jan 22 13:19:31 crc kubenswrapper[4773]: I0122 13:19:31.798232 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-msmv4" event={"ID":"c8803480-c4a7-4eee-ac77-a6d9838af931","Type":"ContainerDied","Data":"fb052a5190d910ebc6abffeb7f24307f0c8744122b6dd1c08f6892358f98385b"} Jan 22 13:19:31 crc kubenswrapper[4773]: I0122 13:19:31.798265 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-msmv4" event={"ID":"c8803480-c4a7-4eee-ac77-a6d9838af931","Type":"ContainerStarted","Data":"2e3eefb65927ae0358ae0571853995b3ed6ff42e27fe217e85192de1950f18b7"} Jan 22 13:19:32 crc kubenswrapper[4773]: I0122 13:19:32.808177 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-msmv4" event={"ID":"c8803480-c4a7-4eee-ac77-a6d9838af931","Type":"ContainerStarted","Data":"7b7ad8b0e4ed026a75819ac37467ffc8b0ea9b0edb15ed266bb7369204df7860"} Jan 22 13:19:32 crc kubenswrapper[4773]: I0122 13:19:32.808438 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-699964fbc-msmv4" Jan 22 13:19:32 crc kubenswrapper[4773]: I0122 13:19:32.826399 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-699964fbc-msmv4" podStartSLOduration=2.8263717010000002 podStartE2EDuration="2.826371701s" podCreationTimestamp="2026-01-22 13:19:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:19:32.82101412 +0000 UTC m=+5080.399129945" watchObservedRunningTime="2026-01-22 13:19:32.826371701 +0000 UTC m=+5080.404487526" Jan 22 13:19:35 crc kubenswrapper[4773]: I0122 13:19:35.477335 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="cfc01903-fce2-4ce5-a669-804e05bafa95" containerName="rabbitmq" containerID="cri-o://440893ba46b0adfb68ff350df88a7c5b7ce675a5eb6ac1e05f4d17b5a1cd1178" gracePeriod=604796 Jan 22 13:19:36 crc kubenswrapper[4773]: I0122 13:19:36.055431 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="4af4d4e7-d821-46cb-8609-c96c8521308b" containerName="rabbitmq" containerID="cri-o://58d59290253ce6fa99d03ae9a4571d58c97f3d4238863368118310c2eac12293" gracePeriod=604796 Jan 22 13:19:36 crc kubenswrapper[4773]: I0122 13:19:36.565088 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="cfc01903-fce2-4ce5-a669-804e05bafa95" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.252:5671: connect: connection refused" Jan 22 13:19:40 crc kubenswrapper[4773]: I0122 
13:19:40.659236 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47" Jan 22 13:19:40 crc kubenswrapper[4773]: E0122 13:19:40.660339 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:19:40 crc kubenswrapper[4773]: I0122 13:19:40.713546 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-699964fbc-msmv4" Jan 22 13:19:40 crc kubenswrapper[4773]: I0122 13:19:40.767109 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-c256q"] Jan 22 13:19:40 crc kubenswrapper[4773]: I0122 13:19:40.767451 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d79f765b5-c256q" podUID="090e1ede-1784-49b2-9f35-400070db2926" containerName="dnsmasq-dns" containerID="cri-o://3be9d14748b93a50eefd07a1320d027a21f364a3ecadadacd9efb9857c72a76d" gracePeriod=10 Jan 22 13:19:41 crc kubenswrapper[4773]: I0122 13:19:41.788315 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d79f765b5-c256q" Jan 22 13:19:41 crc kubenswrapper[4773]: I0122 13:19:41.896866 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/090e1ede-1784-49b2-9f35-400070db2926-config\") pod \"090e1ede-1784-49b2-9f35-400070db2926\" (UID: \"090e1ede-1784-49b2-9f35-400070db2926\") " Jan 22 13:19:41 crc kubenswrapper[4773]: I0122 13:19:41.896977 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/090e1ede-1784-49b2-9f35-400070db2926-dns-svc\") pod \"090e1ede-1784-49b2-9f35-400070db2926\" (UID: \"090e1ede-1784-49b2-9f35-400070db2926\") " Jan 22 13:19:41 crc kubenswrapper[4773]: I0122 13:19:41.897038 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgqd7\" (UniqueName: \"kubernetes.io/projected/090e1ede-1784-49b2-9f35-400070db2926-kube-api-access-hgqd7\") pod \"090e1ede-1784-49b2-9f35-400070db2926\" (UID: \"090e1ede-1784-49b2-9f35-400070db2926\") " Jan 22 13:19:41 crc kubenswrapper[4773]: I0122 13:19:41.900967 4773 generic.go:334] "Generic (PLEG): container finished" podID="090e1ede-1784-49b2-9f35-400070db2926" containerID="3be9d14748b93a50eefd07a1320d027a21f364a3ecadadacd9efb9857c72a76d" exitCode=0 Jan 22 13:19:41 crc kubenswrapper[4773]: I0122 13:19:41.901046 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d79f765b5-c256q" Jan 22 13:19:41 crc kubenswrapper[4773]: I0122 13:19:41.901063 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d79f765b5-c256q" event={"ID":"090e1ede-1784-49b2-9f35-400070db2926","Type":"ContainerDied","Data":"3be9d14748b93a50eefd07a1320d027a21f364a3ecadadacd9efb9857c72a76d"} Jan 22 13:19:41 crc kubenswrapper[4773]: I0122 13:19:41.901137 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d79f765b5-c256q" event={"ID":"090e1ede-1784-49b2-9f35-400070db2926","Type":"ContainerDied","Data":"b0044ffc18606d16b60601653287fe4d0f98fc86b96a4bb557eb0716bbb14200"} Jan 22 13:19:41 crc kubenswrapper[4773]: I0122 13:19:41.901170 4773 scope.go:117] "RemoveContainer" containerID="3be9d14748b93a50eefd07a1320d027a21f364a3ecadadacd9efb9857c72a76d" Jan 22 13:19:41 crc kubenswrapper[4773]: I0122 13:19:41.902760 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/090e1ede-1784-49b2-9f35-400070db2926-kube-api-access-hgqd7" (OuterVolumeSpecName: "kube-api-access-hgqd7") pod "090e1ede-1784-49b2-9f35-400070db2926" (UID: "090e1ede-1784-49b2-9f35-400070db2926"). InnerVolumeSpecName "kube-api-access-hgqd7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:19:41 crc kubenswrapper[4773]: I0122 13:19:41.937432 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/090e1ede-1784-49b2-9f35-400070db2926-config" (OuterVolumeSpecName: "config") pod "090e1ede-1784-49b2-9f35-400070db2926" (UID: "090e1ede-1784-49b2-9f35-400070db2926"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:19:41 crc kubenswrapper[4773]: I0122 13:19:41.941795 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/090e1ede-1784-49b2-9f35-400070db2926-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "090e1ede-1784-49b2-9f35-400070db2926" (UID: "090e1ede-1784-49b2-9f35-400070db2926"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:19:41 crc kubenswrapper[4773]: I0122 13:19:41.994569 4773 scope.go:117] "RemoveContainer" containerID="5acb4ea8fa4b2f0bfa3ebc61da4fb84325c9ac899c8ca92de25ce470758bf1e7" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.005882 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/090e1ede-1784-49b2-9f35-400070db2926-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.005921 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgqd7\" (UniqueName: \"kubernetes.io/projected/090e1ede-1784-49b2-9f35-400070db2926-kube-api-access-hgqd7\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.005934 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/090e1ede-1784-49b2-9f35-400070db2926-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.012201 4773 scope.go:117] "RemoveContainer" containerID="3be9d14748b93a50eefd07a1320d027a21f364a3ecadadacd9efb9857c72a76d" Jan 22 13:19:42 crc kubenswrapper[4773]: E0122 13:19:42.012883 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3be9d14748b93a50eefd07a1320d027a21f364a3ecadadacd9efb9857c72a76d\": container with ID starting with 3be9d14748b93a50eefd07a1320d027a21f364a3ecadadacd9efb9857c72a76d not found: ID does not exist" containerID="3be9d14748b93a50eefd07a1320d027a21f364a3ecadadacd9efb9857c72a76d" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.012963 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3be9d14748b93a50eefd07a1320d027a21f364a3ecadadacd9efb9857c72a76d"} err="failed to get container status \"3be9d14748b93a50eefd07a1320d027a21f364a3ecadadacd9efb9857c72a76d\": rpc error: code = NotFound desc = could not find container \"3be9d14748b93a50eefd07a1320d027a21f364a3ecadadacd9efb9857c72a76d\": container with ID starting with 3be9d14748b93a50eefd07a1320d027a21f364a3ecadadacd9efb9857c72a76d not found: ID does not exist" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.013001 4773 scope.go:117] "RemoveContainer" containerID="5acb4ea8fa4b2f0bfa3ebc61da4fb84325c9ac899c8ca92de25ce470758bf1e7" Jan 22 13:19:42 crc kubenswrapper[4773]: E0122 13:19:42.013398 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5acb4ea8fa4b2f0bfa3ebc61da4fb84325c9ac899c8ca92de25ce470758bf1e7\": container with ID starting with 5acb4ea8fa4b2f0bfa3ebc61da4fb84325c9ac899c8ca92de25ce470758bf1e7 not found: ID does not exist" containerID="5acb4ea8fa4b2f0bfa3ebc61da4fb84325c9ac899c8ca92de25ce470758bf1e7" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.013427 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5acb4ea8fa4b2f0bfa3ebc61da4fb84325c9ac899c8ca92de25ce470758bf1e7"} err="failed to get container status \"5acb4ea8fa4b2f0bfa3ebc61da4fb84325c9ac899c8ca92de25ce470758bf1e7\": rpc error: code = NotFound desc = could not find container \"5acb4ea8fa4b2f0bfa3ebc61da4fb84325c9ac899c8ca92de25ce470758bf1e7\": container with ID starting with 5acb4ea8fa4b2f0bfa3ebc61da4fb84325c9ac899c8ca92de25ce470758bf1e7 not found: ID does not exist" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 
13:19:42.234730 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-c256q"] Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.239517 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-c256q"] Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.670561 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="090e1ede-1784-49b2-9f35-400070db2926" path="/var/lib/kubelet/pods/090e1ede-1784-49b2-9f35-400070db2926/volumes" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.696595 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.819991 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cfc01903-fce2-4ce5-a669-804e05bafa95-pod-info\") pod \"cfc01903-fce2-4ce5-a669-804e05bafa95\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.820074 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cfc01903-fce2-4ce5-a669-804e05bafa95-server-conf\") pod \"cfc01903-fce2-4ce5-a669-804e05bafa95\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.820149 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-plugins\") pod \"cfc01903-fce2-4ce5-a669-804e05bafa95\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.820169 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cfc01903-fce2-4ce5-a669-804e05bafa95-config-data\") pod \"cfc01903-fce2-4ce5-a669-804e05bafa95\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.820208 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-erlang-cookie\") pod \"cfc01903-fce2-4ce5-a669-804e05bafa95\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.820246 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-tls\") pod \"cfc01903-fce2-4ce5-a669-804e05bafa95\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.820276 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nlqjw\" (UniqueName: \"kubernetes.io/projected/cfc01903-fce2-4ce5-a669-804e05bafa95-kube-api-access-nlqjw\") pod \"cfc01903-fce2-4ce5-a669-804e05bafa95\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.820391 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-confd\") pod \"cfc01903-fce2-4ce5-a669-804e05bafa95\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " Jan 
22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.820614 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\") pod \"cfc01903-fce2-4ce5-a669-804e05bafa95\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.820655 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cfc01903-fce2-4ce5-a669-804e05bafa95-erlang-cookie-secret\") pod \"cfc01903-fce2-4ce5-a669-804e05bafa95\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.820702 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cfc01903-fce2-4ce5-a669-804e05bafa95-plugins-conf\") pod \"cfc01903-fce2-4ce5-a669-804e05bafa95\" (UID: \"cfc01903-fce2-4ce5-a669-804e05bafa95\") " Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.821732 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "cfc01903-fce2-4ce5-a669-804e05bafa95" (UID: "cfc01903-fce2-4ce5-a669-804e05bafa95"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.821863 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfc01903-fce2-4ce5-a669-804e05bafa95-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "cfc01903-fce2-4ce5-a669-804e05bafa95" (UID: "cfc01903-fce2-4ce5-a669-804e05bafa95"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.823791 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "cfc01903-fce2-4ce5-a669-804e05bafa95" (UID: "cfc01903-fce2-4ce5-a669-804e05bafa95"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.835252 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfc01903-fce2-4ce5-a669-804e05bafa95-kube-api-access-nlqjw" (OuterVolumeSpecName: "kube-api-access-nlqjw") pod "cfc01903-fce2-4ce5-a669-804e05bafa95" (UID: "cfc01903-fce2-4ce5-a669-804e05bafa95"). InnerVolumeSpecName "kube-api-access-nlqjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.842484 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "cfc01903-fce2-4ce5-a669-804e05bafa95" (UID: "cfc01903-fce2-4ce5-a669-804e05bafa95"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.860819 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/cfc01903-fce2-4ce5-a669-804e05bafa95-pod-info" (OuterVolumeSpecName: "pod-info") pod "cfc01903-fce2-4ce5-a669-804e05bafa95" (UID: "cfc01903-fce2-4ce5-a669-804e05bafa95"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.877502 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfc01903-fce2-4ce5-a669-804e05bafa95-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "cfc01903-fce2-4ce5-a669-804e05bafa95" (UID: "cfc01903-fce2-4ce5-a669-804e05bafa95"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.922452 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.923270 4773 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cfc01903-fce2-4ce5-a669-804e05bafa95-pod-info\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.933430 4773 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.933452 4773 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.933464 4773 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.933479 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nlqjw\" (UniqueName: \"kubernetes.io/projected/cfc01903-fce2-4ce5-a669-804e05bafa95-kube-api-access-nlqjw\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.933491 4773 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cfc01903-fce2-4ce5-a669-804e05bafa95-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.933503 4773 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cfc01903-fce2-4ce5-a669-804e05bafa95-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.933839 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfc01903-fce2-4ce5-a669-804e05bafa95-config-data" (OuterVolumeSpecName: "config-data") pod "cfc01903-fce2-4ce5-a669-804e05bafa95" (UID: "cfc01903-fce2-4ce5-a669-804e05bafa95"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.939566 4773 generic.go:334] "Generic (PLEG): container finished" podID="cfc01903-fce2-4ce5-a669-804e05bafa95" containerID="440893ba46b0adfb68ff350df88a7c5b7ce675a5eb6ac1e05f4d17b5a1cd1178" exitCode=0 Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.939665 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cfc01903-fce2-4ce5-a669-804e05bafa95","Type":"ContainerDied","Data":"440893ba46b0adfb68ff350df88a7c5b7ce675a5eb6ac1e05f4d17b5a1cd1178"} Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.939695 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"cfc01903-fce2-4ce5-a669-804e05bafa95","Type":"ContainerDied","Data":"424c3566acb93c2ab0643d6063736d239efd5a305a3cca0bb4f978c052d70ca5"} Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.939713 4773 scope.go:117] "RemoveContainer" containerID="440893ba46b0adfb68ff350df88a7c5b7ce675a5eb6ac1e05f4d17b5a1cd1178" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.939821 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.964391 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6" (OuterVolumeSpecName: "persistence") pod "cfc01903-fce2-4ce5-a669-804e05bafa95" (UID: "cfc01903-fce2-4ce5-a669-804e05bafa95"). InnerVolumeSpecName "pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.964859 4773 generic.go:334] "Generic (PLEG): container finished" podID="4af4d4e7-d821-46cb-8609-c96c8521308b" containerID="58d59290253ce6fa99d03ae9a4571d58c97f3d4238863368118310c2eac12293" exitCode=0 Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.965399 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.965837 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4af4d4e7-d821-46cb-8609-c96c8521308b","Type":"ContainerDied","Data":"58d59290253ce6fa99d03ae9a4571d58c97f3d4238863368118310c2eac12293"} Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.965888 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"4af4d4e7-d821-46cb-8609-c96c8521308b","Type":"ContainerDied","Data":"407679c7233bc47fccffc61ef2da34098d9058d6b039b705180a1b76fa8c8276"} Jan 22 13:19:42 crc kubenswrapper[4773]: I0122 13:19:42.984473 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfc01903-fce2-4ce5-a669-804e05bafa95-server-conf" (OuterVolumeSpecName: "server-conf") pod "cfc01903-fce2-4ce5-a669-804e05bafa95" (UID: "cfc01903-fce2-4ce5-a669-804e05bafa95"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.035790 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4af4d4e7-d821-46cb-8609-c96c8521308b-config-data\") pod \"4af4d4e7-d821-46cb-8609-c96c8521308b\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.035842 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-tls\") pod \"4af4d4e7-d821-46cb-8609-c96c8521308b\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.035869 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-plugins\") pod \"4af4d4e7-d821-46cb-8609-c96c8521308b\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.036074 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-057d2589-ccf8-41c4-833a-de31f025f26b\") pod \"4af4d4e7-d821-46cb-8609-c96c8521308b\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.036178 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-erlang-cookie\") pod \"4af4d4e7-d821-46cb-8609-c96c8521308b\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.036216 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4af4d4e7-d821-46cb-8609-c96c8521308b-server-conf\") pod \"4af4d4e7-d821-46cb-8609-c96c8521308b\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.036235 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4af4d4e7-d821-46cb-8609-c96c8521308b-pod-info\") pod \"4af4d4e7-d821-46cb-8609-c96c8521308b\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.036314 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4af4d4e7-d821-46cb-8609-c96c8521308b-plugins-conf\") pod \"4af4d4e7-d821-46cb-8609-c96c8521308b\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.036339 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-confd\") pod \"4af4d4e7-d821-46cb-8609-c96c8521308b\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.036364 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4af4d4e7-d821-46cb-8609-c96c8521308b-erlang-cookie-secret\") pod 
\"4af4d4e7-d821-46cb-8609-c96c8521308b\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.036406 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5k86b\" (UniqueName: \"kubernetes.io/projected/4af4d4e7-d821-46cb-8609-c96c8521308b-kube-api-access-5k86b\") pod \"4af4d4e7-d821-46cb-8609-c96c8521308b\" (UID: \"4af4d4e7-d821-46cb-8609-c96c8521308b\") " Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.036629 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "4af4d4e7-d821-46cb-8609-c96c8521308b" (UID: "4af4d4e7-d821-46cb-8609-c96c8521308b"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.036754 4773 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cfc01903-fce2-4ce5-a669-804e05bafa95-server-conf\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.036773 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cfc01903-fce2-4ce5-a669-804e05bafa95-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.036786 4773 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.036832 4773 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\") on node \"crc\" " Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.039791 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4af4d4e7-d821-46cb-8609-c96c8521308b-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "4af4d4e7-d821-46cb-8609-c96c8521308b" (UID: "4af4d4e7-d821-46cb-8609-c96c8521308b"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.062834 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "4af4d4e7-d821-46cb-8609-c96c8521308b" (UID: "4af4d4e7-d821-46cb-8609-c96c8521308b"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.063591 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4af4d4e7-d821-46cb-8609-c96c8521308b-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "4af4d4e7-d821-46cb-8609-c96c8521308b" (UID: "4af4d4e7-d821-46cb-8609-c96c8521308b"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.063909 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "4af4d4e7-d821-46cb-8609-c96c8521308b" (UID: "4af4d4e7-d821-46cb-8609-c96c8521308b"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.089558 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/4af4d4e7-d821-46cb-8609-c96c8521308b-pod-info" (OuterVolumeSpecName: "pod-info") pod "4af4d4e7-d821-46cb-8609-c96c8521308b" (UID: "4af4d4e7-d821-46cb-8609-c96c8521308b"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.089582 4773 scope.go:117] "RemoveContainer" containerID="538e32fa72f826b6e67be54e198a25927b8ca9ae8d797c21776a9a5b4f0e6505" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.089740 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4af4d4e7-d821-46cb-8609-c96c8521308b-kube-api-access-5k86b" (OuterVolumeSpecName: "kube-api-access-5k86b") pod "4af4d4e7-d821-46cb-8609-c96c8521308b" (UID: "4af4d4e7-d821-46cb-8609-c96c8521308b"). InnerVolumeSpecName "kube-api-access-5k86b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.103479 4773 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.103644 4773 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6") on node "crc" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.121323 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4af4d4e7-d821-46cb-8609-c96c8521308b-config-data" (OuterVolumeSpecName: "config-data") pod "4af4d4e7-d821-46cb-8609-c96c8521308b" (UID: "4af4d4e7-d821-46cb-8609-c96c8521308b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.127699 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-057d2589-ccf8-41c4-833a-de31f025f26b" (OuterVolumeSpecName: "persistence") pod "4af4d4e7-d821-46cb-8609-c96c8521308b" (UID: "4af4d4e7-d821-46cb-8609-c96c8521308b"). InnerVolumeSpecName "pvc-057d2589-ccf8-41c4-833a-de31f025f26b". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.139267 4773 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4af4d4e7-d821-46cb-8609-c96c8521308b-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.139322 4773 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4af4d4e7-d821-46cb-8609-c96c8521308b-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.139343 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5k86b\" (UniqueName: \"kubernetes.io/projected/4af4d4e7-d821-46cb-8609-c96c8521308b-kube-api-access-5k86b\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.139357 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4af4d4e7-d821-46cb-8609-c96c8521308b-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.139367 4773 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.139382 4773 reconciler_common.go:293] "Volume detached for volume \"pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.139415 4773 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-057d2589-ccf8-41c4-833a-de31f025f26b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-057d2589-ccf8-41c4-833a-de31f025f26b\") on node \"crc\" " Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.139430 4773 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.139441 4773 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4af4d4e7-d821-46cb-8609-c96c8521308b-pod-info\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.145783 4773 scope.go:117] "RemoveContainer" containerID="440893ba46b0adfb68ff350df88a7c5b7ce675a5eb6ac1e05f4d17b5a1cd1178" Jan 22 13:19:43 crc kubenswrapper[4773]: E0122 13:19:43.146471 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"440893ba46b0adfb68ff350df88a7c5b7ce675a5eb6ac1e05f4d17b5a1cd1178\": container with ID starting with 440893ba46b0adfb68ff350df88a7c5b7ce675a5eb6ac1e05f4d17b5a1cd1178 not found: ID does not exist" containerID="440893ba46b0adfb68ff350df88a7c5b7ce675a5eb6ac1e05f4d17b5a1cd1178" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.146546 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"440893ba46b0adfb68ff350df88a7c5b7ce675a5eb6ac1e05f4d17b5a1cd1178"} err="failed to get container status \"440893ba46b0adfb68ff350df88a7c5b7ce675a5eb6ac1e05f4d17b5a1cd1178\": 
rpc error: code = NotFound desc = could not find container \"440893ba46b0adfb68ff350df88a7c5b7ce675a5eb6ac1e05f4d17b5a1cd1178\": container with ID starting with 440893ba46b0adfb68ff350df88a7c5b7ce675a5eb6ac1e05f4d17b5a1cd1178 not found: ID does not exist" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.146587 4773 scope.go:117] "RemoveContainer" containerID="538e32fa72f826b6e67be54e198a25927b8ca9ae8d797c21776a9a5b4f0e6505" Jan 22 13:19:43 crc kubenswrapper[4773]: E0122 13:19:43.147471 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"538e32fa72f826b6e67be54e198a25927b8ca9ae8d797c21776a9a5b4f0e6505\": container with ID starting with 538e32fa72f826b6e67be54e198a25927b8ca9ae8d797c21776a9a5b4f0e6505 not found: ID does not exist" containerID="538e32fa72f826b6e67be54e198a25927b8ca9ae8d797c21776a9a5b4f0e6505" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.147532 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"538e32fa72f826b6e67be54e198a25927b8ca9ae8d797c21776a9a5b4f0e6505"} err="failed to get container status \"538e32fa72f826b6e67be54e198a25927b8ca9ae8d797c21776a9a5b4f0e6505\": rpc error: code = NotFound desc = could not find container \"538e32fa72f826b6e67be54e198a25927b8ca9ae8d797c21776a9a5b4f0e6505\": container with ID starting with 538e32fa72f826b6e67be54e198a25927b8ca9ae8d797c21776a9a5b4f0e6505 not found: ID does not exist" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.147553 4773 scope.go:117] "RemoveContainer" containerID="58d59290253ce6fa99d03ae9a4571d58c97f3d4238863368118310c2eac12293" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.152879 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4af4d4e7-d821-46cb-8609-c96c8521308b-server-conf" (OuterVolumeSpecName: "server-conf") pod "4af4d4e7-d821-46cb-8609-c96c8521308b" (UID: "4af4d4e7-d821-46cb-8609-c96c8521308b"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.171532 4773 scope.go:117] "RemoveContainer" containerID="1f6071ed6573416f9a0afb8ac9a69995e48c0bef9503f90c3e1502a753aac874" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.171545 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "cfc01903-fce2-4ce5-a669-804e05bafa95" (UID: "cfc01903-fce2-4ce5-a669-804e05bafa95"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.174149 4773 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.174273 4773 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-057d2589-ccf8-41c4-833a-de31f025f26b" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-057d2589-ccf8-41c4-833a-de31f025f26b") on node "crc" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.175837 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "4af4d4e7-d821-46cb-8609-c96c8521308b" (UID: "4af4d4e7-d821-46cb-8609-c96c8521308b"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.187194 4773 scope.go:117] "RemoveContainer" containerID="58d59290253ce6fa99d03ae9a4571d58c97f3d4238863368118310c2eac12293" Jan 22 13:19:43 crc kubenswrapper[4773]: E0122 13:19:43.187790 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58d59290253ce6fa99d03ae9a4571d58c97f3d4238863368118310c2eac12293\": container with ID starting with 58d59290253ce6fa99d03ae9a4571d58c97f3d4238863368118310c2eac12293 not found: ID does not exist" containerID="58d59290253ce6fa99d03ae9a4571d58c97f3d4238863368118310c2eac12293" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.187897 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58d59290253ce6fa99d03ae9a4571d58c97f3d4238863368118310c2eac12293"} err="failed to get container status \"58d59290253ce6fa99d03ae9a4571d58c97f3d4238863368118310c2eac12293\": rpc error: code = NotFound desc = could not find container \"58d59290253ce6fa99d03ae9a4571d58c97f3d4238863368118310c2eac12293\": container with ID starting with 58d59290253ce6fa99d03ae9a4571d58c97f3d4238863368118310c2eac12293 not found: ID does not exist" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.187992 4773 scope.go:117] "RemoveContainer" containerID="1f6071ed6573416f9a0afb8ac9a69995e48c0bef9503f90c3e1502a753aac874" Jan 22 13:19:43 crc kubenswrapper[4773]: E0122 13:19:43.188587 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f6071ed6573416f9a0afb8ac9a69995e48c0bef9503f90c3e1502a753aac874\": container with ID starting with 1f6071ed6573416f9a0afb8ac9a69995e48c0bef9503f90c3e1502a753aac874 not found: ID does not exist" containerID="1f6071ed6573416f9a0afb8ac9a69995e48c0bef9503f90c3e1502a753aac874" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.188685 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f6071ed6573416f9a0afb8ac9a69995e48c0bef9503f90c3e1502a753aac874"} err="failed to get container status \"1f6071ed6573416f9a0afb8ac9a69995e48c0bef9503f90c3e1502a753aac874\": rpc error: code = NotFound desc = could not find container \"1f6071ed6573416f9a0afb8ac9a69995e48c0bef9503f90c3e1502a753aac874\": container with ID starting with 1f6071ed6573416f9a0afb8ac9a69995e48c0bef9503f90c3e1502a753aac874 not found: ID does not exist" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.240377 4773 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4af4d4e7-d821-46cb-8609-c96c8521308b-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.240422 4773 
reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cfc01903-fce2-4ce5-a669-804e05bafa95-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.240436 4773 reconciler_common.go:293] "Volume detached for volume \"pvc-057d2589-ccf8-41c4-833a-de31f025f26b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-057d2589-ccf8-41c4-833a-de31f025f26b\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.240448 4773 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4af4d4e7-d821-46cb-8609-c96c8521308b-server-conf\") on node \"crc\" DevicePath \"\"" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.269023 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.275790 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.316249 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 13:19:43 crc kubenswrapper[4773]: E0122 13:19:43.316653 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfc01903-fce2-4ce5-a669-804e05bafa95" containerName="setup-container" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.316679 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfc01903-fce2-4ce5-a669-804e05bafa95" containerName="setup-container" Jan 22 13:19:43 crc kubenswrapper[4773]: E0122 13:19:43.316704 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4af4d4e7-d821-46cb-8609-c96c8521308b" containerName="rabbitmq" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.316715 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="4af4d4e7-d821-46cb-8609-c96c8521308b" containerName="rabbitmq" Jan 22 13:19:43 crc kubenswrapper[4773]: E0122 13:19:43.316730 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="090e1ede-1784-49b2-9f35-400070db2926" containerName="dnsmasq-dns" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.316739 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="090e1ede-1784-49b2-9f35-400070db2926" containerName="dnsmasq-dns" Jan 22 13:19:43 crc kubenswrapper[4773]: E0122 13:19:43.316757 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="090e1ede-1784-49b2-9f35-400070db2926" containerName="init" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.316769 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="090e1ede-1784-49b2-9f35-400070db2926" containerName="init" Jan 22 13:19:43 crc kubenswrapper[4773]: E0122 13:19:43.316790 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4af4d4e7-d821-46cb-8609-c96c8521308b" containerName="setup-container" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.316800 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="4af4d4e7-d821-46cb-8609-c96c8521308b" containerName="setup-container" Jan 22 13:19:43 crc kubenswrapper[4773]: E0122 13:19:43.316829 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfc01903-fce2-4ce5-a669-804e05bafa95" containerName="rabbitmq" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.316838 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfc01903-fce2-4ce5-a669-804e05bafa95" containerName="rabbitmq" 
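The interleaved cpu_manager "RemoveStaleState" / state_mem "Deleted CPUSet assignment" pairs run when the replacement rabbitmq-server-0 pod is admitted: before accepting the new pod UID, kubelet's resource managers drop the CPU (and, just below, memory) assignments still recorded for the old pod UIDs of the same name. A sketch of that stale-state pruning, using an in-memory map as a stand-in for the checkpointed state — the types and names here are illustrative only:

```go
package main

import "fmt"

type containerKey struct {
	PodUID, ContainerName string
}

// pruneStale drops assignments whose pod UID is no longer active, mirroring
// the RemoveStaleState / "Deleted CPUSet assignment" pairs in the log above.
func pruneStale(assignments map[containerKey]string, activePods map[string]bool) {
	for k := range assignments {
		if !activePods[k.PodUID] {
			fmt.Printf("removing stale assignment podUID=%q containerName=%q\n",
				k.PodUID, k.ContainerName)
			delete(assignments, k)
		}
	}
}

func main() {
	state := map[containerKey]string{
		{"4af4d4e7-d821-46cb-8609-c96c8521308b", "rabbitmq"}: "cpus 0-3",
	}
	// Only the replacement pod's UID is active now.
	pruneStale(state, map[string]bool{"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf": true})
	fmt.Println(len(state)) // 0
}
```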
Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.317032 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfc01903-fce2-4ce5-a669-804e05bafa95" containerName="rabbitmq" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.317062 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="4af4d4e7-d821-46cb-8609-c96c8521308b" containerName="rabbitmq" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.317076 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="090e1ede-1784-49b2-9f35-400070db2926" containerName="dnsmasq-dns" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.318083 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.324555 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.324766 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.324911 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.325062 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.325210 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.329612 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-2l49c" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.329842 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.330064 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.339771 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.347921 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.366184 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.367460 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.372345 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.372516 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-qxm6s" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.372693 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.372941 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.373300 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.374169 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.386208 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.397317 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.442688 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.442734 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-config-data\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.442783 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.442807 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-pod-info\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.442831 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.442900 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c1f14d87-f341-4c16-900b-7aa0878c9a84-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.442939 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c1f14d87-f341-4c16-900b-7aa0878c9a84-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.442969 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c1f14d87-f341-4c16-900b-7aa0878c9a84-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.442995 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.443019 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c1f14d87-f341-4c16-900b-7aa0878c9a84-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.443109 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4vsg\" (UniqueName: \"kubernetes.io/projected/c1f14d87-f341-4c16-900b-7aa0878c9a84-kube-api-access-q4vsg\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.443152 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.443209 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.443275 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1f14d87-f341-4c16-900b-7aa0878c9a84-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.443358 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-057d2589-ccf8-41c4-833a-de31f025f26b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-057d2589-ccf8-41c4-833a-de31f025f26b\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.443386 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.443404 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-server-conf\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.443451 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpv54\" (UniqueName: \"kubernetes.io/projected/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-kube-api-access-lpv54\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.443473 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c1f14d87-f341-4c16-900b-7aa0878c9a84-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.443505 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c1f14d87-f341-4c16-900b-7aa0878c9a84-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.443528 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c1f14d87-f341-4c16-900b-7aa0878c9a84-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.443550 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c1f14d87-f341-4c16-900b-7aa0878c9a84-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545428 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpv54\" (UniqueName: \"kubernetes.io/projected/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-kube-api-access-lpv54\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 
13:19:43.545480 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c1f14d87-f341-4c16-900b-7aa0878c9a84-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545511 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c1f14d87-f341-4c16-900b-7aa0878c9a84-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545527 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c1f14d87-f341-4c16-900b-7aa0878c9a84-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545545 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c1f14d87-f341-4c16-900b-7aa0878c9a84-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545582 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545601 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-config-data\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545639 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545682 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-pod-info\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545697 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545717 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/c1f14d87-f341-4c16-900b-7aa0878c9a84-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545734 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c1f14d87-f341-4c16-900b-7aa0878c9a84-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545756 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c1f14d87-f341-4c16-900b-7aa0878c9a84-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545779 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545796 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c1f14d87-f341-4c16-900b-7aa0878c9a84-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545816 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4vsg\" (UniqueName: \"kubernetes.io/projected/c1f14d87-f341-4c16-900b-7aa0878c9a84-kube-api-access-q4vsg\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545837 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545856 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545875 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c1f14d87-f341-4c16-900b-7aa0878c9a84-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545896 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-057d2589-ccf8-41c4-833a-de31f025f26b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-057d2589-ccf8-41c4-833a-de31f025f26b\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545916 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.545930 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-server-conf\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.546228 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c1f14d87-f341-4c16-900b-7aa0878c9a84-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.546233 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.546783 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c1f14d87-f341-4c16-900b-7aa0878c9a84-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.546880 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-config-data\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.546972 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c1f14d87-f341-4c16-900b-7aa0878c9a84-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.547215 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.547245 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.547568 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" 
(UniqueName: \"kubernetes.io/configmap/c1f14d87-f341-4c16-900b-7aa0878c9a84-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.547916 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c1f14d87-f341-4c16-900b-7aa0878c9a84-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.549418 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-server-conf\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.550550 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.551244 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-pod-info\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.551738 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c1f14d87-f341-4c16-900b-7aa0878c9a84-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.552515 4773 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.552570 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/4ed1801c2b645a90d9988960310f81c674791f1ebaa9a5a8d5281bd95a824772/globalmount\"" pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.552737 4773 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
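Both "Skipping MountDevice" entries reflect the same CSI detail already visible on the unmount side near the top of this excerpt: kubevirt.io.hostpath-provisioner does not advertise the STAGE_UNSTAGE_VOLUME node capability, so kubelet skips the NodeStageVolume RPC entirely and records MountVolume.MountDevice as succeeded with just the computed globalmount path; MountVolume.SetUp (NodePublishVolume) then runs against the volume directly. A sketch of that decision, assuming a boolean capability flag — the function names are illustrative, not the real CSI attacher API:

```go
package main

import "fmt"

// mountDevice skips the staging RPC when the driver lacks the
// STAGE_UNSTAGE_VOLUME node capability; publish (MountVolume.SetUp)
// then mounts the volume directly rather than from a staged device.
func mountDevice(hasStageUnstage bool, nodeStage func() error) error {
	if !hasStageUnstage {
		fmt.Println("STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...")
		return nil
	}
	return nodeStage()
}

func main() {
	// Hostpath-style provisioners typically omit the capability.
	_ = mountDevice(false, func() error { return nil })
}
```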
Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.552825 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-057d2589-ccf8-41c4-833a-de31f025f26b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-057d2589-ccf8-41c4-833a-de31f025f26b\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6b48504644dee6ae822cbe8bfe746e4b3fd51178841c3f8235344fb47eab87d0/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.552952 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.553865 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.555402 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c1f14d87-f341-4c16-900b-7aa0878c9a84-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.555732 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c1f14d87-f341-4c16-900b-7aa0878c9a84-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.559883 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c1f14d87-f341-4c16-900b-7aa0878c9a84-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.563589 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpv54\" (UniqueName: \"kubernetes.io/projected/bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf-kube-api-access-lpv54\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.567370 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4vsg\" (UniqueName: \"kubernetes.io/projected/c1f14d87-f341-4c16-900b-7aa0878c9a84-kube-api-access-q4vsg\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.579807 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-057d2589-ccf8-41c4-833a-de31f025f26b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-057d2589-ccf8-41c4-833a-de31f025f26b\") pod \"rabbitmq-cell1-server-0\" (UID: \"c1f14d87-f341-4c16-900b-7aa0878c9a84\") " 
pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.583147 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-504debee-ff9e-4c50-bd9d-f0197610c5d6\") pod \"rabbitmq-server-0\" (UID: \"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf\") " pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.664124 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 22 13:19:43 crc kubenswrapper[4773]: I0122 13:19:43.690072 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:19:44 crc kubenswrapper[4773]: I0122 13:19:44.145257 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 22 13:19:44 crc kubenswrapper[4773]: I0122 13:19:44.150219 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 22 13:19:44 crc kubenswrapper[4773]: I0122 13:19:44.668320 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4af4d4e7-d821-46cb-8609-c96c8521308b" path="/var/lib/kubelet/pods/4af4d4e7-d821-46cb-8609-c96c8521308b/volumes" Jan 22 13:19:44 crc kubenswrapper[4773]: I0122 13:19:44.670434 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfc01903-fce2-4ce5-a669-804e05bafa95" path="/var/lib/kubelet/pods/cfc01903-fce2-4ce5-a669-804e05bafa95/volumes" Jan 22 13:19:45 crc kubenswrapper[4773]: I0122 13:19:45.011573 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c1f14d87-f341-4c16-900b-7aa0878c9a84","Type":"ContainerStarted","Data":"3038b3723b47d96508b0ffaf2d2f78b0adbb1e373874ab8aadc538598d212d64"} Jan 22 13:19:45 crc kubenswrapper[4773]: I0122 13:19:45.013077 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf","Type":"ContainerStarted","Data":"f71d041e54d01deffdc85dafa12047bfa29ab0e2cb471e5d38f7536a8643a733"} Jan 22 13:19:46 crc kubenswrapper[4773]: I0122 13:19:46.024175 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c1f14d87-f341-4c16-900b-7aa0878c9a84","Type":"ContainerStarted","Data":"f91af5e3c9f5386727fcf592e29d93cff482ba3a784c4bb19a50d8b9a10a85ee"} Jan 22 13:19:46 crc kubenswrapper[4773]: I0122 13:19:46.026219 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf","Type":"ContainerStarted","Data":"c5d2d1c385f3735947fcb3faafb2af1062ce3caaf9774032fc15604124aaf153"} Jan 22 13:19:53 crc kubenswrapper[4773]: I0122 13:19:53.659820 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47" Jan 22 13:19:53 crc kubenswrapper[4773]: E0122 13:19:53.660666 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:20:08 crc kubenswrapper[4773]: I0122 
13:20:08.659310 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47" Jan 22 13:20:08 crc kubenswrapper[4773]: E0122 13:20:08.660072 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:20:18 crc kubenswrapper[4773]: I0122 13:20:18.295733 4773 generic.go:334] "Generic (PLEG): container finished" podID="c1f14d87-f341-4c16-900b-7aa0878c9a84" containerID="f91af5e3c9f5386727fcf592e29d93cff482ba3a784c4bb19a50d8b9a10a85ee" exitCode=0 Jan 22 13:20:18 crc kubenswrapper[4773]: I0122 13:20:18.295841 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c1f14d87-f341-4c16-900b-7aa0878c9a84","Type":"ContainerDied","Data":"f91af5e3c9f5386727fcf592e29d93cff482ba3a784c4bb19a50d8b9a10a85ee"} Jan 22 13:20:18 crc kubenswrapper[4773]: I0122 13:20:18.298844 4773 generic.go:334] "Generic (PLEG): container finished" podID="bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf" containerID="c5d2d1c385f3735947fcb3faafb2af1062ce3caaf9774032fc15604124aaf153" exitCode=0 Jan 22 13:20:18 crc kubenswrapper[4773]: I0122 13:20:18.298918 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf","Type":"ContainerDied","Data":"c5d2d1c385f3735947fcb3faafb2af1062ce3caaf9774032fc15604124aaf153"} Jan 22 13:20:19 crc kubenswrapper[4773]: I0122 13:20:19.307272 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf","Type":"ContainerStarted","Data":"b2904826495d0353d0a6806aeb16e1305ce1b214c6747c0ce9779b7fe1a64a60"} Jan 22 13:20:19 crc kubenswrapper[4773]: I0122 13:20:19.309476 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c1f14d87-f341-4c16-900b-7aa0878c9a84","Type":"ContainerStarted","Data":"01636cbf5ea2ad22784620fdd29c0119193ffd0f1a158cac98ee101568342860"} Jan 22 13:20:19 crc kubenswrapper[4773]: I0122 13:20:19.309743 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:20:19 crc kubenswrapper[4773]: I0122 13:20:19.333825 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.333805722 podStartE2EDuration="36.333805722s" podCreationTimestamp="2026-01-22 13:19:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:20:19.330223041 +0000 UTC m=+5126.908338886" watchObservedRunningTime="2026-01-22 13:20:19.333805722 +0000 UTC m=+5126.911921547" Jan 22 13:20:19 crc kubenswrapper[4773]: I0122 13:20:19.364090 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.364064766 podStartE2EDuration="36.364064766s" podCreationTimestamp="2026-01-22 13:19:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:20:19.358768316 +0000 UTC 
m=+5126.936884141" watchObservedRunningTime="2026-01-22 13:20:19.364064766 +0000 UTC m=+5126.942180591" Jan 22 13:20:23 crc kubenswrapper[4773]: I0122 13:20:23.657997 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47" Jan 22 13:20:23 crc kubenswrapper[4773]: E0122 13:20:23.658765 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:20:23 crc kubenswrapper[4773]: I0122 13:20:23.664309 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jan 22 13:20:33 crc kubenswrapper[4773]: I0122 13:20:33.667515 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Jan 22 13:20:33 crc kubenswrapper[4773]: I0122 13:20:33.693526 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jan 22 13:20:37 crc kubenswrapper[4773]: I0122 13:20:37.393880 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Jan 22 13:20:37 crc kubenswrapper[4773]: I0122 13:20:37.395424 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 22 13:20:37 crc kubenswrapper[4773]: I0122 13:20:37.401200 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-b9zg7" Jan 22 13:20:37 crc kubenswrapper[4773]: I0122 13:20:37.403914 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 22 13:20:37 crc kubenswrapper[4773]: I0122 13:20:37.507369 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxd7g\" (UniqueName: \"kubernetes.io/projected/227c760b-550c-499c-acbf-c60943cdb080-kube-api-access-cxd7g\") pod \"mariadb-client\" (UID: \"227c760b-550c-499c-acbf-c60943cdb080\") " pod="openstack/mariadb-client" Jan 22 13:20:37 crc kubenswrapper[4773]: I0122 13:20:37.609795 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxd7g\" (UniqueName: \"kubernetes.io/projected/227c760b-550c-499c-acbf-c60943cdb080-kube-api-access-cxd7g\") pod \"mariadb-client\" (UID: \"227c760b-550c-499c-acbf-c60943cdb080\") " pod="openstack/mariadb-client" Jan 22 13:20:37 crc kubenswrapper[4773]: I0122 13:20:37.629335 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxd7g\" (UniqueName: \"kubernetes.io/projected/227c760b-550c-499c-acbf-c60943cdb080-kube-api-access-cxd7g\") pod \"mariadb-client\" (UID: \"227c760b-550c-499c-acbf-c60943cdb080\") " pod="openstack/mariadb-client" Jan 22 13:20:37 crc kubenswrapper[4773]: I0122 13:20:37.712877 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 22 13:20:38 crc kubenswrapper[4773]: I0122 13:20:38.231416 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 22 13:20:38 crc kubenswrapper[4773]: I0122 13:20:38.249582 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 13:20:38 crc kubenswrapper[4773]: I0122 13:20:38.473218 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"227c760b-550c-499c-acbf-c60943cdb080","Type":"ContainerStarted","Data":"1fa5aeeb3dc03df8e1272a125e1b3e681022dac90607f476348cf2100178301d"} Jan 22 13:20:38 crc kubenswrapper[4773]: I0122 13:20:38.657667 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47" Jan 22 13:20:38 crc kubenswrapper[4773]: E0122 13:20:38.658381 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:20:39 crc kubenswrapper[4773]: I0122 13:20:39.480855 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"227c760b-550c-499c-acbf-c60943cdb080","Type":"ContainerStarted","Data":"f16a142ce50d3980ca3a7ef467f85baaf399eb9b2e1e3bfbb3533b4d1e90c9c5"} Jan 22 13:20:39 crc kubenswrapper[4773]: I0122 13:20:39.495172 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client" podStartSLOduration=1.9701554479999999 podStartE2EDuration="2.495151695s" podCreationTimestamp="2026-01-22 13:20:37 +0000 UTC" firstStartedPulling="2026-01-22 13:20:38.249377094 +0000 UTC m=+5145.827492919" lastFinishedPulling="2026-01-22 13:20:38.774373351 +0000 UTC m=+5146.352489166" observedRunningTime="2026-01-22 13:20:39.49246424 +0000 UTC m=+5147.070580085" watchObservedRunningTime="2026-01-22 13:20:39.495151695 +0000 UTC m=+5147.073267520" Jan 22 13:20:51 crc kubenswrapper[4773]: I0122 13:20:51.361484 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Jan 22 13:20:51 crc kubenswrapper[4773]: I0122 13:20:51.362343 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/mariadb-client" podUID="227c760b-550c-499c-acbf-c60943cdb080" containerName="mariadb-client" containerID="cri-o://f16a142ce50d3980ca3a7ef467f85baaf399eb9b2e1e3bfbb3533b4d1e90c9c5" gracePeriod=30 Jan 22 13:20:52 crc kubenswrapper[4773]: I0122 13:20:52.324815 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 22 13:20:52 crc kubenswrapper[4773]: I0122 13:20:52.360048 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxd7g\" (UniqueName: \"kubernetes.io/projected/227c760b-550c-499c-acbf-c60943cdb080-kube-api-access-cxd7g\") pod \"227c760b-550c-499c-acbf-c60943cdb080\" (UID: \"227c760b-550c-499c-acbf-c60943cdb080\") " Jan 22 13:20:52 crc kubenswrapper[4773]: I0122 13:20:52.367830 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/227c760b-550c-499c-acbf-c60943cdb080-kube-api-access-cxd7g" (OuterVolumeSpecName: "kube-api-access-cxd7g") pod "227c760b-550c-499c-acbf-c60943cdb080" (UID: "227c760b-550c-499c-acbf-c60943cdb080"). InnerVolumeSpecName "kube-api-access-cxd7g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:20:52 crc kubenswrapper[4773]: I0122 13:20:52.461856 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxd7g\" (UniqueName: \"kubernetes.io/projected/227c760b-550c-499c-acbf-c60943cdb080-kube-api-access-cxd7g\") on node \"crc\" DevicePath \"\"" Jan 22 13:20:52 crc kubenswrapper[4773]: I0122 13:20:52.600979 4773 generic.go:334] "Generic (PLEG): container finished" podID="227c760b-550c-499c-acbf-c60943cdb080" containerID="f16a142ce50d3980ca3a7ef467f85baaf399eb9b2e1e3bfbb3533b4d1e90c9c5" exitCode=143 Jan 22 13:20:52 crc kubenswrapper[4773]: I0122 13:20:52.601055 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"227c760b-550c-499c-acbf-c60943cdb080","Type":"ContainerDied","Data":"f16a142ce50d3980ca3a7ef467f85baaf399eb9b2e1e3bfbb3533b4d1e90c9c5"} Jan 22 13:20:52 crc kubenswrapper[4773]: I0122 13:20:52.601164 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"227c760b-550c-499c-acbf-c60943cdb080","Type":"ContainerDied","Data":"1fa5aeeb3dc03df8e1272a125e1b3e681022dac90607f476348cf2100178301d"} Jan 22 13:20:52 crc kubenswrapper[4773]: I0122 13:20:52.601171 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 22 13:20:52 crc kubenswrapper[4773]: I0122 13:20:52.602079 4773 scope.go:117] "RemoveContainer" containerID="f16a142ce50d3980ca3a7ef467f85baaf399eb9b2e1e3bfbb3533b4d1e90c9c5" Jan 22 13:20:52 crc kubenswrapper[4773]: I0122 13:20:52.625067 4773 scope.go:117] "RemoveContainer" containerID="f16a142ce50d3980ca3a7ef467f85baaf399eb9b2e1e3bfbb3533b4d1e90c9c5" Jan 22 13:20:52 crc kubenswrapper[4773]: E0122 13:20:52.625466 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f16a142ce50d3980ca3a7ef467f85baaf399eb9b2e1e3bfbb3533b4d1e90c9c5\": container with ID starting with f16a142ce50d3980ca3a7ef467f85baaf399eb9b2e1e3bfbb3533b4d1e90c9c5 not found: ID does not exist" containerID="f16a142ce50d3980ca3a7ef467f85baaf399eb9b2e1e3bfbb3533b4d1e90c9c5" Jan 22 13:20:52 crc kubenswrapper[4773]: I0122 13:20:52.625511 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f16a142ce50d3980ca3a7ef467f85baaf399eb9b2e1e3bfbb3533b4d1e90c9c5"} err="failed to get container status \"f16a142ce50d3980ca3a7ef467f85baaf399eb9b2e1e3bfbb3533b4d1e90c9c5\": rpc error: code = NotFound desc = could not find container \"f16a142ce50d3980ca3a7ef467f85baaf399eb9b2e1e3bfbb3533b4d1e90c9c5\": container with ID starting with f16a142ce50d3980ca3a7ef467f85baaf399eb9b2e1e3bfbb3533b4d1e90c9c5 not found: ID does not exist" Jan 22 13:20:52 crc kubenswrapper[4773]: I0122 13:20:52.647493 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Jan 22 13:20:52 crc kubenswrapper[4773]: I0122 13:20:52.653881 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Jan 22 13:20:52 crc kubenswrapper[4773]: I0122 13:20:52.662947 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47" Jan 22 13:20:52 crc kubenswrapper[4773]: E0122 13:20:52.663238 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:20:52 crc kubenswrapper[4773]: I0122 13:20:52.670737 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="227c760b-550c-499c-acbf-c60943cdb080" path="/var/lib/kubelet/pods/227c760b-550c-499c-acbf-c60943cdb080/volumes" Jan 22 13:21:04 crc kubenswrapper[4773]: I0122 13:21:04.658111 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47" Jan 22 13:21:04 crc kubenswrapper[4773]: E0122 13:21:04.659065 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:21:17 crc kubenswrapper[4773]: I0122 13:21:17.659124 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47" Jan 22 
Jan 22 13:21:17 crc kubenswrapper[4773]: E0122 13:21:17.659914 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:21:32 crc kubenswrapper[4773]: I0122 13:21:32.668821 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47"
Jan 22 13:21:32 crc kubenswrapper[4773]: E0122 13:21:32.669693 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:21:44 crc kubenswrapper[4773]: I0122 13:21:44.656876 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bhwmz"]
Jan 22 13:21:44 crc kubenswrapper[4773]: E0122 13:21:44.657729 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="227c760b-550c-499c-acbf-c60943cdb080" containerName="mariadb-client"
Jan 22 13:21:44 crc kubenswrapper[4773]: I0122 13:21:44.657749 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="227c760b-550c-499c-acbf-c60943cdb080" containerName="mariadb-client"
Jan 22 13:21:44 crc kubenswrapper[4773]: I0122 13:21:44.657948 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="227c760b-550c-499c-acbf-c60943cdb080" containerName="mariadb-client"
Jan 22 13:21:44 crc kubenswrapper[4773]: I0122 13:21:44.659571 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bhwmz"
Jan 22 13:21:44 crc kubenswrapper[4773]: I0122 13:21:44.670177 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bhwmz"]
Jan 22 13:21:44 crc kubenswrapper[4773]: I0122 13:21:44.708476 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9ff8365-176d-4f11-b89e-4387db0b6287-utilities\") pod \"redhat-operators-bhwmz\" (UID: \"c9ff8365-176d-4f11-b89e-4387db0b6287\") " pod="openshift-marketplace/redhat-operators-bhwmz"
Jan 22 13:21:44 crc kubenswrapper[4773]: I0122 13:21:44.709272 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9ff8365-176d-4f11-b89e-4387db0b6287-catalog-content\") pod \"redhat-operators-bhwmz\" (UID: \"c9ff8365-176d-4f11-b89e-4387db0b6287\") " pod="openshift-marketplace/redhat-operators-bhwmz"
Jan 22 13:21:44 crc kubenswrapper[4773]: I0122 13:21:44.709337 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5n9z\" (UniqueName: \"kubernetes.io/projected/c9ff8365-176d-4f11-b89e-4387db0b6287-kube-api-access-l5n9z\") pod \"redhat-operators-bhwmz\" (UID: \"c9ff8365-176d-4f11-b89e-4387db0b6287\") " pod="openshift-marketplace/redhat-operators-bhwmz"
Jan 22 13:21:44 crc kubenswrapper[4773]: I0122 13:21:44.810549 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9ff8365-176d-4f11-b89e-4387db0b6287-utilities\") pod \"redhat-operators-bhwmz\" (UID: \"c9ff8365-176d-4f11-b89e-4387db0b6287\") " pod="openshift-marketplace/redhat-operators-bhwmz"
Jan 22 13:21:44 crc kubenswrapper[4773]: I0122 13:21:44.810653 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9ff8365-176d-4f11-b89e-4387db0b6287-catalog-content\") pod \"redhat-operators-bhwmz\" (UID: \"c9ff8365-176d-4f11-b89e-4387db0b6287\") " pod="openshift-marketplace/redhat-operators-bhwmz"
Jan 22 13:21:44 crc kubenswrapper[4773]: I0122 13:21:44.810686 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5n9z\" (UniqueName: \"kubernetes.io/projected/c9ff8365-176d-4f11-b89e-4387db0b6287-kube-api-access-l5n9z\") pod \"redhat-operators-bhwmz\" (UID: \"c9ff8365-176d-4f11-b89e-4387db0b6287\") " pod="openshift-marketplace/redhat-operators-bhwmz"
Jan 22 13:21:44 crc kubenswrapper[4773]: I0122 13:21:44.811307 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9ff8365-176d-4f11-b89e-4387db0b6287-utilities\") pod \"redhat-operators-bhwmz\" (UID: \"c9ff8365-176d-4f11-b89e-4387db0b6287\") " pod="openshift-marketplace/redhat-operators-bhwmz"
Jan 22 13:21:44 crc kubenswrapper[4773]: I0122 13:21:44.811635 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9ff8365-176d-4f11-b89e-4387db0b6287-catalog-content\") pod \"redhat-operators-bhwmz\" (UID: \"c9ff8365-176d-4f11-b89e-4387db0b6287\") " pod="openshift-marketplace/redhat-operators-bhwmz"
Jan 22 13:21:44 crc kubenswrapper[4773]: I0122 13:21:44.830026 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5n9z\" (UniqueName: \"kubernetes.io/projected/c9ff8365-176d-4f11-b89e-4387db0b6287-kube-api-access-l5n9z\") pod \"redhat-operators-bhwmz\" (UID: \"c9ff8365-176d-4f11-b89e-4387db0b6287\") " pod="openshift-marketplace/redhat-operators-bhwmz"
Jan 22 13:21:44 crc kubenswrapper[4773]: I0122 13:21:44.991267 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bhwmz"
Jan 22 13:21:45 crc kubenswrapper[4773]: I0122 13:21:45.433258 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bhwmz"]
Jan 22 13:21:46 crc kubenswrapper[4773]: I0122 13:21:46.086250 4773 generic.go:334] "Generic (PLEG): container finished" podID="c9ff8365-176d-4f11-b89e-4387db0b6287" containerID="67b2388e76ef0db2df36ab17a30f50ff8d43fcaa9dea6460e09e4290e1018c60" exitCode=0
Jan 22 13:21:46 crc kubenswrapper[4773]: I0122 13:21:46.086311 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bhwmz" event={"ID":"c9ff8365-176d-4f11-b89e-4387db0b6287","Type":"ContainerDied","Data":"67b2388e76ef0db2df36ab17a30f50ff8d43fcaa9dea6460e09e4290e1018c60"}
Jan 22 13:21:46 crc kubenswrapper[4773]: I0122 13:21:46.086346 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bhwmz" event={"ID":"c9ff8365-176d-4f11-b89e-4387db0b6287","Type":"ContainerStarted","Data":"c656148ca759e46c36b50e4768d8a84a8f8d67e0f4cd1b41dcae5e7b3bb47127"}
Jan 22 13:21:46 crc kubenswrapper[4773]: I0122 13:21:46.657825 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47"
Jan 22 13:21:46 crc kubenswrapper[4773]: E0122 13:21:46.658176 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:21:49 crc kubenswrapper[4773]: I0122 13:21:49.251011 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bhwmz" event={"ID":"c9ff8365-176d-4f11-b89e-4387db0b6287","Type":"ContainerStarted","Data":"4224b54efff6d5d8d60012fe220fc7ea6ee8d99ff712435c7918c4dcf41073ff"}
Jan 22 13:21:51 crc kubenswrapper[4773]: I0122 13:21:51.271135 4773 generic.go:334] "Generic (PLEG): container finished" podID="c9ff8365-176d-4f11-b89e-4387db0b6287" containerID="4224b54efff6d5d8d60012fe220fc7ea6ee8d99ff712435c7918c4dcf41073ff" exitCode=0
Jan 22 13:21:51 crc kubenswrapper[4773]: I0122 13:21:51.271191 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bhwmz" event={"ID":"c9ff8365-176d-4f11-b89e-4387db0b6287","Type":"ContainerDied","Data":"4224b54efff6d5d8d60012fe220fc7ea6ee8d99ff712435c7918c4dcf41073ff"}
Jan 22 13:21:52 crc kubenswrapper[4773]: I0122 13:21:52.283978 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bhwmz" event={"ID":"c9ff8365-176d-4f11-b89e-4387db0b6287","Type":"ContainerStarted","Data":"d5d1e42ee15a343e39ef78ec2318fc6862b5a9e9498171f74e822c932876d257"}
Jan 22 13:21:52 crc kubenswrapper[4773]: I0122 13:21:52.332825 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bhwmz" podStartSLOduration=2.66087409 podStartE2EDuration="8.332756969s" podCreationTimestamp="2026-01-22 13:21:44 +0000 UTC" firstStartedPulling="2026-01-22 13:21:46.087745923 +0000 UTC m=+5213.665861748" lastFinishedPulling="2026-01-22 13:21:51.759628802 +0000 UTC m=+5219.337744627" observedRunningTime="2026-01-22 13:21:52.314166314 +0000 UTC m=+5219.892282169" watchObservedRunningTime="2026-01-22 13:21:52.332756969 +0000 UTC m=+5219.910872844"
Jan 22 13:21:54 crc kubenswrapper[4773]: I0122 13:21:54.991761 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bhwmz"
Jan 22 13:21:54 crc kubenswrapper[4773]: I0122 13:21:54.993049 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bhwmz"
Jan 22 13:21:56 crc kubenswrapper[4773]: I0122 13:21:56.038279 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-bhwmz" podUID="c9ff8365-176d-4f11-b89e-4387db0b6287" containerName="registry-server" probeResult="failure" output=<
Jan 22 13:21:56 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s
Jan 22 13:21:56 crc kubenswrapper[4773]: >
Jan 22 13:21:57 crc kubenswrapper[4773]: I0122 13:21:57.658446 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47"
Jan 22 13:21:57 crc kubenswrapper[4773]: E0122 13:21:57.658715 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:22:05 crc kubenswrapper[4773]: I0122 13:22:05.036165 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bhwmz"
Jan 22 13:22:05 crc kubenswrapper[4773]: I0122 13:22:05.085128 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bhwmz"
Jan 22 13:22:05 crc kubenswrapper[4773]: I0122 13:22:05.272842 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bhwmz"]
Jan 22 13:22:06 crc kubenswrapper[4773]: I0122 13:22:06.822364 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bhwmz" podUID="c9ff8365-176d-4f11-b89e-4387db0b6287" containerName="registry-server" containerID="cri-o://d5d1e42ee15a343e39ef78ec2318fc6862b5a9e9498171f74e822c932876d257" gracePeriod=2
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.250754 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bhwmz"
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.381445 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9ff8365-176d-4f11-b89e-4387db0b6287-catalog-content\") pod \"c9ff8365-176d-4f11-b89e-4387db0b6287\" (UID: \"c9ff8365-176d-4f11-b89e-4387db0b6287\") "
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.381876 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5n9z\" (UniqueName: \"kubernetes.io/projected/c9ff8365-176d-4f11-b89e-4387db0b6287-kube-api-access-l5n9z\") pod \"c9ff8365-176d-4f11-b89e-4387db0b6287\" (UID: \"c9ff8365-176d-4f11-b89e-4387db0b6287\") "
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.382102 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9ff8365-176d-4f11-b89e-4387db0b6287-utilities\") pod \"c9ff8365-176d-4f11-b89e-4387db0b6287\" (UID: \"c9ff8365-176d-4f11-b89e-4387db0b6287\") "
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.382873 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9ff8365-176d-4f11-b89e-4387db0b6287-utilities" (OuterVolumeSpecName: "utilities") pod "c9ff8365-176d-4f11-b89e-4387db0b6287" (UID: "c9ff8365-176d-4f11-b89e-4387db0b6287"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.387557 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9ff8365-176d-4f11-b89e-4387db0b6287-kube-api-access-l5n9z" (OuterVolumeSpecName: "kube-api-access-l5n9z") pod "c9ff8365-176d-4f11-b89e-4387db0b6287" (UID: "c9ff8365-176d-4f11-b89e-4387db0b6287"). InnerVolumeSpecName "kube-api-access-l5n9z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.485122 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9ff8365-176d-4f11-b89e-4387db0b6287-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.485160 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5n9z\" (UniqueName: \"kubernetes.io/projected/c9ff8365-176d-4f11-b89e-4387db0b6287-kube-api-access-l5n9z\") on node \"crc\" DevicePath \"\""
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.545351 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9ff8365-176d-4f11-b89e-4387db0b6287-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c9ff8365-176d-4f11-b89e-4387db0b6287" (UID: "c9ff8365-176d-4f11-b89e-4387db0b6287"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.586305 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9ff8365-176d-4f11-b89e-4387db0b6287-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.834317 4773 generic.go:334] "Generic (PLEG): container finished" podID="c9ff8365-176d-4f11-b89e-4387db0b6287" containerID="d5d1e42ee15a343e39ef78ec2318fc6862b5a9e9498171f74e822c932876d257" exitCode=0
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.834381 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bhwmz" event={"ID":"c9ff8365-176d-4f11-b89e-4387db0b6287","Type":"ContainerDied","Data":"d5d1e42ee15a343e39ef78ec2318fc6862b5a9e9498171f74e822c932876d257"}
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.834421 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bhwmz" event={"ID":"c9ff8365-176d-4f11-b89e-4387db0b6287","Type":"ContainerDied","Data":"c656148ca759e46c36b50e4768d8a84a8f8d67e0f4cd1b41dcae5e7b3bb47127"}
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.834445 4773 scope.go:117] "RemoveContainer" containerID="d5d1e42ee15a343e39ef78ec2318fc6862b5a9e9498171f74e822c932876d257"
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.834474 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bhwmz"
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.857530 4773 scope.go:117] "RemoveContainer" containerID="4224b54efff6d5d8d60012fe220fc7ea6ee8d99ff712435c7918c4dcf41073ff"
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.880436 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bhwmz"]
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.886449 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bhwmz"]
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.907069 4773 scope.go:117] "RemoveContainer" containerID="67b2388e76ef0db2df36ab17a30f50ff8d43fcaa9dea6460e09e4290e1018c60"
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.928964 4773 scope.go:117] "RemoveContainer" containerID="d5d1e42ee15a343e39ef78ec2318fc6862b5a9e9498171f74e822c932876d257"
Jan 22 13:22:07 crc kubenswrapper[4773]: E0122 13:22:07.929583 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5d1e42ee15a343e39ef78ec2318fc6862b5a9e9498171f74e822c932876d257\": container with ID starting with d5d1e42ee15a343e39ef78ec2318fc6862b5a9e9498171f74e822c932876d257 not found: ID does not exist" containerID="d5d1e42ee15a343e39ef78ec2318fc6862b5a9e9498171f74e822c932876d257"
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.929638 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5d1e42ee15a343e39ef78ec2318fc6862b5a9e9498171f74e822c932876d257"} err="failed to get container status \"d5d1e42ee15a343e39ef78ec2318fc6862b5a9e9498171f74e822c932876d257\": rpc error: code = NotFound desc = could not find container \"d5d1e42ee15a343e39ef78ec2318fc6862b5a9e9498171f74e822c932876d257\": container with ID starting with d5d1e42ee15a343e39ef78ec2318fc6862b5a9e9498171f74e822c932876d257 not found: ID does not exist"
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.929671 4773 scope.go:117] "RemoveContainer" containerID="4224b54efff6d5d8d60012fe220fc7ea6ee8d99ff712435c7918c4dcf41073ff"
Jan 22 13:22:07 crc kubenswrapper[4773]: E0122 13:22:07.929996 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4224b54efff6d5d8d60012fe220fc7ea6ee8d99ff712435c7918c4dcf41073ff\": container with ID starting with 4224b54efff6d5d8d60012fe220fc7ea6ee8d99ff712435c7918c4dcf41073ff not found: ID does not exist" containerID="4224b54efff6d5d8d60012fe220fc7ea6ee8d99ff712435c7918c4dcf41073ff"
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.930024 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4224b54efff6d5d8d60012fe220fc7ea6ee8d99ff712435c7918c4dcf41073ff"} err="failed to get container status \"4224b54efff6d5d8d60012fe220fc7ea6ee8d99ff712435c7918c4dcf41073ff\": rpc error: code = NotFound desc = could not find container \"4224b54efff6d5d8d60012fe220fc7ea6ee8d99ff712435c7918c4dcf41073ff\": container with ID starting with 4224b54efff6d5d8d60012fe220fc7ea6ee8d99ff712435c7918c4dcf41073ff not found: ID does not exist"
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.930041 4773 scope.go:117] "RemoveContainer" containerID="67b2388e76ef0db2df36ab17a30f50ff8d43fcaa9dea6460e09e4290e1018c60"
Jan 22 13:22:07 crc kubenswrapper[4773]: E0122 13:22:07.930323 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67b2388e76ef0db2df36ab17a30f50ff8d43fcaa9dea6460e09e4290e1018c60\": container with ID starting with 67b2388e76ef0db2df36ab17a30f50ff8d43fcaa9dea6460e09e4290e1018c60 not found: ID does not exist" containerID="67b2388e76ef0db2df36ab17a30f50ff8d43fcaa9dea6460e09e4290e1018c60"
Jan 22 13:22:07 crc kubenswrapper[4773]: I0122 13:22:07.930351 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67b2388e76ef0db2df36ab17a30f50ff8d43fcaa9dea6460e09e4290e1018c60"} err="failed to get container status \"67b2388e76ef0db2df36ab17a30f50ff8d43fcaa9dea6460e09e4290e1018c60\": rpc error: code = NotFound desc = could not find container \"67b2388e76ef0db2df36ab17a30f50ff8d43fcaa9dea6460e09e4290e1018c60\": container with ID starting with 67b2388e76ef0db2df36ab17a30f50ff8d43fcaa9dea6460e09e4290e1018c60 not found: ID does not exist"
Jan 22 13:22:08 crc kubenswrapper[4773]: I0122 13:22:08.667819 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9ff8365-176d-4f11-b89e-4387db0b6287" path="/var/lib/kubelet/pods/c9ff8365-176d-4f11-b89e-4387db0b6287/volumes"
Jan 22 13:22:10 crc kubenswrapper[4773]: I0122 13:22:10.658450 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47"
Jan 22 13:22:10 crc kubenswrapper[4773]: E0122 13:22:10.658818 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:22:21 crc kubenswrapper[4773]: I0122 13:22:21.659065 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47"
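
The pod_startup_latency_tracker entry above for redhat-operators-bhwmz is internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (13:21:52.332756969 - 13:21:44 = 8.332756969s), and podStartSLOduration subtracts the image-pull window lastFinishedPulling - firstStartedPulling (5.671882879s) from it, giving the reported 2.66087409s. A minimal Go sketch of that arithmetic, using only timestamps printed in the log (the variable names are ours, not kubelet's):

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	// Timestamps copied from the "Observed pod startup duration" entry above.
    	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
    	parse := func(s string) time.Time {
    		t, err := time.Parse(layout, s)
    		if err != nil {
    			panic(err)
    		}
    		return t
    	}
    	created := parse("2026-01-22 13:21:44 +0000 UTC")             // podCreationTimestamp
    	firstPull := parse("2026-01-22 13:21:46.087745923 +0000 UTC") // firstStartedPulling
    	lastPull := parse("2026-01-22 13:21:51.759628802 +0000 UTC")  // lastFinishedPulling
    	running := parse("2026-01-22 13:21:52.332756969 +0000 UTC")   // watchObservedRunningTime

    	e2e := running.Sub(created)          // 8.332756969s, the podStartE2EDuration
    	slo := e2e - lastPull.Sub(firstPull) // 2.66087409s, with pull time excluded
    	fmt.Println(e2e, slo)
    }
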
Jan 22 13:22:21 crc kubenswrapper[4773]: E0122 13:22:21.661492 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:22:33 crc kubenswrapper[4773]: I0122 13:22:33.658266 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47"
Jan 22 13:22:33 crc kubenswrapper[4773]: E0122 13:22:33.659540 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:22:45 crc kubenswrapper[4773]: I0122 13:22:45.659321 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47"
Jan 22 13:22:45 crc kubenswrapper[4773]: E0122 13:22:45.660384 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:22:59 crc kubenswrapper[4773]: I0122 13:22:59.657906 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47"
Jan 22 13:22:59 crc kubenswrapper[4773]: E0122 13:22:59.658574 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:23:04 crc kubenswrapper[4773]: I0122 13:23:04.416978 4773 scope.go:117] "RemoveContainer" containerID="d6c91b13c29c2b1118f0f5ddd5107ac35938d9aa5641d2eaeb612edac937dc71"
Jan 22 13:23:11 crc kubenswrapper[4773]: I0122 13:23:11.659709 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47"
Jan 22 13:23:11 crc kubenswrapper[4773]: E0122 13:23:11.660700 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:23:26 crc kubenswrapper[4773]: I0122 13:23:26.658571 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47"
Jan 22 13:23:26 crc kubenswrapper[4773]: E0122 13:23:26.660583 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:23:37 crc kubenswrapper[4773]: I0122 13:23:37.658112 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47"
Jan 22 13:23:38 crc kubenswrapper[4773]: I0122 13:23:38.578277 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"22f45b11bbee937fdf871ae5de53af8235a219a99f6dd311534489dc7fbcd516"}
Jan 22 13:24:12 crc kubenswrapper[4773]: I0122 13:24:12.285566 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"]
Jan 22 13:24:12 crc kubenswrapper[4773]: E0122 13:24:12.286627 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9ff8365-176d-4f11-b89e-4387db0b6287" containerName="registry-server"
Jan 22 13:24:12 crc kubenswrapper[4773]: I0122 13:24:12.286650 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9ff8365-176d-4f11-b89e-4387db0b6287" containerName="registry-server"
Jan 22 13:24:12 crc kubenswrapper[4773]: E0122 13:24:12.286673 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9ff8365-176d-4f11-b89e-4387db0b6287" containerName="extract-content"
Jan 22 13:24:12 crc kubenswrapper[4773]: I0122 13:24:12.286680 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9ff8365-176d-4f11-b89e-4387db0b6287" containerName="extract-content"
Jan 22 13:24:12 crc kubenswrapper[4773]: E0122 13:24:12.286694 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9ff8365-176d-4f11-b89e-4387db0b6287" containerName="extract-utilities"
Jan 22 13:24:12 crc kubenswrapper[4773]: I0122 13:24:12.286701 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9ff8365-176d-4f11-b89e-4387db0b6287" containerName="extract-utilities"
Jan 22 13:24:12 crc kubenswrapper[4773]: I0122 13:24:12.286893 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9ff8365-176d-4f11-b89e-4387db0b6287" containerName="registry-server"
Jan 22 13:24:12 crc kubenswrapper[4773]: I0122 13:24:12.287553 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data"
Jan 22 13:24:12 crc kubenswrapper[4773]: I0122 13:24:12.290809 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-b9zg7"
Jan 22 13:24:12 crc kubenswrapper[4773]: I0122 13:24:12.293704 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"]
Jan 22 13:24:12 crc kubenswrapper[4773]: I0122 13:24:12.394463 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97962\" (UniqueName: \"kubernetes.io/projected/a14d1fa5-fa02-4064-bffc-6e5bb0f67531-kube-api-access-97962\") pod \"mariadb-copy-data\" (UID: \"a14d1fa5-fa02-4064-bffc-6e5bb0f67531\") " pod="openstack/mariadb-copy-data"
Jan 22 13:24:12 crc kubenswrapper[4773]: I0122 13:24:12.395024 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-a311c000-22a2-44a0-967f-2ec5ecc7cece\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a311c000-22a2-44a0-967f-2ec5ecc7cece\") pod \"mariadb-copy-data\" (UID: \"a14d1fa5-fa02-4064-bffc-6e5bb0f67531\") " pod="openstack/mariadb-copy-data"
Jan 22 13:24:12 crc kubenswrapper[4773]: I0122 13:24:12.495937 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97962\" (UniqueName: \"kubernetes.io/projected/a14d1fa5-fa02-4064-bffc-6e5bb0f67531-kube-api-access-97962\") pod \"mariadb-copy-data\" (UID: \"a14d1fa5-fa02-4064-bffc-6e5bb0f67531\") " pod="openstack/mariadb-copy-data"
Jan 22 13:24:12 crc kubenswrapper[4773]: I0122 13:24:12.496023 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-a311c000-22a2-44a0-967f-2ec5ecc7cece\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a311c000-22a2-44a0-967f-2ec5ecc7cece\") pod \"mariadb-copy-data\" (UID: \"a14d1fa5-fa02-4064-bffc-6e5bb0f67531\") " pod="openstack/mariadb-copy-data"
Jan 22 13:24:12 crc kubenswrapper[4773]: I0122 13:24:12.499239 4773 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 22 13:24:12 crc kubenswrapper[4773]: I0122 13:24:12.499273 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-a311c000-22a2-44a0-967f-2ec5ecc7cece\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a311c000-22a2-44a0-967f-2ec5ecc7cece\") pod \"mariadb-copy-data\" (UID: \"a14d1fa5-fa02-4064-bffc-6e5bb0f67531\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6e6f05de8f2f201c14318daf1cbe22db2e5a1bc674ae838b72c40cef37077ec3/globalmount\"" pod="openstack/mariadb-copy-data" Jan 22 13:24:12 crc kubenswrapper[4773]: I0122 13:24:12.512853 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97962\" (UniqueName: \"kubernetes.io/projected/a14d1fa5-fa02-4064-bffc-6e5bb0f67531-kube-api-access-97962\") pod \"mariadb-copy-data\" (UID: \"a14d1fa5-fa02-4064-bffc-6e5bb0f67531\") " pod="openstack/mariadb-copy-data" Jan 22 13:24:12 crc kubenswrapper[4773]: I0122 13:24:12.525951 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-a311c000-22a2-44a0-967f-2ec5ecc7cece\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a311c000-22a2-44a0-967f-2ec5ecc7cece\") pod \"mariadb-copy-data\" (UID: \"a14d1fa5-fa02-4064-bffc-6e5bb0f67531\") " pod="openstack/mariadb-copy-data" Jan 22 13:24:12 crc kubenswrapper[4773]: I0122 13:24:12.606632 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Jan 22 13:24:13 crc kubenswrapper[4773]: I0122 13:24:13.206394 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Jan 22 13:24:14 crc kubenswrapper[4773]: I0122 13:24:14.020030 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"a14d1fa5-fa02-4064-bffc-6e5bb0f67531","Type":"ContainerStarted","Data":"bf8d4d3b032f86913c2e301c2f397ccef4d80417030d623bcb64240395380142"} Jan 22 13:24:14 crc kubenswrapper[4773]: I0122 13:24:14.021384 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"a14d1fa5-fa02-4064-bffc-6e5bb0f67531","Type":"ContainerStarted","Data":"447604bccc2fca449ce1de1c701a88a0f3cf8621f9e4d0be538d2d6404c0d81c"} Jan 22 13:24:14 crc kubenswrapper[4773]: I0122 13:24:14.043931 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=3.043904866 podStartE2EDuration="3.043904866s" podCreationTimestamp="2026-01-22 13:24:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:24:14.035934721 +0000 UTC m=+5361.614050546" watchObservedRunningTime="2026-01-22 13:24:14.043904866 +0000 UTC m=+5361.622020691" Jan 22 13:24:16 crc kubenswrapper[4773]: I0122 13:24:16.944391 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Jan 22 13:24:16 crc kubenswrapper[4773]: I0122 13:24:16.946759 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 22 13:24:16 crc kubenswrapper[4773]: I0122 13:24:16.972646 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 22 13:24:17 crc kubenswrapper[4773]: I0122 13:24:17.050131 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xz7gr\" (UniqueName: \"kubernetes.io/projected/76268888-f07f-4748-aa64-a0c9568d61af-kube-api-access-xz7gr\") pod \"mariadb-client\" (UID: \"76268888-f07f-4748-aa64-a0c9568d61af\") " pod="openstack/mariadb-client" Jan 22 13:24:17 crc kubenswrapper[4773]: I0122 13:24:17.152003 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xz7gr\" (UniqueName: \"kubernetes.io/projected/76268888-f07f-4748-aa64-a0c9568d61af-kube-api-access-xz7gr\") pod \"mariadb-client\" (UID: \"76268888-f07f-4748-aa64-a0c9568d61af\") " pod="openstack/mariadb-client" Jan 22 13:24:17 crc kubenswrapper[4773]: I0122 13:24:17.182220 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xz7gr\" (UniqueName: \"kubernetes.io/projected/76268888-f07f-4748-aa64-a0c9568d61af-kube-api-access-xz7gr\") pod \"mariadb-client\" (UID: \"76268888-f07f-4748-aa64-a0c9568d61af\") " pod="openstack/mariadb-client" Jan 22 13:24:17 crc kubenswrapper[4773]: I0122 13:24:17.277686 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 22 13:24:17 crc kubenswrapper[4773]: I0122 13:24:17.697548 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 22 13:24:18 crc kubenswrapper[4773]: I0122 13:24:18.050373 4773 generic.go:334] "Generic (PLEG): container finished" podID="76268888-f07f-4748-aa64-a0c9568d61af" containerID="cec8374999343be725f204def7ea4320bd68fec995e7730c0c90b96bfeb317a1" exitCode=0 Jan 22 13:24:18 crc kubenswrapper[4773]: I0122 13:24:18.050472 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"76268888-f07f-4748-aa64-a0c9568d61af","Type":"ContainerDied","Data":"cec8374999343be725f204def7ea4320bd68fec995e7730c0c90b96bfeb317a1"} Jan 22 13:24:18 crc kubenswrapper[4773]: I0122 13:24:18.050874 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"76268888-f07f-4748-aa64-a0c9568d61af","Type":"ContainerStarted","Data":"b817185e4da2a825667863927f06581a7ccfd604f501082c323a9244bd03d743"} Jan 22 13:24:19 crc kubenswrapper[4773]: I0122 13:24:19.325110 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 22 13:24:19 crc kubenswrapper[4773]: I0122 13:24:19.345214 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_76268888-f07f-4748-aa64-a0c9568d61af/mariadb-client/0.log" Jan 22 13:24:19 crc kubenswrapper[4773]: I0122 13:24:19.368823 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Jan 22 13:24:19 crc kubenswrapper[4773]: I0122 13:24:19.376126 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Jan 22 13:24:19 crc kubenswrapper[4773]: I0122 13:24:19.494301 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Jan 22 13:24:19 crc kubenswrapper[4773]: E0122 13:24:19.494768 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76268888-f07f-4748-aa64-a0c9568d61af" containerName="mariadb-client" Jan 22 13:24:19 crc kubenswrapper[4773]: I0122 13:24:19.494795 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="76268888-f07f-4748-aa64-a0c9568d61af" containerName="mariadb-client" Jan 22 13:24:19 crc kubenswrapper[4773]: I0122 13:24:19.495016 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="76268888-f07f-4748-aa64-a0c9568d61af" containerName="mariadb-client" Jan 22 13:24:19 crc kubenswrapper[4773]: I0122 13:24:19.495725 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 22 13:24:19 crc kubenswrapper[4773]: I0122 13:24:19.501534 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 22 13:24:19 crc kubenswrapper[4773]: I0122 13:24:19.515235 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xz7gr\" (UniqueName: \"kubernetes.io/projected/76268888-f07f-4748-aa64-a0c9568d61af-kube-api-access-xz7gr\") pod \"76268888-f07f-4748-aa64-a0c9568d61af\" (UID: \"76268888-f07f-4748-aa64-a0c9568d61af\") " Jan 22 13:24:19 crc kubenswrapper[4773]: I0122 13:24:19.522317 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76268888-f07f-4748-aa64-a0c9568d61af-kube-api-access-xz7gr" (OuterVolumeSpecName: "kube-api-access-xz7gr") pod "76268888-f07f-4748-aa64-a0c9568d61af" (UID: "76268888-f07f-4748-aa64-a0c9568d61af"). InnerVolumeSpecName "kube-api-access-xz7gr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:24:19 crc kubenswrapper[4773]: I0122 13:24:19.617232 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ql8jj\" (UniqueName: \"kubernetes.io/projected/5e4424c6-b7fc-4c7a-8929-e20cb77b67fd-kube-api-access-ql8jj\") pod \"mariadb-client\" (UID: \"5e4424c6-b7fc-4c7a-8929-e20cb77b67fd\") " pod="openstack/mariadb-client" Jan 22 13:24:19 crc kubenswrapper[4773]: I0122 13:24:19.617402 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xz7gr\" (UniqueName: \"kubernetes.io/projected/76268888-f07f-4748-aa64-a0c9568d61af-kube-api-access-xz7gr\") on node \"crc\" DevicePath \"\"" Jan 22 13:24:19 crc kubenswrapper[4773]: I0122 13:24:19.842431 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ql8jj\" (UniqueName: \"kubernetes.io/projected/5e4424c6-b7fc-4c7a-8929-e20cb77b67fd-kube-api-access-ql8jj\") pod \"mariadb-client\" (UID: \"5e4424c6-b7fc-4c7a-8929-e20cb77b67fd\") " pod="openstack/mariadb-client" Jan 22 13:24:19 crc kubenswrapper[4773]: I0122 13:24:19.862910 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ql8jj\" (UniqueName: \"kubernetes.io/projected/5e4424c6-b7fc-4c7a-8929-e20cb77b67fd-kube-api-access-ql8jj\") pod \"mariadb-client\" (UID: \"5e4424c6-b7fc-4c7a-8929-e20cb77b67fd\") " pod="openstack/mariadb-client" Jan 22 13:24:20 crc kubenswrapper[4773]: I0122 13:24:20.065938 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b817185e4da2a825667863927f06581a7ccfd604f501082c323a9244bd03d743" Jan 22 13:24:20 crc kubenswrapper[4773]: I0122 13:24:20.066014 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 22 13:24:20 crc kubenswrapper[4773]: I0122 13:24:20.083043 4773 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/mariadb-client" oldPodUID="76268888-f07f-4748-aa64-a0c9568d61af" podUID="5e4424c6-b7fc-4c7a-8929-e20cb77b67fd" Jan 22 13:24:20 crc kubenswrapper[4773]: I0122 13:24:20.118771 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 22 13:24:20 crc kubenswrapper[4773]: I0122 13:24:20.605576 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 22 13:24:20 crc kubenswrapper[4773]: W0122 13:24:20.608956 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e4424c6_b7fc_4c7a_8929_e20cb77b67fd.slice/crio-bf592773a3fef415a55274842b3c4fe8a9799824e36245f1ba1698df3db2d454 WatchSource:0}: Error finding container bf592773a3fef415a55274842b3c4fe8a9799824e36245f1ba1698df3db2d454: Status 404 returned error can't find the container with id bf592773a3fef415a55274842b3c4fe8a9799824e36245f1ba1698df3db2d454 Jan 22 13:24:20 crc kubenswrapper[4773]: I0122 13:24:20.667066 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76268888-f07f-4748-aa64-a0c9568d61af" path="/var/lib/kubelet/pods/76268888-f07f-4748-aa64-a0c9568d61af/volumes" Jan 22 13:24:21 crc kubenswrapper[4773]: I0122 13:24:21.076763 4773 generic.go:334] "Generic (PLEG): container finished" podID="5e4424c6-b7fc-4c7a-8929-e20cb77b67fd" containerID="86669680a72f26f75de00540c03d33082fd1237b85900a3806086ed0e7423c25" exitCode=0 Jan 22 13:24:21 crc kubenswrapper[4773]: I0122 13:24:21.076820 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"5e4424c6-b7fc-4c7a-8929-e20cb77b67fd","Type":"ContainerDied","Data":"86669680a72f26f75de00540c03d33082fd1237b85900a3806086ed0e7423c25"} Jan 22 13:24:21 crc kubenswrapper[4773]: I0122 13:24:21.077123 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"5e4424c6-b7fc-4c7a-8929-e20cb77b67fd","Type":"ContainerStarted","Data":"bf592773a3fef415a55274842b3c4fe8a9799824e36245f1ba1698df3db2d454"} Jan 22 13:24:22 crc kubenswrapper[4773]: I0122 13:24:22.340147 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 22 13:24:22 crc kubenswrapper[4773]: I0122 13:24:22.360245 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_5e4424c6-b7fc-4c7a-8929-e20cb77b67fd/mariadb-client/0.log" Jan 22 13:24:22 crc kubenswrapper[4773]: I0122 13:24:22.390658 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Jan 22 13:24:22 crc kubenswrapper[4773]: I0122 13:24:22.397599 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Jan 22 13:24:22 crc kubenswrapper[4773]: I0122 13:24:22.481356 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ql8jj\" (UniqueName: \"kubernetes.io/projected/5e4424c6-b7fc-4c7a-8929-e20cb77b67fd-kube-api-access-ql8jj\") pod \"5e4424c6-b7fc-4c7a-8929-e20cb77b67fd\" (UID: \"5e4424c6-b7fc-4c7a-8929-e20cb77b67fd\") " Jan 22 13:24:22 crc kubenswrapper[4773]: I0122 13:24:22.486959 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e4424c6-b7fc-4c7a-8929-e20cb77b67fd-kube-api-access-ql8jj" (OuterVolumeSpecName: "kube-api-access-ql8jj") pod "5e4424c6-b7fc-4c7a-8929-e20cb77b67fd" (UID: "5e4424c6-b7fc-4c7a-8929-e20cb77b67fd"). InnerVolumeSpecName "kube-api-access-ql8jj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:24:22 crc kubenswrapper[4773]: I0122 13:24:22.583263 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ql8jj\" (UniqueName: \"kubernetes.io/projected/5e4424c6-b7fc-4c7a-8929-e20cb77b67fd-kube-api-access-ql8jj\") on node \"crc\" DevicePath \"\"" Jan 22 13:24:22 crc kubenswrapper[4773]: I0122 13:24:22.670238 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e4424c6-b7fc-4c7a-8929-e20cb77b67fd" path="/var/lib/kubelet/pods/5e4424c6-b7fc-4c7a-8929-e20cb77b67fd/volumes" Jan 22 13:24:23 crc kubenswrapper[4773]: I0122 13:24:23.097404 4773 scope.go:117] "RemoveContainer" containerID="86669680a72f26f75de00540c03d33082fd1237b85900a3806086ed0e7423c25" Jan 22 13:24:23 crc kubenswrapper[4773]: I0122 13:24:23.097456 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.186787 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 22 13:25:03 crc kubenswrapper[4773]: E0122 13:25:03.188793 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e4424c6-b7fc-4c7a-8929-e20cb77b67fd" containerName="mariadb-client" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.188887 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e4424c6-b7fc-4c7a-8929-e20cb77b67fd" containerName="mariadb-client" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.189107 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e4424c6-b7fc-4c7a-8929-e20cb77b67fd" containerName="mariadb-client" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.190181 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.193020 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.193680 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.193809 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.193931 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.194233 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-b4lnr" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.207231 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"] Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.208752 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.224400 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"] Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.225790 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.238522 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.251346 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.274275 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.356111 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.356166 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96ed693c-6314-414e-84eb-5d82d4ec2ff8-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.356193 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.356222 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75v5x\" (UniqueName: \"kubernetes.io/projected/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-kube-api-access-75v5x\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.356327 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.356456 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-39eb3663-63de-4f9c-a98f-e8760c1cf278\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39eb3663-63de-4f9c-a98f-e8760c1cf278\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.356481 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/96ed693c-6314-414e-84eb-5d82d4ec2ff8-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.356499 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4ktb\" (UniqueName: \"kubernetes.io/projected/96ed693c-6314-414e-84eb-5d82d4ec2ff8-kube-api-access-l4ktb\") pod \"ovsdbserver-nb-2\" (UID: 
\"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.356560 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.356592 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.356661 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96ed693c-6314-414e-84eb-5d82d4ec2ff8-config\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.356707 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.356739 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/96ed693c-6314-414e-84eb-5d82d4ec2ff8-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.356766 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.356801 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.356833 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.356895 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-95cbf18c-7867-482a-8037-2cf385a6acb8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-95cbf18c-7867-482a-8037-2cf385a6acb8\") pod \"ovsdbserver-nb-0\" (UID: 
\"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.356938 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-config\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.356966 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.357010 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnn86\" (UniqueName: \"kubernetes.io/projected/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-kube-api-access-mnn86\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.357031 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ed693c-6314-414e-84eb-5d82d4ec2ff8-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.357045 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-config\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.357073 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ed693c-6314-414e-84eb-5d82d4ec2ff8-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.357097 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-a52e5655-8b71-4275-bd98-f5f2422ec214\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a52e5655-8b71-4275-bd98-f5f2422ec214\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.458382 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-39eb3663-63de-4f9c-a98f-e8760c1cf278\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39eb3663-63de-4f9c-a98f-e8760c1cf278\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.458438 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/96ed693c-6314-414e-84eb-5d82d4ec2ff8-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " 
pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.458469 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4ktb\" (UniqueName: \"kubernetes.io/projected/96ed693c-6314-414e-84eb-5d82d4ec2ff8-kube-api-access-l4ktb\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.458513 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.458535 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.458583 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96ed693c-6314-414e-84eb-5d82d4ec2ff8-config\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.458609 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.458635 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/96ed693c-6314-414e-84eb-5d82d4ec2ff8-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.458659 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.458696 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.458728 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.458777 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-95cbf18c-7867-482a-8037-2cf385a6acb8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-95cbf18c-7867-482a-8037-2cf385a6acb8\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.458817 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-config\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.458846 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.458876 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnn86\" (UniqueName: \"kubernetes.io/projected/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-kube-api-access-mnn86\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.458897 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ed693c-6314-414e-84eb-5d82d4ec2ff8-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.458916 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-config\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.458941 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ed693c-6314-414e-84eb-5d82d4ec2ff8-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.458967 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-a52e5655-8b71-4275-bd98-f5f2422ec214\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a52e5655-8b71-4275-bd98-f5f2422ec214\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.459004 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.459027 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96ed693c-6314-414e-84eb-5d82d4ec2ff8-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: 
\"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.459058 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.459095 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75v5x\" (UniqueName: \"kubernetes.io/projected/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-kube-api-access-75v5x\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.459114 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.459834 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/96ed693c-6314-414e-84eb-5d82d4ec2ff8-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.460375 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.460376 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-config\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.460940 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/96ed693c-6314-414e-84eb-5d82d4ec2ff8-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.461116 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.461865 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96ed693c-6314-414e-84eb-5d82d4ec2ff8-config\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.462176 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-scripts\") pod \"ovsdbserver-nb-0\" (UID: 
\"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.462757 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-config\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.463168 4773 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.463600 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-39eb3663-63de-4f9c-a98f-e8760c1cf278\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39eb3663-63de-4f9c-a98f-e8760c1cf278\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/4883afbcb540a9c6f78ab2d7cb0c3a0e3cbb505571b0eeea8d9c5a54e1d01959/globalmount\"" pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.463741 4773 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.463772 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-95cbf18c-7867-482a-8037-2cf385a6acb8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-95cbf18c-7867-482a-8037-2cf385a6acb8\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/9961355a6f5fa9e2c819a62c148fd527afbdbac7177fcb169769fe9ea280d2a8/globalmount\"" pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.463256 4773 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.464058 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-a52e5655-8b71-4275-bd98-f5f2422ec214\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a52e5655-8b71-4275-bd98-f5f2422ec214\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/873c92cc45d50e9a0450a79238d7c4000d79c364da81c597e8c24c76bbc082d7/globalmount\"" pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.466057 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.466240 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96ed693c-6314-414e-84eb-5d82d4ec2ff8-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.466515 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.466591 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.466539 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.467065 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.467327 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ed693c-6314-414e-84eb-5d82d4ec2ff8-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.469477 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc 
kubenswrapper[4773]: I0122 13:25:03.470333 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/96ed693c-6314-414e-84eb-5d82d4ec2ff8-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.470813 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.486813 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75v5x\" (UniqueName: \"kubernetes.io/projected/7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4-kube-api-access-75v5x\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.487148 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnn86\" (UniqueName: \"kubernetes.io/projected/d74ce6eb-4311-4bd6-9aa9-2e13183e5180-kube-api-access-mnn86\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.494726 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4ktb\" (UniqueName: \"kubernetes.io/projected/96ed693c-6314-414e-84eb-5d82d4ec2ff8-kube-api-access-l4ktb\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.501051 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-a52e5655-8b71-4275-bd98-f5f2422ec214\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a52e5655-8b71-4275-bd98-f5f2422ec214\") pod \"ovsdbserver-nb-1\" (UID: \"d74ce6eb-4311-4bd6-9aa9-2e13183e5180\") " pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.509223 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-39eb3663-63de-4f9c-a98f-e8760c1cf278\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39eb3663-63de-4f9c-a98f-e8760c1cf278\") pod \"ovsdbserver-nb-2\" (UID: \"96ed693c-6314-414e-84eb-5d82d4ec2ff8\") " pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.512700 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-95cbf18c-7867-482a-8037-2cf385a6acb8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-95cbf18c-7867-482a-8037-2cf385a6acb8\") pod \"ovsdbserver-nb-0\" (UID: \"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4\") " pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.534087 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.551924 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:03 crc kubenswrapper[4773]: I0122 13:25:03.560824 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:04 crc kubenswrapper[4773]: I0122 13:25:04.024150 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Jan 22 13:25:04 crc kubenswrapper[4773]: I0122 13:25:04.141984 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Jan 22 13:25:04 crc kubenswrapper[4773]: I0122 13:25:04.325597 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"d74ce6eb-4311-4bd6-9aa9-2e13183e5180","Type":"ContainerStarted","Data":"5c5ff7678f93c5a79a03db3dbac492c4241722c7f6e258228e830193ccd51d9a"} Jan 22 13:25:04 crc kubenswrapper[4773]: I0122 13:25:04.325652 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"d74ce6eb-4311-4bd6-9aa9-2e13183e5180","Type":"ContainerStarted","Data":"f376620aa9047782a022511fe1ea9dadfb28d52a7e549c3cd17f7264d75e22ea"} Jan 22 13:25:04 crc kubenswrapper[4773]: I0122 13:25:04.330706 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"96ed693c-6314-414e-84eb-5d82d4ec2ff8","Type":"ContainerStarted","Data":"8e16c711c55c4816371aaee7125b20dfb4ccb2aa0c876a859c73537d8fb6a394"} Jan 22 13:25:04 crc kubenswrapper[4773]: I0122 13:25:04.330758 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"96ed693c-6314-414e-84eb-5d82d4ec2ff8","Type":"ContainerStarted","Data":"5548351cdd8b81ad0a1561aa0b751b7e997ee444d26eef594683f4d5fdabdb92"} Jan 22 13:25:04 crc kubenswrapper[4773]: I0122 13:25:04.501790 4773 scope.go:117] "RemoveContainer" containerID="308484e91deabb32171d7af8a675788142922e220d52aad526a35cce3cce6946" Jan 22 13:25:04 crc kubenswrapper[4773]: I0122 13:25:04.827248 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 22 13:25:04 crc kubenswrapper[4773]: W0122 13:25:04.839435 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7fb0c726_7d2f_4a39_b42a_c5b3c030d9d4.slice/crio-d1220a22e35271b1e52268eb64c5b22dffeba1098bf569bd510e29da6fa116f7 WatchSource:0}: Error finding container d1220a22e35271b1e52268eb64c5b22dffeba1098bf569bd510e29da6fa116f7: Status 404 returned error can't find the container with id d1220a22e35271b1e52268eb64c5b22dffeba1098bf569bd510e29da6fa116f7 Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.343867 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"d74ce6eb-4311-4bd6-9aa9-2e13183e5180","Type":"ContainerStarted","Data":"29d33cbeae7f419c175f8b152900d51d896c9db40318fc3e8db3792457eb5b2b"} Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.346992 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"96ed693c-6314-414e-84eb-5d82d4ec2ff8","Type":"ContainerStarted","Data":"d5aa3310d392f4a6635605a098aba50b0b0d2a90c7f1ba67474def48fc4c5074"} Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.351222 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4","Type":"ContainerStarted","Data":"904de23c13afd3d7ddf44306402cecb42ededda23a56736088e74475ddc084ae"} Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.351429 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" 
event={"ID":"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4","Type":"ContainerStarted","Data":"8f87cf73e7cef5190e7a2a17ec7268bdf8b59c47ac2120c7b62f15b6555324fd"} Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.351516 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4","Type":"ContainerStarted","Data":"d1220a22e35271b1e52268eb64c5b22dffeba1098bf569bd510e29da6fa116f7"} Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.379334 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=3.379273345 podStartE2EDuration="3.379273345s" podCreationTimestamp="2026-01-22 13:25:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:25:05.371797784 +0000 UTC m=+5412.949913629" watchObservedRunningTime="2026-01-22 13:25:05.379273345 +0000 UTC m=+5412.957389170" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.406380 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=3.40636406 podStartE2EDuration="3.40636406s" podCreationTimestamp="2026-01-22 13:25:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:25:05.400863585 +0000 UTC m=+5412.978979410" watchObservedRunningTime="2026-01-22 13:25:05.40636406 +0000 UTC m=+5412.984479885" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.431402 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=3.431376696 podStartE2EDuration="3.431376696s" podCreationTimestamp="2026-01-22 13:25:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:25:05.426438267 +0000 UTC m=+5413.004554092" watchObservedRunningTime="2026-01-22 13:25:05.431376696 +0000 UTC m=+5413.009492521" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.522481 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.523918 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.525802 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-j277w" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.526076 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.526249 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.526396 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.537351 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.553210 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"] Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.555944 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.566078 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"] Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.567484 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.577331 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.582935 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.601157 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tx67\" (UniqueName: \"kubernetes.io/projected/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-kube-api-access-4tx67\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.601237 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.601272 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e160090c-a207-41d2-b56b-64f286a86622\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e160090c-a207-41d2-b56b-64f286a86622\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.601318 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.601340 4773 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.601375 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-config\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.601399 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.601423 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.702445 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/65b8eedc-da58-466b-9774-eb79bc70e3f6-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.702515 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znj7g\" (UniqueName: \"kubernetes.io/projected/65b8eedc-da58-466b-9774-eb79bc70e3f6-kube-api-access-znj7g\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.702556 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tx67\" (UniqueName: \"kubernetes.io/projected/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-kube-api-access-4tx67\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.702580 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-63faa3ab-fddb-439a-a6b6-8a84c461d594\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-63faa3ab-fddb-439a-a6b6-8a84c461d594\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.702633 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-3b29d25b-030c-445f-ae9c-8299bceaedc1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3b29d25b-030c-445f-ae9c-8299bceaedc1\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.702676 4773 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65b8eedc-da58-466b-9774-eb79bc70e3f6-config\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.702727 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a3200fab-2f90-472d-a9d8-7d75debdc065-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.702753 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3200fab-2f90-472d-a9d8-7d75debdc065-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.702780 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65b8eedc-da58-466b-9774-eb79bc70e3f6-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.702812 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/65b8eedc-da58-466b-9774-eb79bc70e3f6-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.702858 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/65b8eedc-da58-466b-9774-eb79bc70e3f6-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.702885 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.702926 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e160090c-a207-41d2-b56b-64f286a86622\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e160090c-a207-41d2-b56b-64f286a86622\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.702958 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.702987 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.703007 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3200fab-2f90-472d-a9d8-7d75debdc065-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.703027 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3200fab-2f90-472d-a9d8-7d75debdc065-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.703060 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3200fab-2f90-472d-a9d8-7d75debdc065-config\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.703091 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8657\" (UniqueName: \"kubernetes.io/projected/a3200fab-2f90-472d-a9d8-7d75debdc065-kube-api-access-s8657\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.703119 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-config\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.703145 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.703171 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a3200fab-2f90-472d-a9d8-7d75debdc065-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.703191 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.703208 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65b8eedc-da58-466b-9774-eb79bc70e3f6-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: 
\"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.704002 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.704398 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-config\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.704605 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.707145 4773 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.707175 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e160090c-a207-41d2-b56b-64f286a86622\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e160090c-a207-41d2-b56b-64f286a86622\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/fd5ba59b6043f2365e01b4787948eb1e364b18f6ee81032118aa9908e4fa458c/globalmount\"" pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.707654 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.715207 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.715948 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.718427 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tx67\" (UniqueName: \"kubernetes.io/projected/aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d-kube-api-access-4tx67\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.732890 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e160090c-a207-41d2-b56b-64f286a86622\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e160090c-a207-41d2-b56b-64f286a86622\") pod \"ovsdbserver-sb-0\" (UID: \"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d\") " pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.805427 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3200fab-2f90-472d-a9d8-7d75debdc065-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.805535 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3200fab-2f90-472d-a9d8-7d75debdc065-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.805646 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3200fab-2f90-472d-a9d8-7d75debdc065-config\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.805720 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8657\" (UniqueName: \"kubernetes.io/projected/a3200fab-2f90-472d-a9d8-7d75debdc065-kube-api-access-s8657\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.805885 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a3200fab-2f90-472d-a9d8-7d75debdc065-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.805929 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65b8eedc-da58-466b-9774-eb79bc70e3f6-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.806081 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/65b8eedc-da58-466b-9774-eb79bc70e3f6-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.806164 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znj7g\" (UniqueName: \"kubernetes.io/projected/65b8eedc-da58-466b-9774-eb79bc70e3f6-kube-api-access-znj7g\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.806215 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-63faa3ab-fddb-439a-a6b6-8a84c461d594\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-63faa3ab-fddb-439a-a6b6-8a84c461d594\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " 
pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.806389 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-3b29d25b-030c-445f-ae9c-8299bceaedc1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3b29d25b-030c-445f-ae9c-8299bceaedc1\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.806494 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65b8eedc-da58-466b-9774-eb79bc70e3f6-config\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.806586 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a3200fab-2f90-472d-a9d8-7d75debdc065-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.806670 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3200fab-2f90-472d-a9d8-7d75debdc065-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.806739 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65b8eedc-da58-466b-9774-eb79bc70e3f6-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.806776 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/65b8eedc-da58-466b-9774-eb79bc70e3f6-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.806858 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/65b8eedc-da58-466b-9774-eb79bc70e3f6-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.807038 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a3200fab-2f90-472d-a9d8-7d75debdc065-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.807153 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3200fab-2f90-472d-a9d8-7d75debdc065-config\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.807732 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65b8eedc-da58-466b-9774-eb79bc70e3f6-config\") pod 
\"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.808215 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/65b8eedc-da58-466b-9774-eb79bc70e3f6-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.808548 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a3200fab-2f90-472d-a9d8-7d75debdc065-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.808611 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65b8eedc-da58-466b-9774-eb79bc70e3f6-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.809432 4773 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.809457 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-63faa3ab-fddb-439a-a6b6-8a84c461d594\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-63faa3ab-fddb-439a-a6b6-8a84c461d594\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/5a291393221a758e19ac505daa4e7168802d12e9ebba6e1c898e56c76ab8e716/globalmount\"" pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.809547 4773 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.809559 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65b8eedc-da58-466b-9774-eb79bc70e3f6-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.809570 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-3b29d25b-030c-445f-ae9c-8299bceaedc1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3b29d25b-030c-445f-ae9c-8299bceaedc1\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/79ee04648b46f1f7e6e2f6d7f60cdaca7088634c44e4bd3bc8262f0086a7f6b2/globalmount\"" pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.809693 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/65b8eedc-da58-466b-9774-eb79bc70e3f6-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.812328 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/65b8eedc-da58-466b-9774-eb79bc70e3f6-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.812705 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3200fab-2f90-472d-a9d8-7d75debdc065-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.814069 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3200fab-2f90-472d-a9d8-7d75debdc065-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.814448 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3200fab-2f90-472d-a9d8-7d75debdc065-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.826813 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znj7g\" (UniqueName: \"kubernetes.io/projected/65b8eedc-da58-466b-9774-eb79bc70e3f6-kube-api-access-znj7g\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.839378 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8657\" (UniqueName: \"kubernetes.io/projected/a3200fab-2f90-472d-a9d8-7d75debdc065-kube-api-access-s8657\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 
crc kubenswrapper[4773]: I0122 13:25:05.849843 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.852645 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-3b29d25b-030c-445f-ae9c-8299bceaedc1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-3b29d25b-030c-445f-ae9c-8299bceaedc1\") pod \"ovsdbserver-sb-1\" (UID: \"65b8eedc-da58-466b-9774-eb79bc70e3f6\") " pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.856089 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-63faa3ab-fddb-439a-a6b6-8a84c461d594\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-63faa3ab-fddb-439a-a6b6-8a84c461d594\") pod \"ovsdbserver-sb-2\" (UID: \"a3200fab-2f90-472d-a9d8-7d75debdc065\") " pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.872176 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:05 crc kubenswrapper[4773]: I0122 13:25:05.888206 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:06 crc kubenswrapper[4773]: W0122 13:25:06.219736 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3200fab_2f90_472d_a9d8_7d75debdc065.slice/crio-413d53d0846a53b88e17a65dec81daef34b0cb8a087965f396848cadfa734d8b WatchSource:0}: Error finding container 413d53d0846a53b88e17a65dec81daef34b0cb8a087965f396848cadfa734d8b: Status 404 returned error can't find the container with id 413d53d0846a53b88e17a65dec81daef34b0cb8a087965f396848cadfa734d8b Jan 22 13:25:06 crc kubenswrapper[4773]: I0122 13:25:06.224323 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Jan 22 13:25:06 crc kubenswrapper[4773]: I0122 13:25:06.320148 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Jan 22 13:25:06 crc kubenswrapper[4773]: I0122 13:25:06.362616 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"65b8eedc-da58-466b-9774-eb79bc70e3f6","Type":"ContainerStarted","Data":"f9bd67ea23beb152fa8e17a6f9b8a53b81adc909a12faa5b394a8fac39223f73"} Jan 22 13:25:06 crc kubenswrapper[4773]: I0122 13:25:06.367478 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"a3200fab-2f90-472d-a9d8-7d75debdc065","Type":"ContainerStarted","Data":"413d53d0846a53b88e17a65dec81daef34b0cb8a087965f396848cadfa734d8b"} Jan 22 13:25:06 crc kubenswrapper[4773]: I0122 13:25:06.414431 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 22 13:25:06 crc kubenswrapper[4773]: I0122 13:25:06.535262 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:06 crc kubenswrapper[4773]: I0122 13:25:06.552398 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:06 crc kubenswrapper[4773]: I0122 13:25:06.561875 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:07 crc kubenswrapper[4773]: I0122 13:25:07.377775 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovsdbserver-sb-0" event={"ID":"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d","Type":"ContainerStarted","Data":"513f8de0ecb1d47574a957fe6031f6cdd5273065c574fd3175a5acf898f5b24e"} Jan 22 13:25:07 crc kubenswrapper[4773]: I0122 13:25:07.378101 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d","Type":"ContainerStarted","Data":"f06a94b0f40b7ac9f434284f9d9b073e433fdff528994b44fbf123490d0e96c1"} Jan 22 13:25:07 crc kubenswrapper[4773]: I0122 13:25:07.378115 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d","Type":"ContainerStarted","Data":"ff59074f36e09d189bda13566bc5d162d0bc17fbd88c9f222a659e25b1534955"} Jan 22 13:25:07 crc kubenswrapper[4773]: I0122 13:25:07.381097 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"65b8eedc-da58-466b-9774-eb79bc70e3f6","Type":"ContainerStarted","Data":"e01eeb7ec811c1892e99853ad1a01a135bb407ca8d0b5e6b47ee4c17d3261132"} Jan 22 13:25:07 crc kubenswrapper[4773]: I0122 13:25:07.381344 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"65b8eedc-da58-466b-9774-eb79bc70e3f6","Type":"ContainerStarted","Data":"c6818d6056e64570e14b9e91499b244c2e521a909ce24d48ecf23e9901144086"} Jan 22 13:25:07 crc kubenswrapper[4773]: I0122 13:25:07.384551 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"a3200fab-2f90-472d-a9d8-7d75debdc065","Type":"ContainerStarted","Data":"c117871a2401dcbf9c3eb9e9d9f945d33fb3bc7880a514d8ed1dfe6909cd4027"} Jan 22 13:25:07 crc kubenswrapper[4773]: I0122 13:25:07.384623 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"a3200fab-2f90-472d-a9d8-7d75debdc065","Type":"ContainerStarted","Data":"9bd422be74d451cd1b4a0a739419f34b3a844425f3ec37a64dd9b0f9e3f8822f"} Jan 22 13:25:07 crc kubenswrapper[4773]: I0122 13:25:07.406987 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=3.406967467 podStartE2EDuration="3.406967467s" podCreationTimestamp="2026-01-22 13:25:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:25:07.400844324 +0000 UTC m=+5414.978960189" watchObservedRunningTime="2026-01-22 13:25:07.406967467 +0000 UTC m=+5414.985083292" Jan 22 13:25:07 crc kubenswrapper[4773]: I0122 13:25:07.448538 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=3.448517979 podStartE2EDuration="3.448517979s" podCreationTimestamp="2026-01-22 13:25:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:25:07.445766492 +0000 UTC m=+5415.023882377" watchObservedRunningTime="2026-01-22 13:25:07.448517979 +0000 UTC m=+5415.026633804" Jan 22 13:25:07 crc kubenswrapper[4773]: I0122 13:25:07.448647 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=3.448641963 podStartE2EDuration="3.448641963s" podCreationTimestamp="2026-01-22 13:25:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 
13:25:07.428670429 +0000 UTC m=+5415.006786274" watchObservedRunningTime="2026-01-22 13:25:07.448641963 +0000 UTC m=+5415.026757788" Jan 22 13:25:08 crc kubenswrapper[4773]: I0122 13:25:08.535109 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:08 crc kubenswrapper[4773]: I0122 13:25:08.552592 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:08 crc kubenswrapper[4773]: I0122 13:25:08.560992 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:08 crc kubenswrapper[4773]: I0122 13:25:08.850554 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:08 crc kubenswrapper[4773]: I0122 13:25:08.872757 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:08 crc kubenswrapper[4773]: I0122 13:25:08.889044 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:09 crc kubenswrapper[4773]: I0122 13:25:09.573750 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:09 crc kubenswrapper[4773]: I0122 13:25:09.604959 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:09 crc kubenswrapper[4773]: I0122 13:25:09.616406 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:09 crc kubenswrapper[4773]: I0122 13:25:09.659759 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-2" Jan 22 13:25:09 crc kubenswrapper[4773]: I0122 13:25:09.668786 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1" Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.068228 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55d7ccf77f-j4spr"] Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.069609 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.072481 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.089033 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96b4s\" (UniqueName: \"kubernetes.io/projected/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-kube-api-access-96b4s\") pod \"dnsmasq-dns-55d7ccf77f-j4spr\" (UID: \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\") " pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.089118 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-config\") pod \"dnsmasq-dns-55d7ccf77f-j4spr\" (UID: \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\") " pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.089166 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-dns-svc\") pod \"dnsmasq-dns-55d7ccf77f-j4spr\" (UID: \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\") " pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.089197 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-ovsdbserver-nb\") pod \"dnsmasq-dns-55d7ccf77f-j4spr\" (UID: \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\") " pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.094045 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55d7ccf77f-j4spr"] Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.191012 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96b4s\" (UniqueName: \"kubernetes.io/projected/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-kube-api-access-96b4s\") pod \"dnsmasq-dns-55d7ccf77f-j4spr\" (UID: \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\") " pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.191138 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-config\") pod \"dnsmasq-dns-55d7ccf77f-j4spr\" (UID: \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\") " pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.191196 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-dns-svc\") pod \"dnsmasq-dns-55d7ccf77f-j4spr\" (UID: \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\") " pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.191236 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-ovsdbserver-nb\") pod \"dnsmasq-dns-55d7ccf77f-j4spr\" (UID: \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\") " pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" 
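The reconciler entries around this point walk every volume of dnsmasq-dns-55d7ccf77f-j4spr through the same three phases: operationExecutor.VerifyControllerAttachedVolume started, operationExecutor.MountVolume started, and MountVolume.SetUp succeeded (the succeeded lines follow below). A minimal sketch for pulling that lifecycle out of a kubelet log in this format, assuming one entry per line; the regexes and the helper name volume_events are illustrative, not part of kubelet:

    import re

    # Phases visible in the surrounding entries; Unmount/TearDown variants are ignored here.
    OP_RE = re.compile(r'(operationExecutor\.\w+ started|MountVolume\.SetUp succeeded)')
    # Volume names appear as escaped strings inside the message, e.g. volume \"config\".
    VOL_RE = re.compile(r'volume \\"(?P<volume>[^\\"]+)\\"')
    # The trailing pod= field is unescaped, e.g. pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr".
    POD_RE = re.compile(r'pod="(?P<pod>[^"]+)"')

    def volume_events(lines):
        """Yield (pod, phase, volume) for each reconciler entry that names all three."""
        for line in lines:
            op, vol, pod = OP_RE.search(line), VOL_RE.search(line), POD_RE.search(line)
            if op and vol and pod:
                yield pod.group("pod"), op.group(1), vol.group("volume")

Fed the entries above, this yields tuples such as ('openstack/dnsmasq-dns-55d7ccf77f-j4spr', 'operationExecutor.MountVolume started', 'config'), one per phase per volume, which makes it easy to spot a volume that never reaches SetUp succeeded.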
Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.192238 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-config\") pod \"dnsmasq-dns-55d7ccf77f-j4spr\" (UID: \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\") " pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.192409 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-ovsdbserver-nb\") pod \"dnsmasq-dns-55d7ccf77f-j4spr\" (UID: \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\") " pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.192451 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-dns-svc\") pod \"dnsmasq-dns-55d7ccf77f-j4spr\" (UID: \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\") " pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.218185 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96b4s\" (UniqueName: \"kubernetes.io/projected/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-kube-api-access-96b4s\") pod \"dnsmasq-dns-55d7ccf77f-j4spr\" (UID: \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\") " pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.388400 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.850756 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.854004 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55d7ccf77f-j4spr"] Jan 22 13:25:10 crc kubenswrapper[4773]: W0122 13:25:10.860784 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1b30f96_b5b8_49de_98b1_f4689f0c6e54.slice/crio-5b4397b2e9e68de3d8abcb772e1383fbd2a02a71be1f98cadef894179943ef40 WatchSource:0}: Error finding container 5b4397b2e9e68de3d8abcb772e1383fbd2a02a71be1f98cadef894179943ef40: Status 404 returned error can't find the container with id 5b4397b2e9e68de3d8abcb772e1383fbd2a02a71be1f98cadef894179943ef40 Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.872606 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:10 crc kubenswrapper[4773]: I0122 13:25:10.893695 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:11 crc kubenswrapper[4773]: I0122 13:25:11.422732 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" event={"ID":"e1b30f96-b5b8-49de-98b1-f4689f0c6e54","Type":"ContainerStarted","Data":"629f068224d567b5a7baaa1beae0402b5ad334997225f24cb9977b75de7cdcb3"} Jan 22 13:25:11 crc kubenswrapper[4773]: I0122 13:25:11.423167 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" event={"ID":"e1b30f96-b5b8-49de-98b1-f4689f0c6e54","Type":"ContainerStarted","Data":"5b4397b2e9e68de3d8abcb772e1383fbd2a02a71be1f98cadef894179943ef40"} Jan 22 13:25:11 crc 
kubenswrapper[4773]: I0122 13:25:11.922974 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:11 crc kubenswrapper[4773]: I0122 13:25:11.933910 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:11 crc kubenswrapper[4773]: I0122 13:25:11.945095 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:11 crc kubenswrapper[4773]: I0122 13:25:11.968901 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Jan 22 13:25:11 crc kubenswrapper[4773]: I0122 13:25:11.977671 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2" Jan 22 13:25:11 crc kubenswrapper[4773]: I0122 13:25:11.992024 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1" Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.268550 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55d7ccf77f-j4spr"] Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.288316 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78885857fc-lf9xc"] Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.289596 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.291501 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.304058 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78885857fc-lf9xc"] Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.432707 4773 generic.go:334] "Generic (PLEG): container finished" podID="e1b30f96-b5b8-49de-98b1-f4689f0c6e54" containerID="629f068224d567b5a7baaa1beae0402b5ad334997225f24cb9977b75de7cdcb3" exitCode=0 Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.432936 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" event={"ID":"e1b30f96-b5b8-49de-98b1-f4689f0c6e54","Type":"ContainerDied","Data":"629f068224d567b5a7baaa1beae0402b5ad334997225f24cb9977b75de7cdcb3"} Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.452619 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-ovsdbserver-nb\") pod \"dnsmasq-dns-78885857fc-lf9xc\" (UID: \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.452675 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-ovsdbserver-sb\") pod \"dnsmasq-dns-78885857fc-lf9xc\" (UID: \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.452699 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtdnz\" (UniqueName: \"kubernetes.io/projected/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-kube-api-access-jtdnz\") pod \"dnsmasq-dns-78885857fc-lf9xc\" (UID: 
\"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.452973 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-config\") pod \"dnsmasq-dns-78885857fc-lf9xc\" (UID: \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.453038 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-dns-svc\") pod \"dnsmasq-dns-78885857fc-lf9xc\" (UID: \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.567512 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-config\") pod \"dnsmasq-dns-78885857fc-lf9xc\" (UID: \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.567582 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-dns-svc\") pod \"dnsmasq-dns-78885857fc-lf9xc\" (UID: \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.567838 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-ovsdbserver-nb\") pod \"dnsmasq-dns-78885857fc-lf9xc\" (UID: \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.567887 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-ovsdbserver-sb\") pod \"dnsmasq-dns-78885857fc-lf9xc\" (UID: \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.567920 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtdnz\" (UniqueName: \"kubernetes.io/projected/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-kube-api-access-jtdnz\") pod \"dnsmasq-dns-78885857fc-lf9xc\" (UID: \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.572355 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-ovsdbserver-nb\") pod \"dnsmasq-dns-78885857fc-lf9xc\" (UID: \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.579860 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-config\") pod \"dnsmasq-dns-78885857fc-lf9xc\" (UID: \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 
22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.588977 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-ovsdbserver-sb\") pod \"dnsmasq-dns-78885857fc-lf9xc\" (UID: \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.590534 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-dns-svc\") pod \"dnsmasq-dns-78885857fc-lf9xc\" (UID: \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.595162 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtdnz\" (UniqueName: \"kubernetes.io/projected/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-kube-api-access-jtdnz\") pod \"dnsmasq-dns-78885857fc-lf9xc\" (UID: \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:12 crc kubenswrapper[4773]: I0122 13:25:12.610263 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:13 crc kubenswrapper[4773]: I0122 13:25:13.041145 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78885857fc-lf9xc"] Jan 22 13:25:13 crc kubenswrapper[4773]: W0122 13:25:13.043868 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fba5bb8_8cc7_4ee5_a191_18ad766bf87b.slice/crio-66085ad4c04869218b265219c37956d269665f8826138746cc5232f0a10e3ecf WatchSource:0}: Error finding container 66085ad4c04869218b265219c37956d269665f8826138746cc5232f0a10e3ecf: Status 404 returned error can't find the container with id 66085ad4c04869218b265219c37956d269665f8826138746cc5232f0a10e3ecf Jan 22 13:25:13 crc kubenswrapper[4773]: I0122 13:25:13.442800 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" event={"ID":"e1b30f96-b5b8-49de-98b1-f4689f0c6e54","Type":"ContainerStarted","Data":"6b88c20c5f1beb90868b6bb01d2e89cb866df9e5f8c8c1be67e2255291db0fcf"} Jan 22 13:25:13 crc kubenswrapper[4773]: I0122 13:25:13.443151 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" Jan 22 13:25:13 crc kubenswrapper[4773]: I0122 13:25:13.442920 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" podUID="e1b30f96-b5b8-49de-98b1-f4689f0c6e54" containerName="dnsmasq-dns" containerID="cri-o://6b88c20c5f1beb90868b6bb01d2e89cb866df9e5f8c8c1be67e2255291db0fcf" gracePeriod=10 Jan 22 13:25:13 crc kubenswrapper[4773]: I0122 13:25:13.444895 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78885857fc-lf9xc" event={"ID":"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b","Type":"ContainerStarted","Data":"1718aa9395072deb53bd3750ede88c266ed64896ea7f0c9d9b0614dcb7dce59f"} Jan 22 13:25:13 crc kubenswrapper[4773]: I0122 13:25:13.444925 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78885857fc-lf9xc" event={"ID":"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b","Type":"ContainerStarted","Data":"66085ad4c04869218b265219c37956d269665f8826138746cc5232f0a10e3ecf"} Jan 22 13:25:13 crc kubenswrapper[4773]: 
I0122 13:25:13.464732 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" podStartSLOduration=3.464702457 podStartE2EDuration="3.464702457s" podCreationTimestamp="2026-01-22 13:25:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:25:13.460465237 +0000 UTC m=+5421.038581072" watchObservedRunningTime="2026-01-22 13:25:13.464702457 +0000 UTC m=+5421.042818282" Jan 22 13:25:13 crc kubenswrapper[4773]: I0122 13:25:13.752089 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.406945 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.478891 4773 generic.go:334] "Generic (PLEG): container finished" podID="1fba5bb8-8cc7-4ee5-a191-18ad766bf87b" containerID="1718aa9395072deb53bd3750ede88c266ed64896ea7f0c9d9b0614dcb7dce59f" exitCode=0 Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.478985 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78885857fc-lf9xc" event={"ID":"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b","Type":"ContainerDied","Data":"1718aa9395072deb53bd3750ede88c266ed64896ea7f0c9d9b0614dcb7dce59f"} Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.481143 4773 generic.go:334] "Generic (PLEG): container finished" podID="e1b30f96-b5b8-49de-98b1-f4689f0c6e54" containerID="6b88c20c5f1beb90868b6bb01d2e89cb866df9e5f8c8c1be67e2255291db0fcf" exitCode=0 Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.481189 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" event={"ID":"e1b30f96-b5b8-49de-98b1-f4689f0c6e54","Type":"ContainerDied","Data":"6b88c20c5f1beb90868b6bb01d2e89cb866df9e5f8c8c1be67e2255291db0fcf"} Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.481216 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" event={"ID":"e1b30f96-b5b8-49de-98b1-f4689f0c6e54","Type":"ContainerDied","Data":"5b4397b2e9e68de3d8abcb772e1383fbd2a02a71be1f98cadef894179943ef40"} Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.481235 4773 scope.go:117] "RemoveContainer" containerID="6b88c20c5f1beb90868b6bb01d2e89cb866df9e5f8c8c1be67e2255291db0fcf" Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.481381 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55d7ccf77f-j4spr" Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.508022 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-dns-svc\") pod \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\" (UID: \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\") " Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.508084 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-ovsdbserver-nb\") pod \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\" (UID: \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\") " Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.508221 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96b4s\" (UniqueName: \"kubernetes.io/projected/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-kube-api-access-96b4s\") pod \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\" (UID: \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\") " Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.508244 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-config\") pod \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\" (UID: \"e1b30f96-b5b8-49de-98b1-f4689f0c6e54\") " Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.518680 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-kube-api-access-96b4s" (OuterVolumeSpecName: "kube-api-access-96b4s") pod "e1b30f96-b5b8-49de-98b1-f4689f0c6e54" (UID: "e1b30f96-b5b8-49de-98b1-f4689f0c6e54"). InnerVolumeSpecName "kube-api-access-96b4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.556920 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e1b30f96-b5b8-49de-98b1-f4689f0c6e54" (UID: "e1b30f96-b5b8-49de-98b1-f4689f0c6e54"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.566492 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e1b30f96-b5b8-49de-98b1-f4689f0c6e54" (UID: "e1b30f96-b5b8-49de-98b1-f4689f0c6e54"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.571146 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-config" (OuterVolumeSpecName: "config") pod "e1b30f96-b5b8-49de-98b1-f4689f0c6e54" (UID: "e1b30f96-b5b8-49de-98b1-f4689f0c6e54"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.611135 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.611178 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96b4s\" (UniqueName: \"kubernetes.io/projected/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-kube-api-access-96b4s\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.611194 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.611203 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1b30f96-b5b8-49de-98b1-f4689f0c6e54-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.627768 4773 scope.go:117] "RemoveContainer" containerID="629f068224d567b5a7baaa1beae0402b5ad334997225f24cb9977b75de7cdcb3" Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.649482 4773 scope.go:117] "RemoveContainer" containerID="6b88c20c5f1beb90868b6bb01d2e89cb866df9e5f8c8c1be67e2255291db0fcf" Jan 22 13:25:14 crc kubenswrapper[4773]: E0122 13:25:14.650200 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b88c20c5f1beb90868b6bb01d2e89cb866df9e5f8c8c1be67e2255291db0fcf\": container with ID starting with 6b88c20c5f1beb90868b6bb01d2e89cb866df9e5f8c8c1be67e2255291db0fcf not found: ID does not exist" containerID="6b88c20c5f1beb90868b6bb01d2e89cb866df9e5f8c8c1be67e2255291db0fcf" Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.650261 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b88c20c5f1beb90868b6bb01d2e89cb866df9e5f8c8c1be67e2255291db0fcf"} err="failed to get container status \"6b88c20c5f1beb90868b6bb01d2e89cb866df9e5f8c8c1be67e2255291db0fcf\": rpc error: code = NotFound desc = could not find container \"6b88c20c5f1beb90868b6bb01d2e89cb866df9e5f8c8c1be67e2255291db0fcf\": container with ID starting with 6b88c20c5f1beb90868b6bb01d2e89cb866df9e5f8c8c1be67e2255291db0fcf not found: ID does not exist" Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.650295 4773 scope.go:117] "RemoveContainer" containerID="629f068224d567b5a7baaa1beae0402b5ad334997225f24cb9977b75de7cdcb3" Jan 22 13:25:14 crc kubenswrapper[4773]: E0122 13:25:14.650661 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"629f068224d567b5a7baaa1beae0402b5ad334997225f24cb9977b75de7cdcb3\": container with ID starting with 629f068224d567b5a7baaa1beae0402b5ad334997225f24cb9977b75de7cdcb3 not found: ID does not exist" containerID="629f068224d567b5a7baaa1beae0402b5ad334997225f24cb9977b75de7cdcb3" Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.650689 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"629f068224d567b5a7baaa1beae0402b5ad334997225f24cb9977b75de7cdcb3"} err="failed to get container status \"629f068224d567b5a7baaa1beae0402b5ad334997225f24cb9977b75de7cdcb3\": rpc error: code = NotFound desc = could not 
find container \"629f068224d567b5a7baaa1beae0402b5ad334997225f24cb9977b75de7cdcb3\": container with ID starting with 629f068224d567b5a7baaa1beae0402b5ad334997225f24cb9977b75de7cdcb3 not found: ID does not exist" Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.803728 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55d7ccf77f-j4spr"] Jan 22 13:25:14 crc kubenswrapper[4773]: I0122 13:25:14.810236 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55d7ccf77f-j4spr"] Jan 22 13:25:15 crc kubenswrapper[4773]: I0122 13:25:15.492597 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78885857fc-lf9xc" event={"ID":"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b","Type":"ContainerStarted","Data":"eecca40acdc37965ab1832b81d359abdee0bd58e08f07e13a06f245b5c8b37b9"} Jan 22 13:25:15 crc kubenswrapper[4773]: I0122 13:25:15.492780 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:16 crc kubenswrapper[4773]: I0122 13:25:16.671250 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1b30f96-b5b8-49de-98b1-f4689f0c6e54" path="/var/lib/kubelet/pods/e1b30f96-b5b8-49de-98b1-f4689f0c6e54/volumes" Jan 22 13:25:16 crc kubenswrapper[4773]: I0122 13:25:16.836672 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-78885857fc-lf9xc" podStartSLOduration=4.8366444600000005 podStartE2EDuration="4.83664446s" podCreationTimestamp="2026-01-22 13:25:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:25:15.520475511 +0000 UTC m=+5423.098591346" watchObservedRunningTime="2026-01-22 13:25:16.83664446 +0000 UTC m=+5424.414760305" Jan 22 13:25:16 crc kubenswrapper[4773]: I0122 13:25:16.847877 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"] Jan 22 13:25:16 crc kubenswrapper[4773]: E0122 13:25:16.848824 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1b30f96-b5b8-49de-98b1-f4689f0c6e54" containerName="init" Jan 22 13:25:16 crc kubenswrapper[4773]: I0122 13:25:16.848849 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1b30f96-b5b8-49de-98b1-f4689f0c6e54" containerName="init" Jan 22 13:25:16 crc kubenswrapper[4773]: E0122 13:25:16.848905 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1b30f96-b5b8-49de-98b1-f4689f0c6e54" containerName="dnsmasq-dns" Jan 22 13:25:16 crc kubenswrapper[4773]: I0122 13:25:16.848919 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1b30f96-b5b8-49de-98b1-f4689f0c6e54" containerName="dnsmasq-dns" Jan 22 13:25:16 crc kubenswrapper[4773]: I0122 13:25:16.849547 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1b30f96-b5b8-49de-98b1-f4689f0c6e54" containerName="dnsmasq-dns" Jan 22 13:25:16 crc kubenswrapper[4773]: I0122 13:25:16.850855 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Jan 22 13:25:16 crc kubenswrapper[4773]: I0122 13:25:16.855335 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert" Jan 22 13:25:16 crc kubenswrapper[4773]: I0122 13:25:16.869466 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Jan 22 13:25:16 crc kubenswrapper[4773]: I0122 13:25:16.956688 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-32d72340-e7f5-40d5-8b6f-a0e76a487654\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-32d72340-e7f5-40d5-8b6f-a0e76a487654\") pod \"ovn-copy-data\" (UID: \"938f08ac-3fcb-4667-8af5-61f91610e9df\") " pod="openstack/ovn-copy-data" Jan 22 13:25:16 crc kubenswrapper[4773]: I0122 13:25:16.956769 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/938f08ac-3fcb-4667-8af5-61f91610e9df-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"938f08ac-3fcb-4667-8af5-61f91610e9df\") " pod="openstack/ovn-copy-data" Jan 22 13:25:16 crc kubenswrapper[4773]: I0122 13:25:16.956857 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzfxw\" (UniqueName: \"kubernetes.io/projected/938f08ac-3fcb-4667-8af5-61f91610e9df-kube-api-access-tzfxw\") pod \"ovn-copy-data\" (UID: \"938f08ac-3fcb-4667-8af5-61f91610e9df\") " pod="openstack/ovn-copy-data" Jan 22 13:25:17 crc kubenswrapper[4773]: I0122 13:25:17.058465 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzfxw\" (UniqueName: \"kubernetes.io/projected/938f08ac-3fcb-4667-8af5-61f91610e9df-kube-api-access-tzfxw\") pod \"ovn-copy-data\" (UID: \"938f08ac-3fcb-4667-8af5-61f91610e9df\") " pod="openstack/ovn-copy-data" Jan 22 13:25:17 crc kubenswrapper[4773]: I0122 13:25:17.058542 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-32d72340-e7f5-40d5-8b6f-a0e76a487654\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-32d72340-e7f5-40d5-8b6f-a0e76a487654\") pod \"ovn-copy-data\" (UID: \"938f08ac-3fcb-4667-8af5-61f91610e9df\") " pod="openstack/ovn-copy-data" Jan 22 13:25:17 crc kubenswrapper[4773]: I0122 13:25:17.058600 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/938f08ac-3fcb-4667-8af5-61f91610e9df-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"938f08ac-3fcb-4667-8af5-61f91610e9df\") " pod="openstack/ovn-copy-data" Jan 22 13:25:17 crc kubenswrapper[4773]: I0122 13:25:17.061078 4773 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
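The pod_startup_latency_tracker entries in this section ("Observed pod startup duration") carry two figures per pod: podStartSLOduration and podStartE2EDuration. A small sketch that collects them, again assuming one entry per line; parse_startup_durations is a hypothetical helper, and the regex assumes only the key=value layout visible in these entries:

    import re

    STARTUP_RE = re.compile(
        r'Observed pod startup duration" pod="(?P<pod>[^"]+)"'
        r' podStartSLOduration=(?P<slo>[\d.]+)'
        r' podStartE2EDuration="(?P<e2e>[\d.]+)s"'
    )

    def parse_startup_durations(lines):
        """Map pod name -> (SLO duration, end-to-end duration), both in seconds."""
        out = {}
        for line in lines:
            m = STARTUP_RE.search(line)
            if m:
                out[m.group("pod")] = (float(m.group("slo")), float(m.group("e2e")))
        return out

For the ovn-copy-data tracker entry a few lines below, this gives (3.999726572, 4.558153854); the ~0.56 s gap between the two matches the firstStartedPulling/lastFinishedPulling interval reported in the same entry, consistent with the SLO figure excluding image-pull time.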
Jan 22 13:25:17 crc kubenswrapper[4773]: I0122 13:25:17.061116 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-32d72340-e7f5-40d5-8b6f-a0e76a487654\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-32d72340-e7f5-40d5-8b6f-a0e76a487654\") pod \"ovn-copy-data\" (UID: \"938f08ac-3fcb-4667-8af5-61f91610e9df\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6c3b7411c99235148d24db8be9276371fd6c36ed4e150b9dc65eed6fc3dbc2ce/globalmount\"" pod="openstack/ovn-copy-data" Jan 22 13:25:17 crc kubenswrapper[4773]: I0122 13:25:17.074517 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/938f08ac-3fcb-4667-8af5-61f91610e9df-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"938f08ac-3fcb-4667-8af5-61f91610e9df\") " pod="openstack/ovn-copy-data" Jan 22 13:25:17 crc kubenswrapper[4773]: I0122 13:25:17.076542 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzfxw\" (UniqueName: \"kubernetes.io/projected/938f08ac-3fcb-4667-8af5-61f91610e9df-kube-api-access-tzfxw\") pod \"ovn-copy-data\" (UID: \"938f08ac-3fcb-4667-8af5-61f91610e9df\") " pod="openstack/ovn-copy-data" Jan 22 13:25:17 crc kubenswrapper[4773]: I0122 13:25:17.121801 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-32d72340-e7f5-40d5-8b6f-a0e76a487654\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-32d72340-e7f5-40d5-8b6f-a0e76a487654\") pod \"ovn-copy-data\" (UID: \"938f08ac-3fcb-4667-8af5-61f91610e9df\") " pod="openstack/ovn-copy-data" Jan 22 13:25:17 crc kubenswrapper[4773]: I0122 13:25:17.192354 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Jan 22 13:25:17 crc kubenswrapper[4773]: I0122 13:25:17.907673 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Jan 22 13:25:17 crc kubenswrapper[4773]: W0122 13:25:17.911749 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod938f08ac_3fcb_4667_8af5_61f91610e9df.slice/crio-4fd69411df887dd8d661928114c2c404feebf7d69b9c272e0fe3db986b801376 WatchSource:0}: Error finding container 4fd69411df887dd8d661928114c2c404feebf7d69b9c272e0fe3db986b801376: Status 404 returned error can't find the container with id 4fd69411df887dd8d661928114c2c404feebf7d69b9c272e0fe3db986b801376 Jan 22 13:25:18 crc kubenswrapper[4773]: I0122 13:25:18.527729 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"938f08ac-3fcb-4667-8af5-61f91610e9df","Type":"ContainerStarted","Data":"4fd69411df887dd8d661928114c2c404feebf7d69b9c272e0fe3db986b801376"} Jan 22 13:25:19 crc kubenswrapper[4773]: I0122 13:25:19.537154 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"938f08ac-3fcb-4667-8af5-61f91610e9df","Type":"ContainerStarted","Data":"1af58a65f114fd81891be0393bf9cb4d322789b5c642ba6da987551569c81c4f"} Jan 22 13:25:19 crc kubenswrapper[4773]: I0122 13:25:19.558176 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=3.999726572 podStartE2EDuration="4.558153854s" podCreationTimestamp="2026-01-22 13:25:15 +0000 UTC" firstStartedPulling="2026-01-22 13:25:17.916940301 +0000 UTC m=+5425.495056126" lastFinishedPulling="2026-01-22 13:25:18.475367563 +0000 UTC m=+5426.053483408" observedRunningTime="2026-01-22 13:25:19.551503497 +0000 UTC m=+5427.129619342" watchObservedRunningTime="2026-01-22 13:25:19.558153854 +0000 UTC m=+5427.136269679" Jan 22 13:25:22 crc kubenswrapper[4773]: I0122 13:25:22.612721 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:22 crc kubenswrapper[4773]: I0122 13:25:22.679081 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-msmv4"] Jan 22 13:25:22 crc kubenswrapper[4773]: I0122 13:25:22.679336 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-699964fbc-msmv4" podUID="c8803480-c4a7-4eee-ac77-a6d9838af931" containerName="dnsmasq-dns" containerID="cri-o://7b7ad8b0e4ed026a75819ac37467ffc8b0ea9b0edb15ed266bb7369204df7860" gracePeriod=10 Jan 22 13:25:23 crc kubenswrapper[4773]: I0122 13:25:23.588309 4773 generic.go:334] "Generic (PLEG): container finished" podID="c8803480-c4a7-4eee-ac77-a6d9838af931" containerID="7b7ad8b0e4ed026a75819ac37467ffc8b0ea9b0edb15ed266bb7369204df7860" exitCode=0 Jan 22 13:25:23 crc kubenswrapper[4773]: I0122 13:25:23.588352 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-msmv4" event={"ID":"c8803480-c4a7-4eee-ac77-a6d9838af931","Type":"ContainerDied","Data":"7b7ad8b0e4ed026a75819ac37467ffc8b0ea9b0edb15ed266bb7369204df7860"} Jan 22 13:25:23 crc kubenswrapper[4773]: I0122 13:25:23.779781 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-699964fbc-msmv4" Jan 22 13:25:23 crc kubenswrapper[4773]: I0122 13:25:23.898074 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8bgm\" (UniqueName: \"kubernetes.io/projected/c8803480-c4a7-4eee-ac77-a6d9838af931-kube-api-access-f8bgm\") pod \"c8803480-c4a7-4eee-ac77-a6d9838af931\" (UID: \"c8803480-c4a7-4eee-ac77-a6d9838af931\") " Jan 22 13:25:23 crc kubenswrapper[4773]: I0122 13:25:23.898150 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8803480-c4a7-4eee-ac77-a6d9838af931-config\") pod \"c8803480-c4a7-4eee-ac77-a6d9838af931\" (UID: \"c8803480-c4a7-4eee-ac77-a6d9838af931\") " Jan 22 13:25:23 crc kubenswrapper[4773]: I0122 13:25:23.898182 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c8803480-c4a7-4eee-ac77-a6d9838af931-dns-svc\") pod \"c8803480-c4a7-4eee-ac77-a6d9838af931\" (UID: \"c8803480-c4a7-4eee-ac77-a6d9838af931\") " Jan 22 13:25:23 crc kubenswrapper[4773]: I0122 13:25:23.904125 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8803480-c4a7-4eee-ac77-a6d9838af931-kube-api-access-f8bgm" (OuterVolumeSpecName: "kube-api-access-f8bgm") pod "c8803480-c4a7-4eee-ac77-a6d9838af931" (UID: "c8803480-c4a7-4eee-ac77-a6d9838af931"). InnerVolumeSpecName "kube-api-access-f8bgm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:25:23 crc kubenswrapper[4773]: I0122 13:25:23.940435 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8803480-c4a7-4eee-ac77-a6d9838af931-config" (OuterVolumeSpecName: "config") pod "c8803480-c4a7-4eee-ac77-a6d9838af931" (UID: "c8803480-c4a7-4eee-ac77-a6d9838af931"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:25:23 crc kubenswrapper[4773]: I0122 13:25:23.942803 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8803480-c4a7-4eee-ac77-a6d9838af931-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c8803480-c4a7-4eee-ac77-a6d9838af931" (UID: "c8803480-c4a7-4eee-ac77-a6d9838af931"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:25:24 crc kubenswrapper[4773]: I0122 13:25:24.000707 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8bgm\" (UniqueName: \"kubernetes.io/projected/c8803480-c4a7-4eee-ac77-a6d9838af931-kube-api-access-f8bgm\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:24 crc kubenswrapper[4773]: I0122 13:25:24.000735 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8803480-c4a7-4eee-ac77-a6d9838af931-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:24 crc kubenswrapper[4773]: I0122 13:25:24.000748 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c8803480-c4a7-4eee-ac77-a6d9838af931-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:24 crc kubenswrapper[4773]: I0122 13:25:24.597313 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-msmv4" event={"ID":"c8803480-c4a7-4eee-ac77-a6d9838af931","Type":"ContainerDied","Data":"2e3eefb65927ae0358ae0571853995b3ed6ff42e27fe217e85192de1950f18b7"} Jan 22 13:25:24 crc kubenswrapper[4773]: I0122 13:25:24.597656 4773 scope.go:117] "RemoveContainer" containerID="7b7ad8b0e4ed026a75819ac37467ffc8b0ea9b0edb15ed266bb7369204df7860" Jan 22 13:25:24 crc kubenswrapper[4773]: I0122 13:25:24.597364 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-699964fbc-msmv4" Jan 22 13:25:24 crc kubenswrapper[4773]: I0122 13:25:24.628774 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-msmv4"] Jan 22 13:25:24 crc kubenswrapper[4773]: I0122 13:25:24.629946 4773 scope.go:117] "RemoveContainer" containerID="fb052a5190d910ebc6abffeb7f24307f0c8744122b6dd1c08f6892358f98385b" Jan 22 13:25:24 crc kubenswrapper[4773]: I0122 13:25:24.635138 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-msmv4"] Jan 22 13:25:24 crc kubenswrapper[4773]: I0122 13:25:24.669428 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8803480-c4a7-4eee-ac77-a6d9838af931" path="/var/lib/kubelet/pods/c8803480-c4a7-4eee-ac77-a6d9838af931/volumes" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.563943 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Jan 22 13:25:25 crc kubenswrapper[4773]: E0122 13:25:25.564401 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8803480-c4a7-4eee-ac77-a6d9838af931" containerName="dnsmasq-dns" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.564417 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8803480-c4a7-4eee-ac77-a6d9838af931" containerName="dnsmasq-dns" Jan 22 13:25:25 crc kubenswrapper[4773]: E0122 13:25:25.564428 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8803480-c4a7-4eee-ac77-a6d9838af931" containerName="init" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.564436 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8803480-c4a7-4eee-ac77-a6d9838af931" containerName="init" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.564731 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8803480-c4a7-4eee-ac77-a6d9838af931" containerName="dnsmasq-dns" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.566076 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.571970 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.572263 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-96bwg" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.572543 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.573705 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.577599 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.733546 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.733652 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.733871 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.733947 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-scripts\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.734136 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bq6m9\" (UniqueName: \"kubernetes.io/projected/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-kube-api-access-bq6m9\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.734203 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.734246 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-config\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: 
I0122 13:25:25.837134 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.837763 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.837800 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-scripts\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.839224 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-scripts\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.839355 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bq6m9\" (UniqueName: \"kubernetes.io/projected/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-kube-api-access-bq6m9\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.839834 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.839861 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-config\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.840066 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.840908 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.841322 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-config\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.843313 4773 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.843316 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.845986 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.861285 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bq6m9\" (UniqueName: \"kubernetes.io/projected/47b6e6f2-15bb-47c7-bf7b-b4c925e27d36-kube-api-access-bq6m9\") pod \"ovn-northd-0\" (UID: \"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36\") " pod="openstack/ovn-northd-0" Jan 22 13:25:25 crc kubenswrapper[4773]: I0122 13:25:25.892884 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Jan 22 13:25:26 crc kubenswrapper[4773]: I0122 13:25:26.307953 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Jan 22 13:25:26 crc kubenswrapper[4773]: W0122 13:25:26.313359 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47b6e6f2_15bb_47c7_bf7b_b4c925e27d36.slice/crio-30f585df35f1bce9e7e63b36e3ec3eb59ea5bacec4300838673c778a7d3b9aa2 WatchSource:0}: Error finding container 30f585df35f1bce9e7e63b36e3ec3eb59ea5bacec4300838673c778a7d3b9aa2: Status 404 returned error can't find the container with id 30f585df35f1bce9e7e63b36e3ec3eb59ea5bacec4300838673c778a7d3b9aa2 Jan 22 13:25:26 crc kubenswrapper[4773]: I0122 13:25:26.618174 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36","Type":"ContainerStarted","Data":"e50e69e7e93c90a35d03bc5ff7d464d1987901c993e8eda7123eb580e0809e57"} Jan 22 13:25:26 crc kubenswrapper[4773]: I0122 13:25:26.618227 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36","Type":"ContainerStarted","Data":"30f585df35f1bce9e7e63b36e3ec3eb59ea5bacec4300838673c778a7d3b9aa2"} Jan 22 13:25:27 crc kubenswrapper[4773]: I0122 13:25:27.627071 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"47b6e6f2-15bb-47c7-bf7b-b4c925e27d36","Type":"ContainerStarted","Data":"9e6df97db3cc015341c3a4bb12fe8accd710233aec1d0289e4dfbdcdffae6802"} Jan 22 13:25:27 crc kubenswrapper[4773]: I0122 13:25:27.627516 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Jan 22 13:25:27 crc kubenswrapper[4773]: I0122 13:25:27.652516 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.652493868 podStartE2EDuration="2.652493868s" podCreationTimestamp="2026-01-22 
13:25:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:25:27.651626073 +0000 UTC m=+5435.229741908" watchObservedRunningTime="2026-01-22 13:25:27.652493868 +0000 UTC m=+5435.230609703" Jan 22 13:25:30 crc kubenswrapper[4773]: I0122 13:25:30.751754 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-9psgh"] Jan 22 13:25:30 crc kubenswrapper[4773]: I0122 13:25:30.754673 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-9psgh" Jan 22 13:25:30 crc kubenswrapper[4773]: I0122 13:25:30.777225 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-9psgh"] Jan 22 13:25:30 crc kubenswrapper[4773]: I0122 13:25:30.863597 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-c30b-account-create-update-gsngt"] Jan 22 13:25:30 crc kubenswrapper[4773]: I0122 13:25:30.864975 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c30b-account-create-update-gsngt" Jan 22 13:25:30 crc kubenswrapper[4773]: I0122 13:25:30.874569 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Jan 22 13:25:30 crc kubenswrapper[4773]: I0122 13:25:30.881230 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c30b-account-create-update-gsngt"] Jan 22 13:25:30 crc kubenswrapper[4773]: I0122 13:25:30.926213 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96667553-15cb-4ca6-88e4-2a1cd4da88df-operator-scripts\") pod \"keystone-db-create-9psgh\" (UID: \"96667553-15cb-4ca6-88e4-2a1cd4da88df\") " pod="openstack/keystone-db-create-9psgh" Jan 22 13:25:30 crc kubenswrapper[4773]: I0122 13:25:30.926546 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrj76\" (UniqueName: \"kubernetes.io/projected/96667553-15cb-4ca6-88e4-2a1cd4da88df-kube-api-access-zrj76\") pod \"keystone-db-create-9psgh\" (UID: \"96667553-15cb-4ca6-88e4-2a1cd4da88df\") " pod="openstack/keystone-db-create-9psgh" Jan 22 13:25:31 crc kubenswrapper[4773]: I0122 13:25:31.027644 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e821c273-29fb-417c-a66c-e216a2b67666-operator-scripts\") pod \"keystone-c30b-account-create-update-gsngt\" (UID: \"e821c273-29fb-417c-a66c-e216a2b67666\") " pod="openstack/keystone-c30b-account-create-update-gsngt" Jan 22 13:25:31 crc kubenswrapper[4773]: I0122 13:25:31.027729 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrj76\" (UniqueName: \"kubernetes.io/projected/96667553-15cb-4ca6-88e4-2a1cd4da88df-kube-api-access-zrj76\") pod \"keystone-db-create-9psgh\" (UID: \"96667553-15cb-4ca6-88e4-2a1cd4da88df\") " pod="openstack/keystone-db-create-9psgh" Jan 22 13:25:31 crc kubenswrapper[4773]: I0122 13:25:31.027827 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bnvn8\" (UniqueName: \"kubernetes.io/projected/e821c273-29fb-417c-a66c-e216a2b67666-kube-api-access-bnvn8\") pod \"keystone-c30b-account-create-update-gsngt\" (UID: \"e821c273-29fb-417c-a66c-e216a2b67666\") " 
pod="openstack/keystone-c30b-account-create-update-gsngt" Jan 22 13:25:31 crc kubenswrapper[4773]: I0122 13:25:31.027856 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96667553-15cb-4ca6-88e4-2a1cd4da88df-operator-scripts\") pod \"keystone-db-create-9psgh\" (UID: \"96667553-15cb-4ca6-88e4-2a1cd4da88df\") " pod="openstack/keystone-db-create-9psgh" Jan 22 13:25:31 crc kubenswrapper[4773]: I0122 13:25:31.028735 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96667553-15cb-4ca6-88e4-2a1cd4da88df-operator-scripts\") pod \"keystone-db-create-9psgh\" (UID: \"96667553-15cb-4ca6-88e4-2a1cd4da88df\") " pod="openstack/keystone-db-create-9psgh" Jan 22 13:25:31 crc kubenswrapper[4773]: I0122 13:25:31.046390 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrj76\" (UniqueName: \"kubernetes.io/projected/96667553-15cb-4ca6-88e4-2a1cd4da88df-kube-api-access-zrj76\") pod \"keystone-db-create-9psgh\" (UID: \"96667553-15cb-4ca6-88e4-2a1cd4da88df\") " pod="openstack/keystone-db-create-9psgh" Jan 22 13:25:31 crc kubenswrapper[4773]: I0122 13:25:31.079855 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-9psgh" Jan 22 13:25:31 crc kubenswrapper[4773]: I0122 13:25:31.131440 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bnvn8\" (UniqueName: \"kubernetes.io/projected/e821c273-29fb-417c-a66c-e216a2b67666-kube-api-access-bnvn8\") pod \"keystone-c30b-account-create-update-gsngt\" (UID: \"e821c273-29fb-417c-a66c-e216a2b67666\") " pod="openstack/keystone-c30b-account-create-update-gsngt" Jan 22 13:25:31 crc kubenswrapper[4773]: I0122 13:25:31.131550 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e821c273-29fb-417c-a66c-e216a2b67666-operator-scripts\") pod \"keystone-c30b-account-create-update-gsngt\" (UID: \"e821c273-29fb-417c-a66c-e216a2b67666\") " pod="openstack/keystone-c30b-account-create-update-gsngt" Jan 22 13:25:31 crc kubenswrapper[4773]: I0122 13:25:31.132785 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e821c273-29fb-417c-a66c-e216a2b67666-operator-scripts\") pod \"keystone-c30b-account-create-update-gsngt\" (UID: \"e821c273-29fb-417c-a66c-e216a2b67666\") " pod="openstack/keystone-c30b-account-create-update-gsngt" Jan 22 13:25:31 crc kubenswrapper[4773]: I0122 13:25:31.150732 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bnvn8\" (UniqueName: \"kubernetes.io/projected/e821c273-29fb-417c-a66c-e216a2b67666-kube-api-access-bnvn8\") pod \"keystone-c30b-account-create-update-gsngt\" (UID: \"e821c273-29fb-417c-a66c-e216a2b67666\") " pod="openstack/keystone-c30b-account-create-update-gsngt" Jan 22 13:25:31 crc kubenswrapper[4773]: I0122 13:25:31.209795 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-c30b-account-create-update-gsngt" Jan 22 13:25:31 crc kubenswrapper[4773]: I0122 13:25:31.551261 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-9psgh"] Jan 22 13:25:31 crc kubenswrapper[4773]: I0122 13:25:31.625571 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c30b-account-create-update-gsngt"] Jan 22 13:25:31 crc kubenswrapper[4773]: W0122 13:25:31.644476 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode821c273_29fb_417c_a66c_e216a2b67666.slice/crio-012a2da0b5b754e651e8f4f8216f28d983794333c3ff04626ab67f5dcf5233cb WatchSource:0}: Error finding container 012a2da0b5b754e651e8f4f8216f28d983794333c3ff04626ab67f5dcf5233cb: Status 404 returned error can't find the container with id 012a2da0b5b754e651e8f4f8216f28d983794333c3ff04626ab67f5dcf5233cb Jan 22 13:25:31 crc kubenswrapper[4773]: I0122 13:25:31.658108 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c30b-account-create-update-gsngt" event={"ID":"e821c273-29fb-417c-a66c-e216a2b67666","Type":"ContainerStarted","Data":"012a2da0b5b754e651e8f4f8216f28d983794333c3ff04626ab67f5dcf5233cb"} Jan 22 13:25:31 crc kubenswrapper[4773]: I0122 13:25:31.659240 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-9psgh" event={"ID":"96667553-15cb-4ca6-88e4-2a1cd4da88df","Type":"ContainerStarted","Data":"c6ceb5b879312a8704776c09d15eb8894a84671ca0e91616673aa4635bb9dbb8"} Jan 22 13:25:32 crc kubenswrapper[4773]: I0122 13:25:32.668581 4773 generic.go:334] "Generic (PLEG): container finished" podID="e821c273-29fb-417c-a66c-e216a2b67666" containerID="81bcf52f424f152c063a670f1d4cf1b5e0fd41058217f011784e661ffdc406a2" exitCode=0 Jan 22 13:25:32 crc kubenswrapper[4773]: I0122 13:25:32.668652 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c30b-account-create-update-gsngt" event={"ID":"e821c273-29fb-417c-a66c-e216a2b67666","Type":"ContainerDied","Data":"81bcf52f424f152c063a670f1d4cf1b5e0fd41058217f011784e661ffdc406a2"} Jan 22 13:25:32 crc kubenswrapper[4773]: I0122 13:25:32.670958 4773 generic.go:334] "Generic (PLEG): container finished" podID="96667553-15cb-4ca6-88e4-2a1cd4da88df" containerID="8cbd0324163e50da599ad7216870f0f9e4a9f11145302132234de25c57346386" exitCode=0 Jan 22 13:25:32 crc kubenswrapper[4773]: I0122 13:25:32.670987 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-9psgh" event={"ID":"96667553-15cb-4ca6-88e4-2a1cd4da88df","Type":"ContainerDied","Data":"8cbd0324163e50da599ad7216870f0f9e4a9f11145302132234de25c57346386"} Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.095487 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c30b-account-create-update-gsngt" Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.101750 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-9psgh" Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.182488 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrj76\" (UniqueName: \"kubernetes.io/projected/96667553-15cb-4ca6-88e4-2a1cd4da88df-kube-api-access-zrj76\") pod \"96667553-15cb-4ca6-88e4-2a1cd4da88df\" (UID: \"96667553-15cb-4ca6-88e4-2a1cd4da88df\") " Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.182829 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bnvn8\" (UniqueName: \"kubernetes.io/projected/e821c273-29fb-417c-a66c-e216a2b67666-kube-api-access-bnvn8\") pod \"e821c273-29fb-417c-a66c-e216a2b67666\" (UID: \"e821c273-29fb-417c-a66c-e216a2b67666\") " Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.183024 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96667553-15cb-4ca6-88e4-2a1cd4da88df-operator-scripts\") pod \"96667553-15cb-4ca6-88e4-2a1cd4da88df\" (UID: \"96667553-15cb-4ca6-88e4-2a1cd4da88df\") " Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.183281 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e821c273-29fb-417c-a66c-e216a2b67666-operator-scripts\") pod \"e821c273-29fb-417c-a66c-e216a2b67666\" (UID: \"e821c273-29fb-417c-a66c-e216a2b67666\") " Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.183747 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96667553-15cb-4ca6-88e4-2a1cd4da88df-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "96667553-15cb-4ca6-88e4-2a1cd4da88df" (UID: "96667553-15cb-4ca6-88e4-2a1cd4da88df"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.184133 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96667553-15cb-4ca6-88e4-2a1cd4da88df-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.184242 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e821c273-29fb-417c-a66c-e216a2b67666-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e821c273-29fb-417c-a66c-e216a2b67666" (UID: "e821c273-29fb-417c-a66c-e216a2b67666"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.189648 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96667553-15cb-4ca6-88e4-2a1cd4da88df-kube-api-access-zrj76" (OuterVolumeSpecName: "kube-api-access-zrj76") pod "96667553-15cb-4ca6-88e4-2a1cd4da88df" (UID: "96667553-15cb-4ca6-88e4-2a1cd4da88df"). InnerVolumeSpecName "kube-api-access-zrj76". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.190468 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e821c273-29fb-417c-a66c-e216a2b67666-kube-api-access-bnvn8" (OuterVolumeSpecName: "kube-api-access-bnvn8") pod "e821c273-29fb-417c-a66c-e216a2b67666" (UID: "e821c273-29fb-417c-a66c-e216a2b67666"). 
InnerVolumeSpecName "kube-api-access-bnvn8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.285582 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e821c273-29fb-417c-a66c-e216a2b67666-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.285634 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrj76\" (UniqueName: \"kubernetes.io/projected/96667553-15cb-4ca6-88e4-2a1cd4da88df-kube-api-access-zrj76\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.285644 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bnvn8\" (UniqueName: \"kubernetes.io/projected/e821c273-29fb-417c-a66c-e216a2b67666-kube-api-access-bnvn8\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.690466 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c30b-account-create-update-gsngt" Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.690611 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c30b-account-create-update-gsngt" event={"ID":"e821c273-29fb-417c-a66c-e216a2b67666","Type":"ContainerDied","Data":"012a2da0b5b754e651e8f4f8216f28d983794333c3ff04626ab67f5dcf5233cb"} Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.690794 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="012a2da0b5b754e651e8f4f8216f28d983794333c3ff04626ab67f5dcf5233cb" Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.692879 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-9psgh" event={"ID":"96667553-15cb-4ca6-88e4-2a1cd4da88df","Type":"ContainerDied","Data":"c6ceb5b879312a8704776c09d15eb8894a84671ca0e91616673aa4635bb9dbb8"} Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.692953 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c6ceb5b879312a8704776c09d15eb8894a84671ca0e91616673aa4635bb9dbb8" Jan 22 13:25:34 crc kubenswrapper[4773]: I0122 13:25:34.692994 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-9psgh" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.351087 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-825kp"] Jan 22 13:25:36 crc kubenswrapper[4773]: E0122 13:25:36.351784 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96667553-15cb-4ca6-88e4-2a1cd4da88df" containerName="mariadb-database-create" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.351804 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="96667553-15cb-4ca6-88e4-2a1cd4da88df" containerName="mariadb-database-create" Jan 22 13:25:36 crc kubenswrapper[4773]: E0122 13:25:36.351835 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e821c273-29fb-417c-a66c-e216a2b67666" containerName="mariadb-account-create-update" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.351842 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="e821c273-29fb-417c-a66c-e216a2b67666" containerName="mariadb-account-create-update" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.352021 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="96667553-15cb-4ca6-88e4-2a1cd4da88df" containerName="mariadb-database-create" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.352046 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="e821c273-29fb-417c-a66c-e216a2b67666" containerName="mariadb-account-create-update" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.352836 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-825kp" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.355456 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.355912 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.357922 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.358509 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-m8zhn" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.361906 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-825kp"] Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.450062 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mqspt"] Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.451763 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mqspt" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.463999 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mqspt"] Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.532382 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bq9pg\" (UniqueName: \"kubernetes.io/projected/8a19a55a-727e-4c77-8be7-1bd9c960c1a3-kube-api-access-bq9pg\") pod \"keystone-db-sync-825kp\" (UID: \"8a19a55a-727e-4c77-8be7-1bd9c960c1a3\") " pod="openstack/keystone-db-sync-825kp" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.532803 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a19a55a-727e-4c77-8be7-1bd9c960c1a3-config-data\") pod \"keystone-db-sync-825kp\" (UID: \"8a19a55a-727e-4c77-8be7-1bd9c960c1a3\") " pod="openstack/keystone-db-sync-825kp" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.532988 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a19a55a-727e-4c77-8be7-1bd9c960c1a3-combined-ca-bundle\") pod \"keystone-db-sync-825kp\" (UID: \"8a19a55a-727e-4c77-8be7-1bd9c960c1a3\") " pod="openstack/keystone-db-sync-825kp" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.634796 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a19a55a-727e-4c77-8be7-1bd9c960c1a3-config-data\") pod \"keystone-db-sync-825kp\" (UID: \"8a19a55a-727e-4c77-8be7-1bd9c960c1a3\") " pod="openstack/keystone-db-sync-825kp" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.634905 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a19a55a-727e-4c77-8be7-1bd9c960c1a3-combined-ca-bundle\") pod \"keystone-db-sync-825kp\" (UID: \"8a19a55a-727e-4c77-8be7-1bd9c960c1a3\") " pod="openstack/keystone-db-sync-825kp" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.634967 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3787fe38-1a81-4a5a-960d-757d2132cd9f-catalog-content\") pod \"certified-operators-mqspt\" (UID: \"3787fe38-1a81-4a5a-960d-757d2132cd9f\") " pod="openshift-marketplace/certified-operators-mqspt" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.635035 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6fxs\" (UniqueName: \"kubernetes.io/projected/3787fe38-1a81-4a5a-960d-757d2132cd9f-kube-api-access-l6fxs\") pod \"certified-operators-mqspt\" (UID: \"3787fe38-1a81-4a5a-960d-757d2132cd9f\") " pod="openshift-marketplace/certified-operators-mqspt" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.635120 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bq9pg\" (UniqueName: \"kubernetes.io/projected/8a19a55a-727e-4c77-8be7-1bd9c960c1a3-kube-api-access-bq9pg\") pod \"keystone-db-sync-825kp\" (UID: \"8a19a55a-727e-4c77-8be7-1bd9c960c1a3\") " pod="openstack/keystone-db-sync-825kp" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.635210 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3787fe38-1a81-4a5a-960d-757d2132cd9f-utilities\") pod \"certified-operators-mqspt\" (UID: \"3787fe38-1a81-4a5a-960d-757d2132cd9f\") " pod="openshift-marketplace/certified-operators-mqspt" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.644167 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a19a55a-727e-4c77-8be7-1bd9c960c1a3-combined-ca-bundle\") pod \"keystone-db-sync-825kp\" (UID: \"8a19a55a-727e-4c77-8be7-1bd9c960c1a3\") " pod="openstack/keystone-db-sync-825kp" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.644325 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a19a55a-727e-4c77-8be7-1bd9c960c1a3-config-data\") pod \"keystone-db-sync-825kp\" (UID: \"8a19a55a-727e-4c77-8be7-1bd9c960c1a3\") " pod="openstack/keystone-db-sync-825kp" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.659357 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bq9pg\" (UniqueName: \"kubernetes.io/projected/8a19a55a-727e-4c77-8be7-1bd9c960c1a3-kube-api-access-bq9pg\") pod \"keystone-db-sync-825kp\" (UID: \"8a19a55a-727e-4c77-8be7-1bd9c960c1a3\") " pod="openstack/keystone-db-sync-825kp" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.672435 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-825kp" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.736571 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3787fe38-1a81-4a5a-960d-757d2132cd9f-catalog-content\") pod \"certified-operators-mqspt\" (UID: \"3787fe38-1a81-4a5a-960d-757d2132cd9f\") " pod="openshift-marketplace/certified-operators-mqspt" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.736635 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6fxs\" (UniqueName: \"kubernetes.io/projected/3787fe38-1a81-4a5a-960d-757d2132cd9f-kube-api-access-l6fxs\") pod \"certified-operators-mqspt\" (UID: \"3787fe38-1a81-4a5a-960d-757d2132cd9f\") " pod="openshift-marketplace/certified-operators-mqspt" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.736709 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3787fe38-1a81-4a5a-960d-757d2132cd9f-utilities\") pod \"certified-operators-mqspt\" (UID: \"3787fe38-1a81-4a5a-960d-757d2132cd9f\") " pod="openshift-marketplace/certified-operators-mqspt" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.737213 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3787fe38-1a81-4a5a-960d-757d2132cd9f-utilities\") pod \"certified-operators-mqspt\" (UID: \"3787fe38-1a81-4a5a-960d-757d2132cd9f\") " pod="openshift-marketplace/certified-operators-mqspt" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.737391 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3787fe38-1a81-4a5a-960d-757d2132cd9f-catalog-content\") pod \"certified-operators-mqspt\" (UID: \"3787fe38-1a81-4a5a-960d-757d2132cd9f\") " 
pod="openshift-marketplace/certified-operators-mqspt" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.761178 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6fxs\" (UniqueName: \"kubernetes.io/projected/3787fe38-1a81-4a5a-960d-757d2132cd9f-kube-api-access-l6fxs\") pod \"certified-operators-mqspt\" (UID: \"3787fe38-1a81-4a5a-960d-757d2132cd9f\") " pod="openshift-marketplace/certified-operators-mqspt" Jan 22 13:25:36 crc kubenswrapper[4773]: I0122 13:25:36.782328 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mqspt" Jan 22 13:25:37 crc kubenswrapper[4773]: I0122 13:25:37.216633 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-825kp"] Jan 22 13:25:37 crc kubenswrapper[4773]: I0122 13:25:37.315946 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mqspt"] Jan 22 13:25:37 crc kubenswrapper[4773]: W0122 13:25:37.323562 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3787fe38_1a81_4a5a_960d_757d2132cd9f.slice/crio-3097e2fb7b0c8bacb038fa8e7dcbcdb1cce36e419612511f3c63884d1a0b7053 WatchSource:0}: Error finding container 3097e2fb7b0c8bacb038fa8e7dcbcdb1cce36e419612511f3c63884d1a0b7053: Status 404 returned error can't find the container with id 3097e2fb7b0c8bacb038fa8e7dcbcdb1cce36e419612511f3c63884d1a0b7053 Jan 22 13:25:37 crc kubenswrapper[4773]: I0122 13:25:37.720282 4773 generic.go:334] "Generic (PLEG): container finished" podID="3787fe38-1a81-4a5a-960d-757d2132cd9f" containerID="d2033266fa0df2ae49b93a62e35e1a072724bbe2e18f25265124dd795515b078" exitCode=0 Jan 22 13:25:37 crc kubenswrapper[4773]: I0122 13:25:37.720485 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqspt" event={"ID":"3787fe38-1a81-4a5a-960d-757d2132cd9f","Type":"ContainerDied","Data":"d2033266fa0df2ae49b93a62e35e1a072724bbe2e18f25265124dd795515b078"} Jan 22 13:25:37 crc kubenswrapper[4773]: I0122 13:25:37.720516 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqspt" event={"ID":"3787fe38-1a81-4a5a-960d-757d2132cd9f","Type":"ContainerStarted","Data":"3097e2fb7b0c8bacb038fa8e7dcbcdb1cce36e419612511f3c63884d1a0b7053"} Jan 22 13:25:37 crc kubenswrapper[4773]: I0122 13:25:37.725721 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-825kp" event={"ID":"8a19a55a-727e-4c77-8be7-1bd9c960c1a3","Type":"ContainerStarted","Data":"1e89ebca9eceddf72d3cd58fc9b10b78bbf00c1141580acf8cc7b7c10d22ac76"} Jan 22 13:25:37 crc kubenswrapper[4773]: I0122 13:25:37.725766 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-825kp" event={"ID":"8a19a55a-727e-4c77-8be7-1bd9c960c1a3","Type":"ContainerStarted","Data":"58e026092a2f59c816620ffcdefb9d256b8523cef7816b9350dc21835fbf5833"} Jan 22 13:25:37 crc kubenswrapper[4773]: I0122 13:25:37.772922 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-825kp" podStartSLOduration=1.772900025 podStartE2EDuration="1.772900025s" podCreationTimestamp="2026-01-22 13:25:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:25:37.764532269 +0000 UTC m=+5445.342648094" watchObservedRunningTime="2026-01-22 
13:25:37.772900025 +0000 UTC m=+5445.351015850" Jan 22 13:25:38 crc kubenswrapper[4773]: I0122 13:25:38.737910 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqspt" event={"ID":"3787fe38-1a81-4a5a-960d-757d2132cd9f","Type":"ContainerStarted","Data":"b07d984ebaeddd9b3a821c8ebaf8b64e0c0b8ae24cd415107ee0e977f391dd82"} Jan 22 13:25:39 crc kubenswrapper[4773]: I0122 13:25:39.752261 4773 generic.go:334] "Generic (PLEG): container finished" podID="8a19a55a-727e-4c77-8be7-1bd9c960c1a3" containerID="1e89ebca9eceddf72d3cd58fc9b10b78bbf00c1141580acf8cc7b7c10d22ac76" exitCode=0 Jan 22 13:25:39 crc kubenswrapper[4773]: I0122 13:25:39.752379 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-825kp" event={"ID":"8a19a55a-727e-4c77-8be7-1bd9c960c1a3","Type":"ContainerDied","Data":"1e89ebca9eceddf72d3cd58fc9b10b78bbf00c1141580acf8cc7b7c10d22ac76"} Jan 22 13:25:39 crc kubenswrapper[4773]: I0122 13:25:39.756222 4773 generic.go:334] "Generic (PLEG): container finished" podID="3787fe38-1a81-4a5a-960d-757d2132cd9f" containerID="b07d984ebaeddd9b3a821c8ebaf8b64e0c0b8ae24cd415107ee0e977f391dd82" exitCode=0 Jan 22 13:25:39 crc kubenswrapper[4773]: I0122 13:25:39.756332 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqspt" event={"ID":"3787fe38-1a81-4a5a-960d-757d2132cd9f","Type":"ContainerDied","Data":"b07d984ebaeddd9b3a821c8ebaf8b64e0c0b8ae24cd415107ee0e977f391dd82"} Jan 22 13:25:39 crc kubenswrapper[4773]: I0122 13:25:39.761682 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 13:25:40 crc kubenswrapper[4773]: I0122 13:25:40.766082 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqspt" event={"ID":"3787fe38-1a81-4a5a-960d-757d2132cd9f","Type":"ContainerStarted","Data":"5c7117aa63dfa42123079d36dabb91bd50521b1aad3ac89bffe0f8ba5eeeb15f"} Jan 22 13:25:40 crc kubenswrapper[4773]: I0122 13:25:40.795487 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mqspt" podStartSLOduration=2.37290101 podStartE2EDuration="4.795461387s" podCreationTimestamp="2026-01-22 13:25:36 +0000 UTC" firstStartedPulling="2026-01-22 13:25:37.72241674 +0000 UTC m=+5445.300532575" lastFinishedPulling="2026-01-22 13:25:40.144977127 +0000 UTC m=+5447.723092952" observedRunningTime="2026-01-22 13:25:40.781904574 +0000 UTC m=+5448.360020409" watchObservedRunningTime="2026-01-22 13:25:40.795461387 +0000 UTC m=+5448.373577222" Jan 22 13:25:40 crc kubenswrapper[4773]: I0122 13:25:40.948912 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Jan 22 13:25:41 crc kubenswrapper[4773]: I0122 13:25:41.107636 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-825kp" Jan 22 13:25:41 crc kubenswrapper[4773]: I0122 13:25:41.220044 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bq9pg\" (UniqueName: \"kubernetes.io/projected/8a19a55a-727e-4c77-8be7-1bd9c960c1a3-kube-api-access-bq9pg\") pod \"8a19a55a-727e-4c77-8be7-1bd9c960c1a3\" (UID: \"8a19a55a-727e-4c77-8be7-1bd9c960c1a3\") " Jan 22 13:25:41 crc kubenswrapper[4773]: I0122 13:25:41.220220 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a19a55a-727e-4c77-8be7-1bd9c960c1a3-config-data\") pod \"8a19a55a-727e-4c77-8be7-1bd9c960c1a3\" (UID: \"8a19a55a-727e-4c77-8be7-1bd9c960c1a3\") " Jan 22 13:25:41 crc kubenswrapper[4773]: I0122 13:25:41.220348 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a19a55a-727e-4c77-8be7-1bd9c960c1a3-combined-ca-bundle\") pod \"8a19a55a-727e-4c77-8be7-1bd9c960c1a3\" (UID: \"8a19a55a-727e-4c77-8be7-1bd9c960c1a3\") " Jan 22 13:25:41 crc kubenswrapper[4773]: I0122 13:25:41.225828 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a19a55a-727e-4c77-8be7-1bd9c960c1a3-kube-api-access-bq9pg" (OuterVolumeSpecName: "kube-api-access-bq9pg") pod "8a19a55a-727e-4c77-8be7-1bd9c960c1a3" (UID: "8a19a55a-727e-4c77-8be7-1bd9c960c1a3"). InnerVolumeSpecName "kube-api-access-bq9pg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:25:41 crc kubenswrapper[4773]: I0122 13:25:41.244089 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a19a55a-727e-4c77-8be7-1bd9c960c1a3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8a19a55a-727e-4c77-8be7-1bd9c960c1a3" (UID: "8a19a55a-727e-4c77-8be7-1bd9c960c1a3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:25:41 crc kubenswrapper[4773]: I0122 13:25:41.263622 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a19a55a-727e-4c77-8be7-1bd9c960c1a3-config-data" (OuterVolumeSpecName: "config-data") pod "8a19a55a-727e-4c77-8be7-1bd9c960c1a3" (UID: "8a19a55a-727e-4c77-8be7-1bd9c960c1a3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:25:41 crc kubenswrapper[4773]: I0122 13:25:41.322041 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a19a55a-727e-4c77-8be7-1bd9c960c1a3-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:41 crc kubenswrapper[4773]: I0122 13:25:41.322315 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a19a55a-727e-4c77-8be7-1bd9c960c1a3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:41 crc kubenswrapper[4773]: I0122 13:25:41.322326 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bq9pg\" (UniqueName: \"kubernetes.io/projected/8a19a55a-727e-4c77-8be7-1bd9c960c1a3-kube-api-access-bq9pg\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:41 crc kubenswrapper[4773]: I0122 13:25:41.777487 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-825kp" event={"ID":"8a19a55a-727e-4c77-8be7-1bd9c960c1a3","Type":"ContainerDied","Data":"58e026092a2f59c816620ffcdefb9d256b8523cef7816b9350dc21835fbf5833"} Jan 22 13:25:41 crc kubenswrapper[4773]: I0122 13:25:41.777563 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="58e026092a2f59c816620ffcdefb9d256b8523cef7816b9350dc21835fbf5833" Jan 22 13:25:41 crc kubenswrapper[4773]: I0122 13:25:41.777512 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-825kp" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.016515 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-c84f5549f-bw82x"] Jan 22 13:25:42 crc kubenswrapper[4773]: E0122 13:25:42.016958 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a19a55a-727e-4c77-8be7-1bd9c960c1a3" containerName="keystone-db-sync" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.016980 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a19a55a-727e-4c77-8be7-1bd9c960c1a3" containerName="keystone-db-sync" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.017226 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a19a55a-727e-4c77-8be7-1bd9c960c1a3" containerName="keystone-db-sync" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.018350 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.045581 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c84f5549f-bw82x"] Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.093978 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-sjb4r"] Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.095388 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.098519 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.098543 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.098670 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.098785 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-m8zhn" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.098873 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.100062 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-sjb4r"] Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.138524 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-dns-svc\") pod \"dnsmasq-dns-c84f5549f-bw82x\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.138623 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-config\") pod \"dnsmasq-dns-c84f5549f-bw82x\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.138752 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-ovsdbserver-sb\") pod \"dnsmasq-dns-c84f5549f-bw82x\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.138829 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-ovsdbserver-nb\") pod \"dnsmasq-dns-c84f5549f-bw82x\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.138866 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlfxw\" (UniqueName: \"kubernetes.io/projected/3ae9528a-e508-435e-b609-cb8108e195d9-kube-api-access-zlfxw\") pod \"dnsmasq-dns-c84f5549f-bw82x\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.240408 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-config\") pod \"dnsmasq-dns-c84f5549f-bw82x\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.240549 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-combined-ca-bundle\") pod \"keystone-bootstrap-sjb4r\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.240589 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-fernet-keys\") pod \"keystone-bootstrap-sjb4r\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.240620 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-ovsdbserver-sb\") pod \"dnsmasq-dns-c84f5549f-bw82x\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.240871 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-ovsdbserver-nb\") pod \"dnsmasq-dns-c84f5549f-bw82x\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.240947 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlfxw\" (UniqueName: \"kubernetes.io/projected/3ae9528a-e508-435e-b609-cb8108e195d9-kube-api-access-zlfxw\") pod \"dnsmasq-dns-c84f5549f-bw82x\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.241098 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-dns-svc\") pod \"dnsmasq-dns-c84f5549f-bw82x\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.241134 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-config-data\") pod \"keystone-bootstrap-sjb4r\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.241154 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbkxc\" (UniqueName: \"kubernetes.io/projected/2b6af944-5e2b-44a7-aeab-71e33e986f86-kube-api-access-xbkxc\") pod \"keystone-bootstrap-sjb4r\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.241195 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-scripts\") pod \"keystone-bootstrap-sjb4r\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.241214 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-credential-keys\") pod \"keystone-bootstrap-sjb4r\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.241704 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-config\") pod \"dnsmasq-dns-c84f5549f-bw82x\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.241718 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-ovsdbserver-sb\") pod \"dnsmasq-dns-c84f5549f-bw82x\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.242006 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-dns-svc\") pod \"dnsmasq-dns-c84f5549f-bw82x\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.242019 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-ovsdbserver-nb\") pod \"dnsmasq-dns-c84f5549f-bw82x\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.258129 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlfxw\" (UniqueName: \"kubernetes.io/projected/3ae9528a-e508-435e-b609-cb8108e195d9-kube-api-access-zlfxw\") pod \"dnsmasq-dns-c84f5549f-bw82x\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.343265 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-config-data\") pod \"keystone-bootstrap-sjb4r\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.343357 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbkxc\" (UniqueName: \"kubernetes.io/projected/2b6af944-5e2b-44a7-aeab-71e33e986f86-kube-api-access-xbkxc\") pod \"keystone-bootstrap-sjb4r\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.343405 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-scripts\") pod \"keystone-bootstrap-sjb4r\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.343436 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-credential-keys\") pod \"keystone-bootstrap-sjb4r\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.343551 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-combined-ca-bundle\") pod \"keystone-bootstrap-sjb4r\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.343591 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-fernet-keys\") pod \"keystone-bootstrap-sjb4r\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.350135 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.363352 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-credential-keys\") pod \"keystone-bootstrap-sjb4r\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.363640 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-combined-ca-bundle\") pod \"keystone-bootstrap-sjb4r\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.363936 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-fernet-keys\") pod \"keystone-bootstrap-sjb4r\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.364161 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-config-data\") pod \"keystone-bootstrap-sjb4r\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.367621 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-scripts\") pod \"keystone-bootstrap-sjb4r\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.369688 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbkxc\" (UniqueName: \"kubernetes.io/projected/2b6af944-5e2b-44a7-aeab-71e33e986f86-kube-api-access-xbkxc\") pod \"keystone-bootstrap-sjb4r\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.423956 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.843855 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c84f5549f-bw82x"] Jan 22 13:25:42 crc kubenswrapper[4773]: I0122 13:25:42.932463 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-sjb4r"] Jan 22 13:25:42 crc kubenswrapper[4773]: W0122 13:25:42.943175 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b6af944_5e2b_44a7_aeab_71e33e986f86.slice/crio-c802fd1765536455603ce9326d61d92233085d08fca30e71113f036d93561b64 WatchSource:0}: Error finding container c802fd1765536455603ce9326d61d92233085d08fca30e71113f036d93561b64: Status 404 returned error can't find the container with id c802fd1765536455603ce9326d61d92233085d08fca30e71113f036d93561b64 Jan 22 13:25:43 crc kubenswrapper[4773]: I0122 13:25:43.794883 4773 generic.go:334] "Generic (PLEG): container finished" podID="3ae9528a-e508-435e-b609-cb8108e195d9" containerID="75fa8f812bfbd3031dcea5554a21c495e7693e3380fef40bfba41f0b7268a17b" exitCode=0 Jan 22 13:25:43 crc kubenswrapper[4773]: I0122 13:25:43.795088 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c84f5549f-bw82x" event={"ID":"3ae9528a-e508-435e-b609-cb8108e195d9","Type":"ContainerDied","Data":"75fa8f812bfbd3031dcea5554a21c495e7693e3380fef40bfba41f0b7268a17b"} Jan 22 13:25:43 crc kubenswrapper[4773]: I0122 13:25:43.795264 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c84f5549f-bw82x" event={"ID":"3ae9528a-e508-435e-b609-cb8108e195d9","Type":"ContainerStarted","Data":"26b69d1403deb49fa430f29b9a2d5214b450e94f3bc35f8c1ba52cb9f0027df0"} Jan 22 13:25:43 crc kubenswrapper[4773]: I0122 13:25:43.797240 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-sjb4r" event={"ID":"2b6af944-5e2b-44a7-aeab-71e33e986f86","Type":"ContainerStarted","Data":"d4fcb7f54800a45cfef6b302cda49d727782b0b3411ad6b7b244961d4ec521bb"} Jan 22 13:25:43 crc kubenswrapper[4773]: I0122 13:25:43.797267 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-sjb4r" event={"ID":"2b6af944-5e2b-44a7-aeab-71e33e986f86","Type":"ContainerStarted","Data":"c802fd1765536455603ce9326d61d92233085d08fca30e71113f036d93561b64"} Jan 22 13:25:43 crc kubenswrapper[4773]: I0122 13:25:43.847881 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-sjb4r" podStartSLOduration=1.8478587210000001 podStartE2EDuration="1.847858721s" podCreationTimestamp="2026-01-22 13:25:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:25:43.844936979 +0000 UTC m=+5451.423052844" watchObservedRunningTime="2026-01-22 13:25:43.847858721 +0000 UTC m=+5451.425974546" Jan 22 13:25:44 crc kubenswrapper[4773]: I0122 13:25:44.808395 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c84f5549f-bw82x" event={"ID":"3ae9528a-e508-435e-b609-cb8108e195d9","Type":"ContainerStarted","Data":"af03ccc60cd307473b7d0c13da8e6c55ad53bf94aed3f875560bf5e204324481"} Jan 22 13:25:44 crc kubenswrapper[4773]: I0122 13:25:44.808853 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:25:44 crc kubenswrapper[4773]: I0122 
13:25:44.836386 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-c84f5549f-bw82x" podStartSLOduration=3.836358422 podStartE2EDuration="3.836358422s" podCreationTimestamp="2026-01-22 13:25:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:25:44.828914722 +0000 UTC m=+5452.407030557" watchObservedRunningTime="2026-01-22 13:25:44.836358422 +0000 UTC m=+5452.414474257" Jan 22 13:25:46 crc kubenswrapper[4773]: I0122 13:25:46.783515 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mqspt" Jan 22 13:25:46 crc kubenswrapper[4773]: I0122 13:25:46.783935 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mqspt" Jan 22 13:25:46 crc kubenswrapper[4773]: I0122 13:25:46.843713 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mqspt" Jan 22 13:25:46 crc kubenswrapper[4773]: I0122 13:25:46.902979 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mqspt" Jan 22 13:25:47 crc kubenswrapper[4773]: I0122 13:25:47.083554 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mqspt"] Jan 22 13:25:47 crc kubenswrapper[4773]: I0122 13:25:47.840848 4773 generic.go:334] "Generic (PLEG): container finished" podID="2b6af944-5e2b-44a7-aeab-71e33e986f86" containerID="d4fcb7f54800a45cfef6b302cda49d727782b0b3411ad6b7b244961d4ec521bb" exitCode=0 Jan 22 13:25:47 crc kubenswrapper[4773]: I0122 13:25:47.840984 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-sjb4r" event={"ID":"2b6af944-5e2b-44a7-aeab-71e33e986f86","Type":"ContainerDied","Data":"d4fcb7f54800a45cfef6b302cda49d727782b0b3411ad6b7b244961d4ec521bb"} Jan 22 13:25:48 crc kubenswrapper[4773]: I0122 13:25:48.849643 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mqspt" podUID="3787fe38-1a81-4a5a-960d-757d2132cd9f" containerName="registry-server" containerID="cri-o://5c7117aa63dfa42123079d36dabb91bd50521b1aad3ac89bffe0f8ba5eeeb15f" gracePeriod=2 Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.237988 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.394211 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mqspt" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.415873 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-config-data\") pod \"2b6af944-5e2b-44a7-aeab-71e33e986f86\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.415951 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbkxc\" (UniqueName: \"kubernetes.io/projected/2b6af944-5e2b-44a7-aeab-71e33e986f86-kube-api-access-xbkxc\") pod \"2b6af944-5e2b-44a7-aeab-71e33e986f86\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.416020 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-credential-keys\") pod \"2b6af944-5e2b-44a7-aeab-71e33e986f86\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.416061 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-fernet-keys\") pod \"2b6af944-5e2b-44a7-aeab-71e33e986f86\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.416094 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-combined-ca-bundle\") pod \"2b6af944-5e2b-44a7-aeab-71e33e986f86\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.416223 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-scripts\") pod \"2b6af944-5e2b-44a7-aeab-71e33e986f86\" (UID: \"2b6af944-5e2b-44a7-aeab-71e33e986f86\") " Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.424524 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "2b6af944-5e2b-44a7-aeab-71e33e986f86" (UID: "2b6af944-5e2b-44a7-aeab-71e33e986f86"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.424561 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "2b6af944-5e2b-44a7-aeab-71e33e986f86" (UID: "2b6af944-5e2b-44a7-aeab-71e33e986f86"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.425264 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b6af944-5e2b-44a7-aeab-71e33e986f86-kube-api-access-xbkxc" (OuterVolumeSpecName: "kube-api-access-xbkxc") pod "2b6af944-5e2b-44a7-aeab-71e33e986f86" (UID: "2b6af944-5e2b-44a7-aeab-71e33e986f86"). InnerVolumeSpecName "kube-api-access-xbkxc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.425430 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-scripts" (OuterVolumeSpecName: "scripts") pod "2b6af944-5e2b-44a7-aeab-71e33e986f86" (UID: "2b6af944-5e2b-44a7-aeab-71e33e986f86"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.442633 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2b6af944-5e2b-44a7-aeab-71e33e986f86" (UID: "2b6af944-5e2b-44a7-aeab-71e33e986f86"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.443843 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-config-data" (OuterVolumeSpecName: "config-data") pod "2b6af944-5e2b-44a7-aeab-71e33e986f86" (UID: "2b6af944-5e2b-44a7-aeab-71e33e986f86"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.520997 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6fxs\" (UniqueName: \"kubernetes.io/projected/3787fe38-1a81-4a5a-960d-757d2132cd9f-kube-api-access-l6fxs\") pod \"3787fe38-1a81-4a5a-960d-757d2132cd9f\" (UID: \"3787fe38-1a81-4a5a-960d-757d2132cd9f\") " Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.521072 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3787fe38-1a81-4a5a-960d-757d2132cd9f-utilities\") pod \"3787fe38-1a81-4a5a-960d-757d2132cd9f\" (UID: \"3787fe38-1a81-4a5a-960d-757d2132cd9f\") " Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.521327 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3787fe38-1a81-4a5a-960d-757d2132cd9f-catalog-content\") pod \"3787fe38-1a81-4a5a-960d-757d2132cd9f\" (UID: \"3787fe38-1a81-4a5a-960d-757d2132cd9f\") " Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.521743 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.521768 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.521781 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbkxc\" (UniqueName: \"kubernetes.io/projected/2b6af944-5e2b-44a7-aeab-71e33e986f86-kube-api-access-xbkxc\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.521794 4773 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.521805 4773 
reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.521814 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b6af944-5e2b-44a7-aeab-71e33e986f86-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.522253 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3787fe38-1a81-4a5a-960d-757d2132cd9f-utilities" (OuterVolumeSpecName: "utilities") pod "3787fe38-1a81-4a5a-960d-757d2132cd9f" (UID: "3787fe38-1a81-4a5a-960d-757d2132cd9f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.523959 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3787fe38-1a81-4a5a-960d-757d2132cd9f-kube-api-access-l6fxs" (OuterVolumeSpecName: "kube-api-access-l6fxs") pod "3787fe38-1a81-4a5a-960d-757d2132cd9f" (UID: "3787fe38-1a81-4a5a-960d-757d2132cd9f"). InnerVolumeSpecName "kube-api-access-l6fxs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.568610 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3787fe38-1a81-4a5a-960d-757d2132cd9f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3787fe38-1a81-4a5a-960d-757d2132cd9f" (UID: "3787fe38-1a81-4a5a-960d-757d2132cd9f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.622410 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3787fe38-1a81-4a5a-960d-757d2132cd9f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.622451 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6fxs\" (UniqueName: \"kubernetes.io/projected/3787fe38-1a81-4a5a-960d-757d2132cd9f-kube-api-access-l6fxs\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.622461 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3787fe38-1a81-4a5a-960d-757d2132cd9f-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.862939 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-sjb4r" event={"ID":"2b6af944-5e2b-44a7-aeab-71e33e986f86","Type":"ContainerDied","Data":"c802fd1765536455603ce9326d61d92233085d08fca30e71113f036d93561b64"} Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.863422 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c802fd1765536455603ce9326d61d92233085d08fca30e71113f036d93561b64" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.862973 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-sjb4r" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.865694 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mqspt" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.865848 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqspt" event={"ID":"3787fe38-1a81-4a5a-960d-757d2132cd9f","Type":"ContainerDied","Data":"5c7117aa63dfa42123079d36dabb91bd50521b1aad3ac89bffe0f8ba5eeeb15f"} Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.866009 4773 scope.go:117] "RemoveContainer" containerID="5c7117aa63dfa42123079d36dabb91bd50521b1aad3ac89bffe0f8ba5eeeb15f" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.865584 4773 generic.go:334] "Generic (PLEG): container finished" podID="3787fe38-1a81-4a5a-960d-757d2132cd9f" containerID="5c7117aa63dfa42123079d36dabb91bd50521b1aad3ac89bffe0f8ba5eeeb15f" exitCode=0 Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.867493 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mqspt" event={"ID":"3787fe38-1a81-4a5a-960d-757d2132cd9f","Type":"ContainerDied","Data":"3097e2fb7b0c8bacb038fa8e7dcbcdb1cce36e419612511f3c63884d1a0b7053"} Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.895311 4773 scope.go:117] "RemoveContainer" containerID="b07d984ebaeddd9b3a821c8ebaf8b64e0c0b8ae24cd415107ee0e977f391dd82" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.959937 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mqspt"] Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.969420 4773 scope.go:117] "RemoveContainer" containerID="d2033266fa0df2ae49b93a62e35e1a072724bbe2e18f25265124dd795515b078" Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.979599 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mqspt"] Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.988708 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-sjb4r"] Jan 22 13:25:49 crc kubenswrapper[4773]: I0122 13:25:49.996196 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-sjb4r"] Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.034767 4773 scope.go:117] "RemoveContainer" containerID="5c7117aa63dfa42123079d36dabb91bd50521b1aad3ac89bffe0f8ba5eeeb15f" Jan 22 13:25:50 crc kubenswrapper[4773]: E0122 13:25:50.035151 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c7117aa63dfa42123079d36dabb91bd50521b1aad3ac89bffe0f8ba5eeeb15f\": container with ID starting with 5c7117aa63dfa42123079d36dabb91bd50521b1aad3ac89bffe0f8ba5eeeb15f not found: ID does not exist" containerID="5c7117aa63dfa42123079d36dabb91bd50521b1aad3ac89bffe0f8ba5eeeb15f" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.035194 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c7117aa63dfa42123079d36dabb91bd50521b1aad3ac89bffe0f8ba5eeeb15f"} err="failed to get container status \"5c7117aa63dfa42123079d36dabb91bd50521b1aad3ac89bffe0f8ba5eeeb15f\": rpc error: code = NotFound desc = could not find container \"5c7117aa63dfa42123079d36dabb91bd50521b1aad3ac89bffe0f8ba5eeeb15f\": container with ID starting with 5c7117aa63dfa42123079d36dabb91bd50521b1aad3ac89bffe0f8ba5eeeb15f not found: ID does not exist" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.035223 4773 scope.go:117] "RemoveContainer" 
containerID="b07d984ebaeddd9b3a821c8ebaf8b64e0c0b8ae24cd415107ee0e977f391dd82" Jan 22 13:25:50 crc kubenswrapper[4773]: E0122 13:25:50.035611 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b07d984ebaeddd9b3a821c8ebaf8b64e0c0b8ae24cd415107ee0e977f391dd82\": container with ID starting with b07d984ebaeddd9b3a821c8ebaf8b64e0c0b8ae24cd415107ee0e977f391dd82 not found: ID does not exist" containerID="b07d984ebaeddd9b3a821c8ebaf8b64e0c0b8ae24cd415107ee0e977f391dd82" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.035641 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b07d984ebaeddd9b3a821c8ebaf8b64e0c0b8ae24cd415107ee0e977f391dd82"} err="failed to get container status \"b07d984ebaeddd9b3a821c8ebaf8b64e0c0b8ae24cd415107ee0e977f391dd82\": rpc error: code = NotFound desc = could not find container \"b07d984ebaeddd9b3a821c8ebaf8b64e0c0b8ae24cd415107ee0e977f391dd82\": container with ID starting with b07d984ebaeddd9b3a821c8ebaf8b64e0c0b8ae24cd415107ee0e977f391dd82 not found: ID does not exist" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.035660 4773 scope.go:117] "RemoveContainer" containerID="d2033266fa0df2ae49b93a62e35e1a072724bbe2e18f25265124dd795515b078" Jan 22 13:25:50 crc kubenswrapper[4773]: E0122 13:25:50.035948 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2033266fa0df2ae49b93a62e35e1a072724bbe2e18f25265124dd795515b078\": container with ID starting with d2033266fa0df2ae49b93a62e35e1a072724bbe2e18f25265124dd795515b078 not found: ID does not exist" containerID="d2033266fa0df2ae49b93a62e35e1a072724bbe2e18f25265124dd795515b078" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.035977 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2033266fa0df2ae49b93a62e35e1a072724bbe2e18f25265124dd795515b078"} err="failed to get container status \"d2033266fa0df2ae49b93a62e35e1a072724bbe2e18f25265124dd795515b078\": rpc error: code = NotFound desc = could not find container \"d2033266fa0df2ae49b93a62e35e1a072724bbe2e18f25265124dd795515b078\": container with ID starting with d2033266fa0df2ae49b93a62e35e1a072724bbe2e18f25265124dd795515b078 not found: ID does not exist" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.038065 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-2hpvb"] Jan 22 13:25:50 crc kubenswrapper[4773]: E0122 13:25:50.038935 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3787fe38-1a81-4a5a-960d-757d2132cd9f" containerName="registry-server" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.039037 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3787fe38-1a81-4a5a-960d-757d2132cd9f" containerName="registry-server" Jan 22 13:25:50 crc kubenswrapper[4773]: E0122 13:25:50.039132 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3787fe38-1a81-4a5a-960d-757d2132cd9f" containerName="extract-content" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.039208 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3787fe38-1a81-4a5a-960d-757d2132cd9f" containerName="extract-content" Jan 22 13:25:50 crc kubenswrapper[4773]: E0122 13:25:50.039310 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b6af944-5e2b-44a7-aeab-71e33e986f86" containerName="keystone-bootstrap" Jan 22 13:25:50 crc 
kubenswrapper[4773]: I0122 13:25:50.039415 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b6af944-5e2b-44a7-aeab-71e33e986f86" containerName="keystone-bootstrap" Jan 22 13:25:50 crc kubenswrapper[4773]: E0122 13:25:50.039502 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3787fe38-1a81-4a5a-960d-757d2132cd9f" containerName="extract-utilities" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.039576 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3787fe38-1a81-4a5a-960d-757d2132cd9f" containerName="extract-utilities" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.039831 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="3787fe38-1a81-4a5a-960d-757d2132cd9f" containerName="registry-server" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.039912 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b6af944-5e2b-44a7-aeab-71e33e986f86" containerName="keystone-bootstrap" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.040590 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.049773 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-m8zhn" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.050023 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.050178 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.056720 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.057096 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.069650 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-2hpvb"] Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.153685 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-scripts\") pod \"keystone-bootstrap-2hpvb\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.153742 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-credential-keys\") pod \"keystone-bootstrap-2hpvb\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.153781 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lqnt\" (UniqueName: \"kubernetes.io/projected/381a6d42-86f2-4444-ae9b-af0f9918d115-kube-api-access-5lqnt\") pod \"keystone-bootstrap-2hpvb\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.153821 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-combined-ca-bundle\") pod \"keystone-bootstrap-2hpvb\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.153857 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-config-data\") pod \"keystone-bootstrap-2hpvb\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.153890 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-fernet-keys\") pod \"keystone-bootstrap-2hpvb\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.255399 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-scripts\") pod \"keystone-bootstrap-2hpvb\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.255472 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-credential-keys\") pod \"keystone-bootstrap-2hpvb\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.255533 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lqnt\" (UniqueName: \"kubernetes.io/projected/381a6d42-86f2-4444-ae9b-af0f9918d115-kube-api-access-5lqnt\") pod \"keystone-bootstrap-2hpvb\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.255584 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-combined-ca-bundle\") pod \"keystone-bootstrap-2hpvb\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.255627 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-config-data\") pod \"keystone-bootstrap-2hpvb\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.255670 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-fernet-keys\") pod \"keystone-bootstrap-2hpvb\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.259486 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-scripts\") pod \"keystone-bootstrap-2hpvb\" (UID: 
\"381a6d42-86f2-4444-ae9b-af0f9918d115\") " pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.259559 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-credential-keys\") pod \"keystone-bootstrap-2hpvb\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.260170 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-fernet-keys\") pod \"keystone-bootstrap-2hpvb\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.260170 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-config-data\") pod \"keystone-bootstrap-2hpvb\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.266192 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-combined-ca-bundle\") pod \"keystone-bootstrap-2hpvb\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.271522 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lqnt\" (UniqueName: \"kubernetes.io/projected/381a6d42-86f2-4444-ae9b-af0f9918d115-kube-api-access-5lqnt\") pod \"keystone-bootstrap-2hpvb\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.397325 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.670648 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b6af944-5e2b-44a7-aeab-71e33e986f86" path="/var/lib/kubelet/pods/2b6af944-5e2b-44a7-aeab-71e33e986f86/volumes" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.671732 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3787fe38-1a81-4a5a-960d-757d2132cd9f" path="/var/lib/kubelet/pods/3787fe38-1a81-4a5a-960d-757d2132cd9f/volumes" Jan 22 13:25:50 crc kubenswrapper[4773]: I0122 13:25:50.908624 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-2hpvb"] Jan 22 13:25:51 crc kubenswrapper[4773]: I0122 13:25:51.908470 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2hpvb" event={"ID":"381a6d42-86f2-4444-ae9b-af0f9918d115","Type":"ContainerStarted","Data":"bd3e120d71e493df75912d5e613d25ab17046e440593bc7508487d70eb308c05"} Jan 22 13:25:51 crc kubenswrapper[4773]: I0122 13:25:51.908873 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2hpvb" event={"ID":"381a6d42-86f2-4444-ae9b-af0f9918d115","Type":"ContainerStarted","Data":"3e18c32a0b554b0b631a5e5514800b68307dfd18addd7f1e00704b6517966d3f"} Jan 22 13:25:52 crc kubenswrapper[4773]: I0122 13:25:52.351857 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:25:52 crc kubenswrapper[4773]: I0122 13:25:52.379245 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-2hpvb" podStartSLOduration=2.379172388 podStartE2EDuration="2.379172388s" podCreationTimestamp="2026-01-22 13:25:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:25:51.94487608 +0000 UTC m=+5459.522991905" watchObservedRunningTime="2026-01-22 13:25:52.379172388 +0000 UTC m=+5459.957288213" Jan 22 13:25:52 crc kubenswrapper[4773]: I0122 13:25:52.413605 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78885857fc-lf9xc"] Jan 22 13:25:52 crc kubenswrapper[4773]: I0122 13:25:52.413937 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-78885857fc-lf9xc" podUID="1fba5bb8-8cc7-4ee5-a191-18ad766bf87b" containerName="dnsmasq-dns" containerID="cri-o://eecca40acdc37965ab1832b81d359abdee0bd58e08f07e13a06f245b5c8b37b9" gracePeriod=10 Jan 22 13:25:52 crc kubenswrapper[4773]: I0122 13:25:52.611404 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-78885857fc-lf9xc" podUID="1fba5bb8-8cc7-4ee5-a191-18ad766bf87b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.18:5353: connect: connection refused" Jan 22 13:25:52 crc kubenswrapper[4773]: I0122 13:25:52.916608 4773 generic.go:334] "Generic (PLEG): container finished" podID="1fba5bb8-8cc7-4ee5-a191-18ad766bf87b" containerID="eecca40acdc37965ab1832b81d359abdee0bd58e08f07e13a06f245b5c8b37b9" exitCode=0 Jan 22 13:25:52 crc kubenswrapper[4773]: I0122 13:25:52.917422 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78885857fc-lf9xc" event={"ID":"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b","Type":"ContainerDied","Data":"eecca40acdc37965ab1832b81d359abdee0bd58e08f07e13a06f245b5c8b37b9"} Jan 22 13:25:52 crc kubenswrapper[4773]: 
I0122 13:25:52.917470 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78885857fc-lf9xc" event={"ID":"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b","Type":"ContainerDied","Data":"66085ad4c04869218b265219c37956d269665f8826138746cc5232f0a10e3ecf"} Jan 22 13:25:52 crc kubenswrapper[4773]: I0122 13:25:52.917482 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66085ad4c04869218b265219c37956d269665f8826138746cc5232f0a10e3ecf" Jan 22 13:25:52 crc kubenswrapper[4773]: I0122 13:25:52.960616 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:53 crc kubenswrapper[4773]: I0122 13:25:53.107263 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtdnz\" (UniqueName: \"kubernetes.io/projected/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-kube-api-access-jtdnz\") pod \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\" (UID: \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " Jan 22 13:25:53 crc kubenswrapper[4773]: I0122 13:25:53.107526 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-ovsdbserver-nb\") pod \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\" (UID: \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " Jan 22 13:25:53 crc kubenswrapper[4773]: I0122 13:25:53.107594 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-config\") pod \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\" (UID: \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " Jan 22 13:25:53 crc kubenswrapper[4773]: I0122 13:25:53.107641 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-ovsdbserver-sb\") pod \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\" (UID: \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " Jan 22 13:25:53 crc kubenswrapper[4773]: I0122 13:25:53.107676 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-dns-svc\") pod \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\" (UID: \"1fba5bb8-8cc7-4ee5-a191-18ad766bf87b\") " Jan 22 13:25:53 crc kubenswrapper[4773]: I0122 13:25:53.112794 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-kube-api-access-jtdnz" (OuterVolumeSpecName: "kube-api-access-jtdnz") pod "1fba5bb8-8cc7-4ee5-a191-18ad766bf87b" (UID: "1fba5bb8-8cc7-4ee5-a191-18ad766bf87b"). InnerVolumeSpecName "kube-api-access-jtdnz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:25:53 crc kubenswrapper[4773]: I0122 13:25:53.145039 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1fba5bb8-8cc7-4ee5-a191-18ad766bf87b" (UID: "1fba5bb8-8cc7-4ee5-a191-18ad766bf87b"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:25:53 crc kubenswrapper[4773]: I0122 13:25:53.149760 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-config" (OuterVolumeSpecName: "config") pod "1fba5bb8-8cc7-4ee5-a191-18ad766bf87b" (UID: "1fba5bb8-8cc7-4ee5-a191-18ad766bf87b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:25:53 crc kubenswrapper[4773]: I0122 13:25:53.150370 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1fba5bb8-8cc7-4ee5-a191-18ad766bf87b" (UID: "1fba5bb8-8cc7-4ee5-a191-18ad766bf87b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:25:53 crc kubenswrapper[4773]: I0122 13:25:53.160220 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1fba5bb8-8cc7-4ee5-a191-18ad766bf87b" (UID: "1fba5bb8-8cc7-4ee5-a191-18ad766bf87b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:25:53 crc kubenswrapper[4773]: I0122 13:25:53.212609 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:53 crc kubenswrapper[4773]: I0122 13:25:53.212664 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:53 crc kubenswrapper[4773]: I0122 13:25:53.212686 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:53 crc kubenswrapper[4773]: I0122 13:25:53.212698 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:53 crc kubenswrapper[4773]: I0122 13:25:53.212712 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtdnz\" (UniqueName: \"kubernetes.io/projected/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b-kube-api-access-jtdnz\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:53 crc kubenswrapper[4773]: I0122 13:25:53.927102 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78885857fc-lf9xc" Jan 22 13:25:53 crc kubenswrapper[4773]: I0122 13:25:53.960204 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78885857fc-lf9xc"] Jan 22 13:25:53 crc kubenswrapper[4773]: I0122 13:25:53.965576 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78885857fc-lf9xc"] Jan 22 13:25:54 crc kubenswrapper[4773]: I0122 13:25:54.670187 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fba5bb8-8cc7-4ee5-a191-18ad766bf87b" path="/var/lib/kubelet/pods/1fba5bb8-8cc7-4ee5-a191-18ad766bf87b/volumes" Jan 22 13:25:56 crc kubenswrapper[4773]: I0122 13:25:56.092950 4773 generic.go:334] "Generic (PLEG): container finished" podID="381a6d42-86f2-4444-ae9b-af0f9918d115" containerID="bd3e120d71e493df75912d5e613d25ab17046e440593bc7508487d70eb308c05" exitCode=0 Jan 22 13:25:56 crc kubenswrapper[4773]: I0122 13:25:56.093027 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2hpvb" event={"ID":"381a6d42-86f2-4444-ae9b-af0f9918d115","Type":"ContainerDied","Data":"bd3e120d71e493df75912d5e613d25ab17046e440593bc7508487d70eb308c05"} Jan 22 13:25:57 crc kubenswrapper[4773]: I0122 13:25:57.429154 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:57 crc kubenswrapper[4773]: I0122 13:25:57.615215 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-credential-keys\") pod \"381a6d42-86f2-4444-ae9b-af0f9918d115\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " Jan 22 13:25:57 crc kubenswrapper[4773]: I0122 13:25:57.615278 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-combined-ca-bundle\") pod \"381a6d42-86f2-4444-ae9b-af0f9918d115\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " Jan 22 13:25:57 crc kubenswrapper[4773]: I0122 13:25:57.615356 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-scripts\") pod \"381a6d42-86f2-4444-ae9b-af0f9918d115\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " Jan 22 13:25:57 crc kubenswrapper[4773]: I0122 13:25:57.615401 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lqnt\" (UniqueName: \"kubernetes.io/projected/381a6d42-86f2-4444-ae9b-af0f9918d115-kube-api-access-5lqnt\") pod \"381a6d42-86f2-4444-ae9b-af0f9918d115\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " Jan 22 13:25:57 crc kubenswrapper[4773]: I0122 13:25:57.615537 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-config-data\") pod \"381a6d42-86f2-4444-ae9b-af0f9918d115\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " Jan 22 13:25:57 crc kubenswrapper[4773]: I0122 13:25:57.615577 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-fernet-keys\") pod \"381a6d42-86f2-4444-ae9b-af0f9918d115\" (UID: \"381a6d42-86f2-4444-ae9b-af0f9918d115\") " Jan 22 13:25:57 crc kubenswrapper[4773]: I0122 
13:25:57.620491 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "381a6d42-86f2-4444-ae9b-af0f9918d115" (UID: "381a6d42-86f2-4444-ae9b-af0f9918d115"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:25:57 crc kubenswrapper[4773]: I0122 13:25:57.620695 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/381a6d42-86f2-4444-ae9b-af0f9918d115-kube-api-access-5lqnt" (OuterVolumeSpecName: "kube-api-access-5lqnt") pod "381a6d42-86f2-4444-ae9b-af0f9918d115" (UID: "381a6d42-86f2-4444-ae9b-af0f9918d115"). InnerVolumeSpecName "kube-api-access-5lqnt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:25:57 crc kubenswrapper[4773]: I0122 13:25:57.621090 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "381a6d42-86f2-4444-ae9b-af0f9918d115" (UID: "381a6d42-86f2-4444-ae9b-af0f9918d115"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:25:57 crc kubenswrapper[4773]: I0122 13:25:57.630501 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-scripts" (OuterVolumeSpecName: "scripts") pod "381a6d42-86f2-4444-ae9b-af0f9918d115" (UID: "381a6d42-86f2-4444-ae9b-af0f9918d115"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:25:57 crc kubenswrapper[4773]: I0122 13:25:57.639211 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-config-data" (OuterVolumeSpecName: "config-data") pod "381a6d42-86f2-4444-ae9b-af0f9918d115" (UID: "381a6d42-86f2-4444-ae9b-af0f9918d115"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:25:57 crc kubenswrapper[4773]: I0122 13:25:57.642597 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "381a6d42-86f2-4444-ae9b-af0f9918d115" (UID: "381a6d42-86f2-4444-ae9b-af0f9918d115"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:25:57 crc kubenswrapper[4773]: I0122 13:25:57.718482 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:57 crc kubenswrapper[4773]: I0122 13:25:57.718523 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:57 crc kubenswrapper[4773]: I0122 13:25:57.718533 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lqnt\" (UniqueName: \"kubernetes.io/projected/381a6d42-86f2-4444-ae9b-af0f9918d115-kube-api-access-5lqnt\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:57 crc kubenswrapper[4773]: I0122 13:25:57.718542 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:57 crc kubenswrapper[4773]: I0122 13:25:57.718550 4773 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:57 crc kubenswrapper[4773]: I0122 13:25:57.718560 4773 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/381a6d42-86f2-4444-ae9b-af0f9918d115-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.112440 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2hpvb" event={"ID":"381a6d42-86f2-4444-ae9b-af0f9918d115","Type":"ContainerDied","Data":"3e18c32a0b554b0b631a5e5514800b68307dfd18addd7f1e00704b6517966d3f"} Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.112669 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e18c32a0b554b0b631a5e5514800b68307dfd18addd7f1e00704b6517966d3f" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.112625 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-2hpvb" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.188116 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-54c687bbf4-87mjw"] Jan 22 13:25:58 crc kubenswrapper[4773]: E0122 13:25:58.188883 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="381a6d42-86f2-4444-ae9b-af0f9918d115" containerName="keystone-bootstrap" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.188909 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="381a6d42-86f2-4444-ae9b-af0f9918d115" containerName="keystone-bootstrap" Jan 22 13:25:58 crc kubenswrapper[4773]: E0122 13:25:58.188930 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fba5bb8-8cc7-4ee5-a191-18ad766bf87b" containerName="init" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.188936 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fba5bb8-8cc7-4ee5-a191-18ad766bf87b" containerName="init" Jan 22 13:25:58 crc kubenswrapper[4773]: E0122 13:25:58.188946 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fba5bb8-8cc7-4ee5-a191-18ad766bf87b" containerName="dnsmasq-dns" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.188954 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fba5bb8-8cc7-4ee5-a191-18ad766bf87b" containerName="dnsmasq-dns" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.189094 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fba5bb8-8cc7-4ee5-a191-18ad766bf87b" containerName="dnsmasq-dns" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.189110 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="381a6d42-86f2-4444-ae9b-af0f9918d115" containerName="keystone-bootstrap" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.189723 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.192743 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.192743 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.197307 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.197685 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.197740 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.198401 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-m8zhn" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.208007 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-54c687bbf4-87mjw"] Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.328248 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pw96\" (UniqueName: \"kubernetes.io/projected/11af641b-1498-4f72-885a-35b8531e251c-kube-api-access-6pw96\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.328321 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-credential-keys\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.328351 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-combined-ca-bundle\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.328426 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-config-data\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.328449 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-scripts\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.328467 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-public-tls-certs\") pod \"keystone-54c687bbf4-87mjw\" (UID: 
\"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.328565 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-fernet-keys\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.328624 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-internal-tls-certs\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.431453 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pw96\" (UniqueName: \"kubernetes.io/projected/11af641b-1498-4f72-885a-35b8531e251c-kube-api-access-6pw96\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.431513 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-credential-keys\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.431556 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-combined-ca-bundle\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.431585 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-config-data\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.431617 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-scripts\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.431640 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-public-tls-certs\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.432365 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-fernet-keys\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 
22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.432394 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-internal-tls-certs\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.436388 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-config-data\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.436478 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-combined-ca-bundle\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.436549 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-scripts\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.436947 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-internal-tls-certs\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.437065 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-public-tls-certs\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.437969 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-fernet-keys\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.439670 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/11af641b-1498-4f72-885a-35b8531e251c-credential-keys\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.452115 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pw96\" (UniqueName: \"kubernetes.io/projected/11af641b-1498-4f72-885a-35b8531e251c-kube-api-access-6pw96\") pod \"keystone-54c687bbf4-87mjw\" (UID: \"11af641b-1498-4f72-885a-35b8531e251c\") " pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.511887 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:25:58 crc kubenswrapper[4773]: I0122 13:25:58.940916 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-54c687bbf4-87mjw"] Jan 22 13:25:59 crc kubenswrapper[4773]: I0122 13:25:59.121141 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-54c687bbf4-87mjw" event={"ID":"11af641b-1498-4f72-885a-35b8531e251c","Type":"ContainerStarted","Data":"dcfe260b8271b2539bee3ad76c55e705c7abdc791d0ada6bae38c0a9e5b56b30"} Jan 22 13:26:00 crc kubenswrapper[4773]: I0122 13:26:00.137803 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-54c687bbf4-87mjw" event={"ID":"11af641b-1498-4f72-885a-35b8531e251c","Type":"ContainerStarted","Data":"740961e46609de7bab14e080c366280b3fd4dd3d2d4b079f5ff86c38ad756a27"} Jan 22 13:26:00 crc kubenswrapper[4773]: I0122 13:26:00.138203 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:26:00 crc kubenswrapper[4773]: I0122 13:26:00.172007 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-54c687bbf4-87mjw" podStartSLOduration=2.171975309 podStartE2EDuration="2.171975309s" podCreationTimestamp="2026-01-22 13:25:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:26:00.163603533 +0000 UTC m=+5467.741719388" watchObservedRunningTime="2026-01-22 13:26:00.171975309 +0000 UTC m=+5467.750091134" Jan 22 13:26:04 crc kubenswrapper[4773]: I0122 13:26:04.074070 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:26:04 crc kubenswrapper[4773]: I0122 13:26:04.074510 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:26:30 crc kubenswrapper[4773]: I0122 13:26:30.154017 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-54c687bbf4-87mjw" Jan 22 13:26:30 crc kubenswrapper[4773]: I0122 13:26:30.800575 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jan 22 13:26:30 crc kubenswrapper[4773]: I0122 13:26:30.801837 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 22 13:26:30 crc kubenswrapper[4773]: I0122 13:26:30.804528 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Jan 22 13:26:30 crc kubenswrapper[4773]: I0122 13:26:30.804643 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-8f8px" Jan 22 13:26:30 crc kubenswrapper[4773]: I0122 13:26:30.806664 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Jan 22 13:26:30 crc kubenswrapper[4773]: I0122 13:26:30.817645 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 22 13:26:30 crc kubenswrapper[4773]: I0122 13:26:30.873493 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Jan 22 13:26:30 crc kubenswrapper[4773]: E0122 13:26:30.875275 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle kube-api-access-ml9w5 openstack-config openstack-config-secret], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/openstackclient" podUID="99948424-1ea5-4591-8fe7-62871febc6e6" Jan 22 13:26:30 crc kubenswrapper[4773]: I0122 13:26:30.884509 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Jan 22 13:26:30 crc kubenswrapper[4773]: I0122 13:26:30.936384 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jan 22 13:26:30 crc kubenswrapper[4773]: I0122 13:26:30.938151 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 13:26:30 crc kubenswrapper[4773]: I0122 13:26:30.939904 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ml9w5\" (UniqueName: \"kubernetes.io/projected/99948424-1ea5-4591-8fe7-62871febc6e6-kube-api-access-ml9w5\") pod \"openstackclient\" (UID: \"99948424-1ea5-4591-8fe7-62871febc6e6\") " pod="openstack/openstackclient" Jan 22 13:26:30 crc kubenswrapper[4773]: I0122 13:26:30.939968 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99948424-1ea5-4591-8fe7-62871febc6e6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"99948424-1ea5-4591-8fe7-62871febc6e6\") " pod="openstack/openstackclient" Jan 22 13:26:30 crc kubenswrapper[4773]: I0122 13:26:30.940071 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/99948424-1ea5-4591-8fe7-62871febc6e6-openstack-config-secret\") pod \"openstackclient\" (UID: \"99948424-1ea5-4591-8fe7-62871febc6e6\") " pod="openstack/openstackclient" Jan 22 13:26:30 crc kubenswrapper[4773]: I0122 13:26:30.940146 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/99948424-1ea5-4591-8fe7-62871febc6e6-openstack-config\") pod \"openstackclient\" (UID: \"99948424-1ea5-4591-8fe7-62871febc6e6\") " pod="openstack/openstackclient" Jan 22 13:26:30 crc kubenswrapper[4773]: I0122 13:26:30.961595 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.041767 4773 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/99948424-1ea5-4591-8fe7-62871febc6e6-openstack-config\") pod \"openstackclient\" (UID: \"99948424-1ea5-4591-8fe7-62871febc6e6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.041841 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.041888 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdc86\" (UniqueName: \"kubernetes.io/projected/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-kube-api-access-mdc86\") pod \"openstackclient\" (UID: \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.041925 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-openstack-config-secret\") pod \"openstackclient\" (UID: \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.041956 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ml9w5\" (UniqueName: \"kubernetes.io/projected/99948424-1ea5-4591-8fe7-62871febc6e6-kube-api-access-ml9w5\") pod \"openstackclient\" (UID: \"99948424-1ea5-4591-8fe7-62871febc6e6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.041980 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-openstack-config\") pod \"openstackclient\" (UID: \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.042015 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99948424-1ea5-4591-8fe7-62871febc6e6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"99948424-1ea5-4591-8fe7-62871febc6e6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.042272 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/99948424-1ea5-4591-8fe7-62871febc6e6-openstack-config-secret\") pod \"openstackclient\" (UID: \"99948424-1ea5-4591-8fe7-62871febc6e6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.042801 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/99948424-1ea5-4591-8fe7-62871febc6e6-openstack-config\") pod \"openstackclient\" (UID: \"99948424-1ea5-4591-8fe7-62871febc6e6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: E0122 13:26:31.044787 4773 projected.go:194] Error preparing data for projected volume kube-api-access-ml9w5 for pod openstack/openstackclient: failed to fetch token: serviceaccounts 
"openstackclient-openstackclient" is forbidden: the UID in the bound object reference (99948424-1ea5-4591-8fe7-62871febc6e6) does not match the UID in record. The object might have been deleted and then recreated Jan 22 13:26:31 crc kubenswrapper[4773]: E0122 13:26:31.044910 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/99948424-1ea5-4591-8fe7-62871febc6e6-kube-api-access-ml9w5 podName:99948424-1ea5-4591-8fe7-62871febc6e6 nodeName:}" failed. No retries permitted until 2026-01-22 13:26:31.54487615 +0000 UTC m=+5499.122991975 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-ml9w5" (UniqueName: "kubernetes.io/projected/99948424-1ea5-4591-8fe7-62871febc6e6-kube-api-access-ml9w5") pod "openstackclient" (UID: "99948424-1ea5-4591-8fe7-62871febc6e6") : failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (99948424-1ea5-4591-8fe7-62871febc6e6) does not match the UID in record. The object might have been deleted and then recreated Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.051001 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99948424-1ea5-4591-8fe7-62871febc6e6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"99948424-1ea5-4591-8fe7-62871febc6e6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.051215 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/99948424-1ea5-4591-8fe7-62871febc6e6-openstack-config-secret\") pod \"openstackclient\" (UID: \"99948424-1ea5-4591-8fe7-62871febc6e6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.144096 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.144173 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdc86\" (UniqueName: \"kubernetes.io/projected/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-kube-api-access-mdc86\") pod \"openstackclient\" (UID: \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.144211 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-openstack-config-secret\") pod \"openstackclient\" (UID: \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.144251 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-openstack-config\") pod \"openstackclient\" (UID: \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.145205 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: 
\"kubernetes.io/configmap/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-openstack-config\") pod \"openstackclient\" (UID: \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.148823 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-openstack-config-secret\") pod \"openstackclient\" (UID: \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.149907 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.159773 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdc86\" (UniqueName: \"kubernetes.io/projected/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-kube-api-access-mdc86\") pod \"openstackclient\" (UID: \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.262593 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.454880 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.459734 4773 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="99948424-1ea5-4591-8fe7-62871febc6e6" podUID="d8cef3c1-4fe1-4d51-a20f-f154a8873fb6" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.476888 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.554156 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ml9w5\" (UniqueName: \"kubernetes.io/projected/99948424-1ea5-4591-8fe7-62871febc6e6-kube-api-access-ml9w5\") pod \"openstackclient\" (UID: \"99948424-1ea5-4591-8fe7-62871febc6e6\") " pod="openstack/openstackclient" Jan 22 13:26:31 crc kubenswrapper[4773]: E0122 13:26:31.556594 4773 projected.go:194] Error preparing data for projected volume kube-api-access-ml9w5 for pod openstack/openstackclient: failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (99948424-1ea5-4591-8fe7-62871febc6e6) does not match the UID in record. The object might have been deleted and then recreated Jan 22 13:26:31 crc kubenswrapper[4773]: E0122 13:26:31.556966 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/99948424-1ea5-4591-8fe7-62871febc6e6-kube-api-access-ml9w5 podName:99948424-1ea5-4591-8fe7-62871febc6e6 nodeName:}" failed. No retries permitted until 2026-01-22 13:26:32.556945813 +0000 UTC m=+5500.135061638 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-ml9w5" (UniqueName: "kubernetes.io/projected/99948424-1ea5-4591-8fe7-62871febc6e6-kube-api-access-ml9w5") pod "openstackclient" (UID: "99948424-1ea5-4591-8fe7-62871febc6e6") : failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (99948424-1ea5-4591-8fe7-62871febc6e6) does not match the UID in record. The object might have been deleted and then recreated Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.655824 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/99948424-1ea5-4591-8fe7-62871febc6e6-openstack-config\") pod \"99948424-1ea5-4591-8fe7-62871febc6e6\" (UID: \"99948424-1ea5-4591-8fe7-62871febc6e6\") " Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.655954 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/99948424-1ea5-4591-8fe7-62871febc6e6-openstack-config-secret\") pod \"99948424-1ea5-4591-8fe7-62871febc6e6\" (UID: \"99948424-1ea5-4591-8fe7-62871febc6e6\") " Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.656112 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99948424-1ea5-4591-8fe7-62871febc6e6-combined-ca-bundle\") pod \"99948424-1ea5-4591-8fe7-62871febc6e6\" (UID: \"99948424-1ea5-4591-8fe7-62871febc6e6\") " Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.656586 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ml9w5\" (UniqueName: \"kubernetes.io/projected/99948424-1ea5-4591-8fe7-62871febc6e6-kube-api-access-ml9w5\") on node \"crc\" DevicePath \"\"" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.656953 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99948424-1ea5-4591-8fe7-62871febc6e6-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "99948424-1ea5-4591-8fe7-62871febc6e6" (UID: "99948424-1ea5-4591-8fe7-62871febc6e6"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.660558 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99948424-1ea5-4591-8fe7-62871febc6e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "99948424-1ea5-4591-8fe7-62871febc6e6" (UID: "99948424-1ea5-4591-8fe7-62871febc6e6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.662512 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99948424-1ea5-4591-8fe7-62871febc6e6-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "99948424-1ea5-4591-8fe7-62871febc6e6" (UID: "99948424-1ea5-4591-8fe7-62871febc6e6"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.687445 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.760926 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99948424-1ea5-4591-8fe7-62871febc6e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.760962 4773 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/99948424-1ea5-4591-8fe7-62871febc6e6-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:26:31 crc kubenswrapper[4773]: I0122 13:26:31.760971 4773 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/99948424-1ea5-4591-8fe7-62871febc6e6-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 22 13:26:32 crc kubenswrapper[4773]: I0122 13:26:32.469963 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 13:26:32 crc kubenswrapper[4773]: I0122 13:26:32.469971 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6","Type":"ContainerStarted","Data":"de5715f01ec7f8bd752a455ceeac7844b9912e726156b8572141c448ebeea943"} Jan 22 13:26:32 crc kubenswrapper[4773]: I0122 13:26:32.471067 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6","Type":"ContainerStarted","Data":"d1cedf39e486a8591cdc229c040a96ee244841bd4430f16e5c21dd1305436266"} Jan 22 13:26:32 crc kubenswrapper[4773]: I0122 13:26:32.499345 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.499316722 podStartE2EDuration="2.499316722s" podCreationTimestamp="2026-01-22 13:26:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:26:32.499016224 +0000 UTC m=+5500.077132109" watchObservedRunningTime="2026-01-22 13:26:32.499316722 +0000 UTC m=+5500.077432557" Jan 22 13:26:32 crc kubenswrapper[4773]: I0122 13:26:32.504262 4773 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="99948424-1ea5-4591-8fe7-62871febc6e6" podUID="d8cef3c1-4fe1-4d51-a20f-f154a8873fb6" Jan 22 13:26:32 crc kubenswrapper[4773]: I0122 13:26:32.675947 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99948424-1ea5-4591-8fe7-62871febc6e6" path="/var/lib/kubelet/pods/99948424-1ea5-4591-8fe7-62871febc6e6/volumes" Jan 22 13:26:34 crc kubenswrapper[4773]: I0122 13:26:34.074620 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:26:34 crc kubenswrapper[4773]: I0122 13:26:34.076227 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:27:04 crc kubenswrapper[4773]: I0122 13:27:04.075039 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:27:04 crc kubenswrapper[4773]: I0122 13:27:04.075874 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:27:04 crc kubenswrapper[4773]: I0122 13:27:04.075969 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 13:27:04 crc kubenswrapper[4773]: I0122 13:27:04.077193 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"22f45b11bbee937fdf871ae5de53af8235a219a99f6dd311534489dc7fbcd516"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 13:27:04 crc kubenswrapper[4773]: I0122 13:27:04.077346 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://22f45b11bbee937fdf871ae5de53af8235a219a99f6dd311534489dc7fbcd516" gracePeriod=600 Jan 22 13:27:04 crc kubenswrapper[4773]: I0122 13:27:04.767757 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="22f45b11bbee937fdf871ae5de53af8235a219a99f6dd311534489dc7fbcd516" exitCode=0 Jan 22 13:27:04 crc kubenswrapper[4773]: I0122 13:27:04.767831 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"22f45b11bbee937fdf871ae5de53af8235a219a99f6dd311534489dc7fbcd516"} Jan 22 13:27:04 crc kubenswrapper[4773]: I0122 13:27:04.768122 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506"} Jan 22 13:27:04 crc kubenswrapper[4773]: I0122 13:27:04.768145 4773 scope.go:117] "RemoveContainer" containerID="e333eae0ea54df966e06ec4b1f5bc28b222cf05bdef924776730918482f3cd47" Jan 22 13:27:49 crc kubenswrapper[4773]: I0122 13:27:49.127822 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6h2nt"] Jan 22 13:27:49 crc kubenswrapper[4773]: I0122 13:27:49.130488 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6h2nt" Jan 22 13:27:49 crc kubenswrapper[4773]: I0122 13:27:49.153053 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6h2nt"] Jan 22 13:27:49 crc kubenswrapper[4773]: I0122 13:27:49.228009 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2b5e659-e0c0-4c24-b721-8962a4dfe0d6-utilities\") pod \"redhat-marketplace-6h2nt\" (UID: \"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6\") " pod="openshift-marketplace/redhat-marketplace-6h2nt" Jan 22 13:27:49 crc kubenswrapper[4773]: I0122 13:27:49.228603 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2b5e659-e0c0-4c24-b721-8962a4dfe0d6-catalog-content\") pod \"redhat-marketplace-6h2nt\" (UID: \"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6\") " pod="openshift-marketplace/redhat-marketplace-6h2nt" Jan 22 13:27:49 crc kubenswrapper[4773]: I0122 13:27:49.228795 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gbpg\" (UniqueName: \"kubernetes.io/projected/a2b5e659-e0c0-4c24-b721-8962a4dfe0d6-kube-api-access-6gbpg\") pod \"redhat-marketplace-6h2nt\" (UID: \"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6\") " pod="openshift-marketplace/redhat-marketplace-6h2nt" Jan 22 13:27:49 crc kubenswrapper[4773]: I0122 13:27:49.330548 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2b5e659-e0c0-4c24-b721-8962a4dfe0d6-utilities\") pod \"redhat-marketplace-6h2nt\" (UID: \"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6\") " pod="openshift-marketplace/redhat-marketplace-6h2nt" Jan 22 13:27:49 crc kubenswrapper[4773]: I0122 13:27:49.330651 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2b5e659-e0c0-4c24-b721-8962a4dfe0d6-catalog-content\") pod \"redhat-marketplace-6h2nt\" (UID: \"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6\") " pod="openshift-marketplace/redhat-marketplace-6h2nt" Jan 22 13:27:49 crc kubenswrapper[4773]: I0122 13:27:49.330726 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gbpg\" (UniqueName: \"kubernetes.io/projected/a2b5e659-e0c0-4c24-b721-8962a4dfe0d6-kube-api-access-6gbpg\") pod \"redhat-marketplace-6h2nt\" (UID: \"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6\") " pod="openshift-marketplace/redhat-marketplace-6h2nt" Jan 22 13:27:49 crc kubenswrapper[4773]: I0122 13:27:49.331590 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2b5e659-e0c0-4c24-b721-8962a4dfe0d6-catalog-content\") pod \"redhat-marketplace-6h2nt\" (UID: \"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6\") " pod="openshift-marketplace/redhat-marketplace-6h2nt" Jan 22 13:27:49 crc kubenswrapper[4773]: I0122 13:27:49.332907 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2b5e659-e0c0-4c24-b721-8962a4dfe0d6-utilities\") pod \"redhat-marketplace-6h2nt\" (UID: \"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6\") " pod="openshift-marketplace/redhat-marketplace-6h2nt" Jan 22 13:27:49 crc kubenswrapper[4773]: I0122 13:27:49.352374 4773 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-6gbpg\" (UniqueName: \"kubernetes.io/projected/a2b5e659-e0c0-4c24-b721-8962a4dfe0d6-kube-api-access-6gbpg\") pod \"redhat-marketplace-6h2nt\" (UID: \"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6\") " pod="openshift-marketplace/redhat-marketplace-6h2nt" Jan 22 13:27:49 crc kubenswrapper[4773]: I0122 13:27:49.451797 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6h2nt" Jan 22 13:27:49 crc kubenswrapper[4773]: I0122 13:27:49.930657 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6h2nt"] Jan 22 13:27:50 crc kubenswrapper[4773]: I0122 13:27:50.178840 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6h2nt" event={"ID":"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6","Type":"ContainerStarted","Data":"335d863e6bb3c6c46403eb032f6f3f1e0f5bfd702ff6b99a141d26e5f61b9118"} Jan 22 13:27:51 crc kubenswrapper[4773]: I0122 13:27:51.186101 4773 generic.go:334] "Generic (PLEG): container finished" podID="a2b5e659-e0c0-4c24-b721-8962a4dfe0d6" containerID="49ae6066509b096a52fc87e47b35262372dc3ab3fa9cff10331ac9bea0892b31" exitCode=0 Jan 22 13:27:51 crc kubenswrapper[4773]: I0122 13:27:51.186161 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6h2nt" event={"ID":"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6","Type":"ContainerDied","Data":"49ae6066509b096a52fc87e47b35262372dc3ab3fa9cff10331ac9bea0892b31"} Jan 22 13:27:53 crc kubenswrapper[4773]: I0122 13:27:53.210600 4773 generic.go:334] "Generic (PLEG): container finished" podID="a2b5e659-e0c0-4c24-b721-8962a4dfe0d6" containerID="074974b4d34eb39c606c5f19622d4ad164dd7cbd79895e4ee237b6d2d0797465" exitCode=0 Jan 22 13:27:53 crc kubenswrapper[4773]: I0122 13:27:53.214412 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6h2nt" event={"ID":"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6","Type":"ContainerDied","Data":"074974b4d34eb39c606c5f19622d4ad164dd7cbd79895e4ee237b6d2d0797465"} Jan 22 13:27:55 crc kubenswrapper[4773]: I0122 13:27:55.241264 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6h2nt" event={"ID":"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6","Type":"ContainerStarted","Data":"d06ff1028327327e16e61c9e565dcc48cd1fcaf4113ebfbe44f099bfd58d192d"} Jan 22 13:27:55 crc kubenswrapper[4773]: I0122 13:27:55.268669 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6h2nt" podStartSLOduration=3.5436421190000003 podStartE2EDuration="6.268635252s" podCreationTimestamp="2026-01-22 13:27:49 +0000 UTC" firstStartedPulling="2026-01-22 13:27:51.18920262 +0000 UTC m=+5578.767318445" lastFinishedPulling="2026-01-22 13:27:53.914195753 +0000 UTC m=+5581.492311578" observedRunningTime="2026-01-22 13:27:55.26677966 +0000 UTC m=+5582.844895535" watchObservedRunningTime="2026-01-22 13:27:55.268635252 +0000 UTC m=+5582.846751117" Jan 22 13:27:59 crc kubenswrapper[4773]: I0122 13:27:59.452979 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6h2nt" Jan 22 13:27:59 crc kubenswrapper[4773]: I0122 13:27:59.453678 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6h2nt" Jan 22 13:27:59 crc kubenswrapper[4773]: I0122 13:27:59.505375 4773 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6h2nt" Jan 22 13:28:00 crc kubenswrapper[4773]: I0122 13:28:00.328459 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6h2nt" Jan 22 13:28:00 crc kubenswrapper[4773]: I0122 13:28:00.389675 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6h2nt"] Jan 22 13:28:02 crc kubenswrapper[4773]: I0122 13:28:02.321174 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6h2nt" podUID="a2b5e659-e0c0-4c24-b721-8962a4dfe0d6" containerName="registry-server" containerID="cri-o://d06ff1028327327e16e61c9e565dcc48cd1fcaf4113ebfbe44f099bfd58d192d" gracePeriod=2 Jan 22 13:28:02 crc kubenswrapper[4773]: I0122 13:28:02.773687 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6h2nt" Jan 22 13:28:02 crc kubenswrapper[4773]: I0122 13:28:02.811770 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6gbpg\" (UniqueName: \"kubernetes.io/projected/a2b5e659-e0c0-4c24-b721-8962a4dfe0d6-kube-api-access-6gbpg\") pod \"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6\" (UID: \"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6\") " Jan 22 13:28:02 crc kubenswrapper[4773]: I0122 13:28:02.811984 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2b5e659-e0c0-4c24-b721-8962a4dfe0d6-utilities\") pod \"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6\" (UID: \"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6\") " Jan 22 13:28:02 crc kubenswrapper[4773]: I0122 13:28:02.812086 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2b5e659-e0c0-4c24-b721-8962a4dfe0d6-catalog-content\") pod \"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6\" (UID: \"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6\") " Jan 22 13:28:02 crc kubenswrapper[4773]: I0122 13:28:02.813444 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2b5e659-e0c0-4c24-b721-8962a4dfe0d6-utilities" (OuterVolumeSpecName: "utilities") pod "a2b5e659-e0c0-4c24-b721-8962a4dfe0d6" (UID: "a2b5e659-e0c0-4c24-b721-8962a4dfe0d6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:28:02 crc kubenswrapper[4773]: I0122 13:28:02.819753 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2b5e659-e0c0-4c24-b721-8962a4dfe0d6-kube-api-access-6gbpg" (OuterVolumeSpecName: "kube-api-access-6gbpg") pod "a2b5e659-e0c0-4c24-b721-8962a4dfe0d6" (UID: "a2b5e659-e0c0-4c24-b721-8962a4dfe0d6"). InnerVolumeSpecName "kube-api-access-6gbpg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:28:02 crc kubenswrapper[4773]: I0122 13:28:02.908579 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2b5e659-e0c0-4c24-b721-8962a4dfe0d6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a2b5e659-e0c0-4c24-b721-8962a4dfe0d6" (UID: "a2b5e659-e0c0-4c24-b721-8962a4dfe0d6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:28:02 crc kubenswrapper[4773]: I0122 13:28:02.915075 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2b5e659-e0c0-4c24-b721-8962a4dfe0d6-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:02 crc kubenswrapper[4773]: I0122 13:28:02.915196 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6gbpg\" (UniqueName: \"kubernetes.io/projected/a2b5e659-e0c0-4c24-b721-8962a4dfe0d6-kube-api-access-6gbpg\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:02 crc kubenswrapper[4773]: I0122 13:28:02.915271 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2b5e659-e0c0-4c24-b721-8962a4dfe0d6-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:03 crc kubenswrapper[4773]: I0122 13:28:03.333492 4773 generic.go:334] "Generic (PLEG): container finished" podID="a2b5e659-e0c0-4c24-b721-8962a4dfe0d6" containerID="d06ff1028327327e16e61c9e565dcc48cd1fcaf4113ebfbe44f099bfd58d192d" exitCode=0 Jan 22 13:28:03 crc kubenswrapper[4773]: I0122 13:28:03.333565 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6h2nt" event={"ID":"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6","Type":"ContainerDied","Data":"d06ff1028327327e16e61c9e565dcc48cd1fcaf4113ebfbe44f099bfd58d192d"} Jan 22 13:28:03 crc kubenswrapper[4773]: I0122 13:28:03.333654 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6h2nt" event={"ID":"a2b5e659-e0c0-4c24-b721-8962a4dfe0d6","Type":"ContainerDied","Data":"335d863e6bb3c6c46403eb032f6f3f1e0f5bfd702ff6b99a141d26e5f61b9118"} Jan 22 13:28:03 crc kubenswrapper[4773]: I0122 13:28:03.333678 4773 scope.go:117] "RemoveContainer" containerID="d06ff1028327327e16e61c9e565dcc48cd1fcaf4113ebfbe44f099bfd58d192d" Jan 22 13:28:03 crc kubenswrapper[4773]: I0122 13:28:03.333591 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6h2nt" Jan 22 13:28:03 crc kubenswrapper[4773]: I0122 13:28:03.352148 4773 scope.go:117] "RemoveContainer" containerID="074974b4d34eb39c606c5f19622d4ad164dd7cbd79895e4ee237b6d2d0797465" Jan 22 13:28:03 crc kubenswrapper[4773]: I0122 13:28:03.376655 4773 scope.go:117] "RemoveContainer" containerID="49ae6066509b096a52fc87e47b35262372dc3ab3fa9cff10331ac9bea0892b31" Jan 22 13:28:03 crc kubenswrapper[4773]: I0122 13:28:03.431944 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6h2nt"] Jan 22 13:28:03 crc kubenswrapper[4773]: I0122 13:28:03.438935 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6h2nt"] Jan 22 13:28:03 crc kubenswrapper[4773]: I0122 13:28:03.446476 4773 scope.go:117] "RemoveContainer" containerID="d06ff1028327327e16e61c9e565dcc48cd1fcaf4113ebfbe44f099bfd58d192d" Jan 22 13:28:03 crc kubenswrapper[4773]: E0122 13:28:03.447480 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d06ff1028327327e16e61c9e565dcc48cd1fcaf4113ebfbe44f099bfd58d192d\": container with ID starting with d06ff1028327327e16e61c9e565dcc48cd1fcaf4113ebfbe44f099bfd58d192d not found: ID does not exist" containerID="d06ff1028327327e16e61c9e565dcc48cd1fcaf4113ebfbe44f099bfd58d192d" Jan 22 13:28:03 crc kubenswrapper[4773]: I0122 13:28:03.447526 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d06ff1028327327e16e61c9e565dcc48cd1fcaf4113ebfbe44f099bfd58d192d"} err="failed to get container status \"d06ff1028327327e16e61c9e565dcc48cd1fcaf4113ebfbe44f099bfd58d192d\": rpc error: code = NotFound desc = could not find container \"d06ff1028327327e16e61c9e565dcc48cd1fcaf4113ebfbe44f099bfd58d192d\": container with ID starting with d06ff1028327327e16e61c9e565dcc48cd1fcaf4113ebfbe44f099bfd58d192d not found: ID does not exist" Jan 22 13:28:03 crc kubenswrapper[4773]: I0122 13:28:03.447559 4773 scope.go:117] "RemoveContainer" containerID="074974b4d34eb39c606c5f19622d4ad164dd7cbd79895e4ee237b6d2d0797465" Jan 22 13:28:03 crc kubenswrapper[4773]: E0122 13:28:03.447969 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"074974b4d34eb39c606c5f19622d4ad164dd7cbd79895e4ee237b6d2d0797465\": container with ID starting with 074974b4d34eb39c606c5f19622d4ad164dd7cbd79895e4ee237b6d2d0797465 not found: ID does not exist" containerID="074974b4d34eb39c606c5f19622d4ad164dd7cbd79895e4ee237b6d2d0797465" Jan 22 13:28:03 crc kubenswrapper[4773]: I0122 13:28:03.447993 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"074974b4d34eb39c606c5f19622d4ad164dd7cbd79895e4ee237b6d2d0797465"} err="failed to get container status \"074974b4d34eb39c606c5f19622d4ad164dd7cbd79895e4ee237b6d2d0797465\": rpc error: code = NotFound desc = could not find container \"074974b4d34eb39c606c5f19622d4ad164dd7cbd79895e4ee237b6d2d0797465\": container with ID starting with 074974b4d34eb39c606c5f19622d4ad164dd7cbd79895e4ee237b6d2d0797465 not found: ID does not exist" Jan 22 13:28:03 crc kubenswrapper[4773]: I0122 13:28:03.448027 4773 scope.go:117] "RemoveContainer" containerID="49ae6066509b096a52fc87e47b35262372dc3ab3fa9cff10331ac9bea0892b31" Jan 22 13:28:03 crc kubenswrapper[4773]: E0122 13:28:03.448468 4773 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"49ae6066509b096a52fc87e47b35262372dc3ab3fa9cff10331ac9bea0892b31\": container with ID starting with 49ae6066509b096a52fc87e47b35262372dc3ab3fa9cff10331ac9bea0892b31 not found: ID does not exist" containerID="49ae6066509b096a52fc87e47b35262372dc3ab3fa9cff10331ac9bea0892b31" Jan 22 13:28:03 crc kubenswrapper[4773]: I0122 13:28:03.448498 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49ae6066509b096a52fc87e47b35262372dc3ab3fa9cff10331ac9bea0892b31"} err="failed to get container status \"49ae6066509b096a52fc87e47b35262372dc3ab3fa9cff10331ac9bea0892b31\": rpc error: code = NotFound desc = could not find container \"49ae6066509b096a52fc87e47b35262372dc3ab3fa9cff10331ac9bea0892b31\": container with ID starting with 49ae6066509b096a52fc87e47b35262372dc3ab3fa9cff10331ac9bea0892b31 not found: ID does not exist" Jan 22 13:28:04 crc kubenswrapper[4773]: I0122 13:28:04.685019 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2b5e659-e0c0-4c24-b721-8962a4dfe0d6" path="/var/lib/kubelet/pods/a2b5e659-e0c0-4c24-b721-8962a4dfe0d6/volumes" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.498590 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-kndjf"] Jan 22 13:28:12 crc kubenswrapper[4773]: E0122 13:28:12.499459 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2b5e659-e0c0-4c24-b721-8962a4dfe0d6" containerName="extract-content" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.499473 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2b5e659-e0c0-4c24-b721-8962a4dfe0d6" containerName="extract-content" Jan 22 13:28:12 crc kubenswrapper[4773]: E0122 13:28:12.499485 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2b5e659-e0c0-4c24-b721-8962a4dfe0d6" containerName="extract-utilities" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.499492 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2b5e659-e0c0-4c24-b721-8962a4dfe0d6" containerName="extract-utilities" Jan 22 13:28:12 crc kubenswrapper[4773]: E0122 13:28:12.499507 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2b5e659-e0c0-4c24-b721-8962a4dfe0d6" containerName="registry-server" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.499512 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2b5e659-e0c0-4c24-b721-8962a4dfe0d6" containerName="registry-server" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.499683 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2b5e659-e0c0-4c24-b721-8962a4dfe0d6" containerName="registry-server" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.500509 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-kndjf" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.507863 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-kndjf"] Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.597031 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-2754-account-create-update-xqxq8"] Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.598312 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-2754-account-create-update-xqxq8" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.600191 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.608513 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6147fb59-c7e4-4131-b427-9b9e121541a0-operator-scripts\") pod \"barbican-db-create-kndjf\" (UID: \"6147fb59-c7e4-4131-b427-9b9e121541a0\") " pod="openstack/barbican-db-create-kndjf" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.608759 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dw4x\" (UniqueName: \"kubernetes.io/projected/6147fb59-c7e4-4131-b427-9b9e121541a0-kube-api-access-9dw4x\") pod \"barbican-db-create-kndjf\" (UID: \"6147fb59-c7e4-4131-b427-9b9e121541a0\") " pod="openstack/barbican-db-create-kndjf" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.610239 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-2754-account-create-update-xqxq8"] Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.710779 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35c73005-43eb-4c3c-ab06-2a5fff27524d-operator-scripts\") pod \"barbican-2754-account-create-update-xqxq8\" (UID: \"35c73005-43eb-4c3c-ab06-2a5fff27524d\") " pod="openstack/barbican-2754-account-create-update-xqxq8" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.710859 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6147fb59-c7e4-4131-b427-9b9e121541a0-operator-scripts\") pod \"barbican-db-create-kndjf\" (UID: \"6147fb59-c7e4-4131-b427-9b9e121541a0\") " pod="openstack/barbican-db-create-kndjf" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.711013 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpvlt\" (UniqueName: \"kubernetes.io/projected/35c73005-43eb-4c3c-ab06-2a5fff27524d-kube-api-access-jpvlt\") pod \"barbican-2754-account-create-update-xqxq8\" (UID: \"35c73005-43eb-4c3c-ab06-2a5fff27524d\") " pod="openstack/barbican-2754-account-create-update-xqxq8" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.711416 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dw4x\" (UniqueName: \"kubernetes.io/projected/6147fb59-c7e4-4131-b427-9b9e121541a0-kube-api-access-9dw4x\") pod \"barbican-db-create-kndjf\" (UID: \"6147fb59-c7e4-4131-b427-9b9e121541a0\") " pod="openstack/barbican-db-create-kndjf" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.712910 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6147fb59-c7e4-4131-b427-9b9e121541a0-operator-scripts\") pod \"barbican-db-create-kndjf\" (UID: \"6147fb59-c7e4-4131-b427-9b9e121541a0\") " pod="openstack/barbican-db-create-kndjf" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.729203 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dw4x\" (UniqueName: \"kubernetes.io/projected/6147fb59-c7e4-4131-b427-9b9e121541a0-kube-api-access-9dw4x\") pod 
\"barbican-db-create-kndjf\" (UID: \"6147fb59-c7e4-4131-b427-9b9e121541a0\") " pod="openstack/barbican-db-create-kndjf" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.812970 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35c73005-43eb-4c3c-ab06-2a5fff27524d-operator-scripts\") pod \"barbican-2754-account-create-update-xqxq8\" (UID: \"35c73005-43eb-4c3c-ab06-2a5fff27524d\") " pod="openstack/barbican-2754-account-create-update-xqxq8" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.813042 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpvlt\" (UniqueName: \"kubernetes.io/projected/35c73005-43eb-4c3c-ab06-2a5fff27524d-kube-api-access-jpvlt\") pod \"barbican-2754-account-create-update-xqxq8\" (UID: \"35c73005-43eb-4c3c-ab06-2a5fff27524d\") " pod="openstack/barbican-2754-account-create-update-xqxq8" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.813990 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35c73005-43eb-4c3c-ab06-2a5fff27524d-operator-scripts\") pod \"barbican-2754-account-create-update-xqxq8\" (UID: \"35c73005-43eb-4c3c-ab06-2a5fff27524d\") " pod="openstack/barbican-2754-account-create-update-xqxq8" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.817040 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-kndjf" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.834017 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpvlt\" (UniqueName: \"kubernetes.io/projected/35c73005-43eb-4c3c-ab06-2a5fff27524d-kube-api-access-jpvlt\") pod \"barbican-2754-account-create-update-xqxq8\" (UID: \"35c73005-43eb-4c3c-ab06-2a5fff27524d\") " pod="openstack/barbican-2754-account-create-update-xqxq8" Jan 22 13:28:12 crc kubenswrapper[4773]: I0122 13:28:12.929224 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-2754-account-create-update-xqxq8" Jan 22 13:28:13 crc kubenswrapper[4773]: I0122 13:28:13.053194 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-kndjf"] Jan 22 13:28:13 crc kubenswrapper[4773]: I0122 13:28:13.374716 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-2754-account-create-update-xqxq8"] Jan 22 13:28:13 crc kubenswrapper[4773]: I0122 13:28:13.423423 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2754-account-create-update-xqxq8" event={"ID":"35c73005-43eb-4c3c-ab06-2a5fff27524d","Type":"ContainerStarted","Data":"e1094547945f3cec0e45ab2589db5f8dd4c539a41f001d1f44e38bfaeb862a47"} Jan 22 13:28:13 crc kubenswrapper[4773]: I0122 13:28:13.424869 4773 generic.go:334] "Generic (PLEG): container finished" podID="6147fb59-c7e4-4131-b427-9b9e121541a0" containerID="144f4f2a462f9ff41c5298d990126301bdbc821ed8a59f706182ceb307d666b3" exitCode=0 Jan 22 13:28:13 crc kubenswrapper[4773]: I0122 13:28:13.424907 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-kndjf" event={"ID":"6147fb59-c7e4-4131-b427-9b9e121541a0","Type":"ContainerDied","Data":"144f4f2a462f9ff41c5298d990126301bdbc821ed8a59f706182ceb307d666b3"} Jan 22 13:28:13 crc kubenswrapper[4773]: I0122 13:28:13.424928 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-kndjf" event={"ID":"6147fb59-c7e4-4131-b427-9b9e121541a0","Type":"ContainerStarted","Data":"276deba78d78cf1c5cc2f284cb649211170c8afded0031f452c0a75060abd180"} Jan 22 13:28:14 crc kubenswrapper[4773]: I0122 13:28:14.439673 4773 generic.go:334] "Generic (PLEG): container finished" podID="35c73005-43eb-4c3c-ab06-2a5fff27524d" containerID="57f54bfb38247bec792c1024bbda41dfa392b8d89501bc7218dc262913a2da4a" exitCode=0 Jan 22 13:28:14 crc kubenswrapper[4773]: I0122 13:28:14.440271 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2754-account-create-update-xqxq8" event={"ID":"35c73005-43eb-4c3c-ab06-2a5fff27524d","Type":"ContainerDied","Data":"57f54bfb38247bec792c1024bbda41dfa392b8d89501bc7218dc262913a2da4a"} Jan 22 13:28:14 crc kubenswrapper[4773]: I0122 13:28:14.746063 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-kndjf" Jan 22 13:28:14 crc kubenswrapper[4773]: I0122 13:28:14.866860 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9dw4x\" (UniqueName: \"kubernetes.io/projected/6147fb59-c7e4-4131-b427-9b9e121541a0-kube-api-access-9dw4x\") pod \"6147fb59-c7e4-4131-b427-9b9e121541a0\" (UID: \"6147fb59-c7e4-4131-b427-9b9e121541a0\") " Jan 22 13:28:14 crc kubenswrapper[4773]: I0122 13:28:14.867031 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6147fb59-c7e4-4131-b427-9b9e121541a0-operator-scripts\") pod \"6147fb59-c7e4-4131-b427-9b9e121541a0\" (UID: \"6147fb59-c7e4-4131-b427-9b9e121541a0\") " Jan 22 13:28:14 crc kubenswrapper[4773]: I0122 13:28:14.868096 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6147fb59-c7e4-4131-b427-9b9e121541a0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6147fb59-c7e4-4131-b427-9b9e121541a0" (UID: "6147fb59-c7e4-4131-b427-9b9e121541a0"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:28:14 crc kubenswrapper[4773]: I0122 13:28:14.879437 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6147fb59-c7e4-4131-b427-9b9e121541a0-kube-api-access-9dw4x" (OuterVolumeSpecName: "kube-api-access-9dw4x") pod "6147fb59-c7e4-4131-b427-9b9e121541a0" (UID: "6147fb59-c7e4-4131-b427-9b9e121541a0"). InnerVolumeSpecName "kube-api-access-9dw4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:28:14 crc kubenswrapper[4773]: I0122 13:28:14.969370 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9dw4x\" (UniqueName: \"kubernetes.io/projected/6147fb59-c7e4-4131-b427-9b9e121541a0-kube-api-access-9dw4x\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:14 crc kubenswrapper[4773]: I0122 13:28:14.969421 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6147fb59-c7e4-4131-b427-9b9e121541a0-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:15 crc kubenswrapper[4773]: I0122 13:28:15.451320 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-kndjf" event={"ID":"6147fb59-c7e4-4131-b427-9b9e121541a0","Type":"ContainerDied","Data":"276deba78d78cf1c5cc2f284cb649211170c8afded0031f452c0a75060abd180"} Jan 22 13:28:15 crc kubenswrapper[4773]: I0122 13:28:15.451368 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="276deba78d78cf1c5cc2f284cb649211170c8afded0031f452c0a75060abd180" Jan 22 13:28:15 crc kubenswrapper[4773]: I0122 13:28:15.451369 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-kndjf" Jan 22 13:28:15 crc kubenswrapper[4773]: I0122 13:28:15.719683 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2754-account-create-update-xqxq8" Jan 22 13:28:15 crc kubenswrapper[4773]: I0122 13:28:15.782035 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jpvlt\" (UniqueName: \"kubernetes.io/projected/35c73005-43eb-4c3c-ab06-2a5fff27524d-kube-api-access-jpvlt\") pod \"35c73005-43eb-4c3c-ab06-2a5fff27524d\" (UID: \"35c73005-43eb-4c3c-ab06-2a5fff27524d\") " Jan 22 13:28:15 crc kubenswrapper[4773]: I0122 13:28:15.782217 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35c73005-43eb-4c3c-ab06-2a5fff27524d-operator-scripts\") pod \"35c73005-43eb-4c3c-ab06-2a5fff27524d\" (UID: \"35c73005-43eb-4c3c-ab06-2a5fff27524d\") " Jan 22 13:28:15 crc kubenswrapper[4773]: I0122 13:28:15.783172 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35c73005-43eb-4c3c-ab06-2a5fff27524d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "35c73005-43eb-4c3c-ab06-2a5fff27524d" (UID: "35c73005-43eb-4c3c-ab06-2a5fff27524d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:28:15 crc kubenswrapper[4773]: I0122 13:28:15.791734 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35c73005-43eb-4c3c-ab06-2a5fff27524d-kube-api-access-jpvlt" (OuterVolumeSpecName: "kube-api-access-jpvlt") pod "35c73005-43eb-4c3c-ab06-2a5fff27524d" (UID: "35c73005-43eb-4c3c-ab06-2a5fff27524d"). 
InnerVolumeSpecName "kube-api-access-jpvlt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:28:15 crc kubenswrapper[4773]: I0122 13:28:15.885143 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jpvlt\" (UniqueName: \"kubernetes.io/projected/35c73005-43eb-4c3c-ab06-2a5fff27524d-kube-api-access-jpvlt\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:15 crc kubenswrapper[4773]: I0122 13:28:15.885212 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/35c73005-43eb-4c3c-ab06-2a5fff27524d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:16 crc kubenswrapper[4773]: I0122 13:28:16.461966 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2754-account-create-update-xqxq8" event={"ID":"35c73005-43eb-4c3c-ab06-2a5fff27524d","Type":"ContainerDied","Data":"e1094547945f3cec0e45ab2589db5f8dd4c539a41f001d1f44e38bfaeb862a47"} Jan 22 13:28:16 crc kubenswrapper[4773]: I0122 13:28:16.462011 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2754-account-create-update-xqxq8" Jan 22 13:28:16 crc kubenswrapper[4773]: I0122 13:28:16.462025 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1094547945f3cec0e45ab2589db5f8dd4c539a41f001d1f44e38bfaeb862a47" Jan 22 13:28:17 crc kubenswrapper[4773]: I0122 13:28:17.914216 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-5zkb4"] Jan 22 13:28:17 crc kubenswrapper[4773]: E0122 13:28:17.915778 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6147fb59-c7e4-4131-b427-9b9e121541a0" containerName="mariadb-database-create" Jan 22 13:28:17 crc kubenswrapper[4773]: I0122 13:28:17.915921 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6147fb59-c7e4-4131-b427-9b9e121541a0" containerName="mariadb-database-create" Jan 22 13:28:17 crc kubenswrapper[4773]: E0122 13:28:17.916087 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35c73005-43eb-4c3c-ab06-2a5fff27524d" containerName="mariadb-account-create-update" Jan 22 13:28:17 crc kubenswrapper[4773]: I0122 13:28:17.916212 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="35c73005-43eb-4c3c-ab06-2a5fff27524d" containerName="mariadb-account-create-update" Jan 22 13:28:17 crc kubenswrapper[4773]: I0122 13:28:17.916634 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="35c73005-43eb-4c3c-ab06-2a5fff27524d" containerName="mariadb-account-create-update" Jan 22 13:28:17 crc kubenswrapper[4773]: I0122 13:28:17.916775 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6147fb59-c7e4-4131-b427-9b9e121541a0" containerName="mariadb-database-create" Jan 22 13:28:17 crc kubenswrapper[4773]: I0122 13:28:17.917811 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-5zkb4" Jan 22 13:28:17 crc kubenswrapper[4773]: I0122 13:28:17.922557 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-t94zn" Jan 22 13:28:17 crc kubenswrapper[4773]: I0122 13:28:17.924742 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-5zkb4"] Jan 22 13:28:17 crc kubenswrapper[4773]: I0122 13:28:17.928852 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 22 13:28:18 crc kubenswrapper[4773]: I0122 13:28:18.028228 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bff5894d-1433-4016-9be9-e42afa5f8061-db-sync-config-data\") pod \"barbican-db-sync-5zkb4\" (UID: \"bff5894d-1433-4016-9be9-e42afa5f8061\") " pod="openstack/barbican-db-sync-5zkb4" Jan 22 13:28:18 crc kubenswrapper[4773]: I0122 13:28:18.028426 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9vgg\" (UniqueName: \"kubernetes.io/projected/bff5894d-1433-4016-9be9-e42afa5f8061-kube-api-access-k9vgg\") pod \"barbican-db-sync-5zkb4\" (UID: \"bff5894d-1433-4016-9be9-e42afa5f8061\") " pod="openstack/barbican-db-sync-5zkb4" Jan 22 13:28:18 crc kubenswrapper[4773]: I0122 13:28:18.028470 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bff5894d-1433-4016-9be9-e42afa5f8061-combined-ca-bundle\") pod \"barbican-db-sync-5zkb4\" (UID: \"bff5894d-1433-4016-9be9-e42afa5f8061\") " pod="openstack/barbican-db-sync-5zkb4" Jan 22 13:28:18 crc kubenswrapper[4773]: I0122 13:28:18.130623 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bff5894d-1433-4016-9be9-e42afa5f8061-db-sync-config-data\") pod \"barbican-db-sync-5zkb4\" (UID: \"bff5894d-1433-4016-9be9-e42afa5f8061\") " pod="openstack/barbican-db-sync-5zkb4" Jan 22 13:28:18 crc kubenswrapper[4773]: I0122 13:28:18.131988 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9vgg\" (UniqueName: \"kubernetes.io/projected/bff5894d-1433-4016-9be9-e42afa5f8061-kube-api-access-k9vgg\") pod \"barbican-db-sync-5zkb4\" (UID: \"bff5894d-1433-4016-9be9-e42afa5f8061\") " pod="openstack/barbican-db-sync-5zkb4" Jan 22 13:28:18 crc kubenswrapper[4773]: I0122 13:28:18.132517 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bff5894d-1433-4016-9be9-e42afa5f8061-combined-ca-bundle\") pod \"barbican-db-sync-5zkb4\" (UID: \"bff5894d-1433-4016-9be9-e42afa5f8061\") " pod="openstack/barbican-db-sync-5zkb4" Jan 22 13:28:18 crc kubenswrapper[4773]: I0122 13:28:18.136737 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bff5894d-1433-4016-9be9-e42afa5f8061-combined-ca-bundle\") pod \"barbican-db-sync-5zkb4\" (UID: \"bff5894d-1433-4016-9be9-e42afa5f8061\") " pod="openstack/barbican-db-sync-5zkb4" Jan 22 13:28:18 crc kubenswrapper[4773]: I0122 13:28:18.158402 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/bff5894d-1433-4016-9be9-e42afa5f8061-db-sync-config-data\") pod \"barbican-db-sync-5zkb4\" (UID: \"bff5894d-1433-4016-9be9-e42afa5f8061\") " pod="openstack/barbican-db-sync-5zkb4" Jan 22 13:28:18 crc kubenswrapper[4773]: I0122 13:28:18.161623 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9vgg\" (UniqueName: \"kubernetes.io/projected/bff5894d-1433-4016-9be9-e42afa5f8061-kube-api-access-k9vgg\") pod \"barbican-db-sync-5zkb4\" (UID: \"bff5894d-1433-4016-9be9-e42afa5f8061\") " pod="openstack/barbican-db-sync-5zkb4" Jan 22 13:28:18 crc kubenswrapper[4773]: I0122 13:28:18.239143 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-5zkb4" Jan 22 13:28:18 crc kubenswrapper[4773]: I0122 13:28:18.656042 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-5zkb4"] Jan 22 13:28:19 crc kubenswrapper[4773]: I0122 13:28:19.486425 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5zkb4" event={"ID":"bff5894d-1433-4016-9be9-e42afa5f8061","Type":"ContainerStarted","Data":"bb1253c9131006089af2a9bb941c795ab501dcff494cb73288e67aa4172a2c97"} Jan 22 13:28:19 crc kubenswrapper[4773]: I0122 13:28:19.486768 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5zkb4" event={"ID":"bff5894d-1433-4016-9be9-e42afa5f8061","Type":"ContainerStarted","Data":"824dd126cb1a7d8935772f8c57ca4542f8dda6598072fb4096802e9e14ba5061"} Jan 22 13:28:19 crc kubenswrapper[4773]: I0122 13:28:19.504947 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-5zkb4" podStartSLOduration=2.504924461 podStartE2EDuration="2.504924461s" podCreationTimestamp="2026-01-22 13:28:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:28:19.500972569 +0000 UTC m=+5607.079088424" watchObservedRunningTime="2026-01-22 13:28:19.504924461 +0000 UTC m=+5607.083040286" Jan 22 13:28:22 crc kubenswrapper[4773]: I0122 13:28:22.519642 4773 generic.go:334] "Generic (PLEG): container finished" podID="bff5894d-1433-4016-9be9-e42afa5f8061" containerID="bb1253c9131006089af2a9bb941c795ab501dcff494cb73288e67aa4172a2c97" exitCode=0 Jan 22 13:28:22 crc kubenswrapper[4773]: I0122 13:28:22.519759 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5zkb4" event={"ID":"bff5894d-1433-4016-9be9-e42afa5f8061","Type":"ContainerDied","Data":"bb1253c9131006089af2a9bb941c795ab501dcff494cb73288e67aa4172a2c97"} Jan 22 13:28:23 crc kubenswrapper[4773]: I0122 13:28:23.818948 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-5zkb4" Jan 22 13:28:23 crc kubenswrapper[4773]: I0122 13:28:23.944613 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bff5894d-1433-4016-9be9-e42afa5f8061-db-sync-config-data\") pod \"bff5894d-1433-4016-9be9-e42afa5f8061\" (UID: \"bff5894d-1433-4016-9be9-e42afa5f8061\") " Jan 22 13:28:23 crc kubenswrapper[4773]: I0122 13:28:23.944762 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9vgg\" (UniqueName: \"kubernetes.io/projected/bff5894d-1433-4016-9be9-e42afa5f8061-kube-api-access-k9vgg\") pod \"bff5894d-1433-4016-9be9-e42afa5f8061\" (UID: \"bff5894d-1433-4016-9be9-e42afa5f8061\") " Jan 22 13:28:23 crc kubenswrapper[4773]: I0122 13:28:23.944785 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bff5894d-1433-4016-9be9-e42afa5f8061-combined-ca-bundle\") pod \"bff5894d-1433-4016-9be9-e42afa5f8061\" (UID: \"bff5894d-1433-4016-9be9-e42afa5f8061\") " Jan 22 13:28:23 crc kubenswrapper[4773]: I0122 13:28:23.958446 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bff5894d-1433-4016-9be9-e42afa5f8061-kube-api-access-k9vgg" (OuterVolumeSpecName: "kube-api-access-k9vgg") pod "bff5894d-1433-4016-9be9-e42afa5f8061" (UID: "bff5894d-1433-4016-9be9-e42afa5f8061"). InnerVolumeSpecName "kube-api-access-k9vgg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:28:23 crc kubenswrapper[4773]: I0122 13:28:23.962981 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bff5894d-1433-4016-9be9-e42afa5f8061-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "bff5894d-1433-4016-9be9-e42afa5f8061" (UID: "bff5894d-1433-4016-9be9-e42afa5f8061"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:28:23 crc kubenswrapper[4773]: I0122 13:28:23.970321 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bff5894d-1433-4016-9be9-e42afa5f8061-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bff5894d-1433-4016-9be9-e42afa5f8061" (UID: "bff5894d-1433-4016-9be9-e42afa5f8061"). InnerVolumeSpecName "combined-ca-bundle". 
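The three UnmountVolume.TearDown entries above tear down exactly the volumes mounted for the db-sync pod: two Secret-backed volumes and one projected kube-api-access-* token volume, which is why their plugins are kubernetes.io/secret and kubernetes.io/projected. A sketch of what those sources look like as k8s.io/api/core/v1 types (assuming that module is on hand); the backing Secret names, token path, and expiry are assumptions, since the log records only volume names and plugin types:

```go
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	expiry := int64(3607) // typical kube-api-access token lifetime; assumed, not shown in the log
	volumes := []corev1.Volume{
		// Secret-backed volumes from the TearDown entries above; the backing
		// Secret names are assumptions.
		{Name: "db-sync-config-data", VolumeSource: corev1.VolumeSource{
			Secret: &corev1.SecretVolumeSource{SecretName: "barbican-db-sync-config-data"},
		}},
		{Name: "combined-ca-bundle", VolumeSource: corev1.VolumeSource{
			Secret: &corev1.SecretVolumeSource{SecretName: "combined-ca-bundle"},
		}},
		// kube-api-access-* volumes are projected service-account tokens.
		{Name: "kube-api-access-k9vgg", VolumeSource: corev1.VolumeSource{
			Projected: &corev1.ProjectedVolumeSource{
				Sources: []corev1.VolumeProjection{{
					ServiceAccountToken: &corev1.ServiceAccountTokenProjection{
						Path:              "token",
						ExpirationSeconds: &expiry,
					},
				}},
			},
		}},
	}
	for _, v := range volumes {
		fmt.Println(v.Name)
	}
}
```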
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.046870 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9vgg\" (UniqueName: \"kubernetes.io/projected/bff5894d-1433-4016-9be9-e42afa5f8061-kube-api-access-k9vgg\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.046947 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bff5894d-1433-4016-9be9-e42afa5f8061-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.046960 4773 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/bff5894d-1433-4016-9be9-e42afa5f8061-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.540356 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5zkb4" event={"ID":"bff5894d-1433-4016-9be9-e42afa5f8061","Type":"ContainerDied","Data":"824dd126cb1a7d8935772f8c57ca4542f8dda6598072fb4096802e9e14ba5061"} Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.540707 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="824dd126cb1a7d8935772f8c57ca4542f8dda6598072fb4096802e9e14ba5061" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.540589 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-5zkb4" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.764970 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5ff89786dc-wphr7"] Jan 22 13:28:24 crc kubenswrapper[4773]: E0122 13:28:24.765737 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bff5894d-1433-4016-9be9-e42afa5f8061" containerName="barbican-db-sync" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.765849 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="bff5894d-1433-4016-9be9-e42afa5f8061" containerName="barbican-db-sync" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.766129 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="bff5894d-1433-4016-9be9-e42afa5f8061" containerName="barbican-db-sync" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.767300 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5ff89786dc-wphr7" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.775714 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.775912 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-t94zn" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.776147 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.787727 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-6bd66988c8-tblx4"] Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.789179 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.801152 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.810002 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5ff89786dc-wphr7"] Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.823807 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6bd66988c8-tblx4"] Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.865424 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3655d186-9110-4e00-b952-c587afca8c0f-logs\") pod \"barbican-keystone-listener-6bd66988c8-tblx4\" (UID: \"3655d186-9110-4e00-b952-c587afca8c0f\") " pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.865508 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10dffa1a-2b65-4a28-a990-5ed8a0db0943-combined-ca-bundle\") pod \"barbican-worker-5ff89786dc-wphr7\" (UID: \"10dffa1a-2b65-4a28-a990-5ed8a0db0943\") " pod="openstack/barbican-worker-5ff89786dc-wphr7" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.865583 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bnggw\" (UniqueName: \"kubernetes.io/projected/3655d186-9110-4e00-b952-c587afca8c0f-kube-api-access-bnggw\") pod \"barbican-keystone-listener-6bd66988c8-tblx4\" (UID: \"3655d186-9110-4e00-b952-c587afca8c0f\") " pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.865619 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10dffa1a-2b65-4a28-a990-5ed8a0db0943-logs\") pod \"barbican-worker-5ff89786dc-wphr7\" (UID: \"10dffa1a-2b65-4a28-a990-5ed8a0db0943\") " pod="openstack/barbican-worker-5ff89786dc-wphr7" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.865682 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10dffa1a-2b65-4a28-a990-5ed8a0db0943-config-data\") pod \"barbican-worker-5ff89786dc-wphr7\" (UID: \"10dffa1a-2b65-4a28-a990-5ed8a0db0943\") " pod="openstack/barbican-worker-5ff89786dc-wphr7" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.865746 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3655d186-9110-4e00-b952-c587afca8c0f-config-data-custom\") pod \"barbican-keystone-listener-6bd66988c8-tblx4\" (UID: \"3655d186-9110-4e00-b952-c587afca8c0f\") " pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.865774 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3655d186-9110-4e00-b952-c587afca8c0f-config-data\") pod \"barbican-keystone-listener-6bd66988c8-tblx4\" (UID: \"3655d186-9110-4e00-b952-c587afca8c0f\") " 
pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.865798 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3655d186-9110-4e00-b952-c587afca8c0f-combined-ca-bundle\") pod \"barbican-keystone-listener-6bd66988c8-tblx4\" (UID: \"3655d186-9110-4e00-b952-c587afca8c0f\") " pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.865842 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/10dffa1a-2b65-4a28-a990-5ed8a0db0943-config-data-custom\") pod \"barbican-worker-5ff89786dc-wphr7\" (UID: \"10dffa1a-2b65-4a28-a990-5ed8a0db0943\") " pod="openstack/barbican-worker-5ff89786dc-wphr7" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.865879 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6t66d\" (UniqueName: \"kubernetes.io/projected/10dffa1a-2b65-4a28-a990-5ed8a0db0943-kube-api-access-6t66d\") pod \"barbican-worker-5ff89786dc-wphr7\" (UID: \"10dffa1a-2b65-4a28-a990-5ed8a0db0943\") " pod="openstack/barbican-worker-5ff89786dc-wphr7" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.885181 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7bcd8f6955-c5dwc"] Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.887522 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.922591 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bcd8f6955-c5dwc"] Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.967246 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10dffa1a-2b65-4a28-a990-5ed8a0db0943-config-data\") pod \"barbican-worker-5ff89786dc-wphr7\" (UID: \"10dffa1a-2b65-4a28-a990-5ed8a0db0943\") " pod="openstack/barbican-worker-5ff89786dc-wphr7" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.967322 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-ovsdbserver-sb\") pod \"dnsmasq-dns-7bcd8f6955-c5dwc\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.967361 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3655d186-9110-4e00-b952-c587afca8c0f-config-data-custom\") pod \"barbican-keystone-listener-6bd66988c8-tblx4\" (UID: \"3655d186-9110-4e00-b952-c587afca8c0f\") " pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.967383 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3655d186-9110-4e00-b952-c587afca8c0f-config-data\") pod \"barbican-keystone-listener-6bd66988c8-tblx4\" (UID: \"3655d186-9110-4e00-b952-c587afca8c0f\") " pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 
13:28:24.967399 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3655d186-9110-4e00-b952-c587afca8c0f-combined-ca-bundle\") pod \"barbican-keystone-listener-6bd66988c8-tblx4\" (UID: \"3655d186-9110-4e00-b952-c587afca8c0f\") " pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.967432 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8dmt\" (UniqueName: \"kubernetes.io/projected/b1748b82-0bd9-4c75-9291-77d4ef414d48-kube-api-access-t8dmt\") pod \"dnsmasq-dns-7bcd8f6955-c5dwc\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.967449 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/10dffa1a-2b65-4a28-a990-5ed8a0db0943-config-data-custom\") pod \"barbican-worker-5ff89786dc-wphr7\" (UID: \"10dffa1a-2b65-4a28-a990-5ed8a0db0943\") " pod="openstack/barbican-worker-5ff89786dc-wphr7" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.967473 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6t66d\" (UniqueName: \"kubernetes.io/projected/10dffa1a-2b65-4a28-a990-5ed8a0db0943-kube-api-access-6t66d\") pod \"barbican-worker-5ff89786dc-wphr7\" (UID: \"10dffa1a-2b65-4a28-a990-5ed8a0db0943\") " pod="openstack/barbican-worker-5ff89786dc-wphr7" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.967497 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3655d186-9110-4e00-b952-c587afca8c0f-logs\") pod \"barbican-keystone-listener-6bd66988c8-tblx4\" (UID: \"3655d186-9110-4e00-b952-c587afca8c0f\") " pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.967516 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-config\") pod \"dnsmasq-dns-7bcd8f6955-c5dwc\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.967531 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-dns-svc\") pod \"dnsmasq-dns-7bcd8f6955-c5dwc\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.967557 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10dffa1a-2b65-4a28-a990-5ed8a0db0943-combined-ca-bundle\") pod \"barbican-worker-5ff89786dc-wphr7\" (UID: \"10dffa1a-2b65-4a28-a990-5ed8a0db0943\") " pod="openstack/barbican-worker-5ff89786dc-wphr7" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.967596 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bnggw\" (UniqueName: \"kubernetes.io/projected/3655d186-9110-4e00-b952-c587afca8c0f-kube-api-access-bnggw\") pod \"barbican-keystone-listener-6bd66988c8-tblx4\" (UID: 
\"3655d186-9110-4e00-b952-c587afca8c0f\") " pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.967613 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-ovsdbserver-nb\") pod \"dnsmasq-dns-7bcd8f6955-c5dwc\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.967634 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10dffa1a-2b65-4a28-a990-5ed8a0db0943-logs\") pod \"barbican-worker-5ff89786dc-wphr7\" (UID: \"10dffa1a-2b65-4a28-a990-5ed8a0db0943\") " pod="openstack/barbican-worker-5ff89786dc-wphr7" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.970637 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10dffa1a-2b65-4a28-a990-5ed8a0db0943-logs\") pod \"barbican-worker-5ff89786dc-wphr7\" (UID: \"10dffa1a-2b65-4a28-a990-5ed8a0db0943\") " pod="openstack/barbican-worker-5ff89786dc-wphr7" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.971277 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3655d186-9110-4e00-b952-c587afca8c0f-logs\") pod \"barbican-keystone-listener-6bd66988c8-tblx4\" (UID: \"3655d186-9110-4e00-b952-c587afca8c0f\") " pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.975169 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3655d186-9110-4e00-b952-c587afca8c0f-config-data-custom\") pod \"barbican-keystone-listener-6bd66988c8-tblx4\" (UID: \"3655d186-9110-4e00-b952-c587afca8c0f\") " pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.981663 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/10dffa1a-2b65-4a28-a990-5ed8a0db0943-config-data-custom\") pod \"barbican-worker-5ff89786dc-wphr7\" (UID: \"10dffa1a-2b65-4a28-a990-5ed8a0db0943\") " pod="openstack/barbican-worker-5ff89786dc-wphr7" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.983692 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3655d186-9110-4e00-b952-c587afca8c0f-config-data\") pod \"barbican-keystone-listener-6bd66988c8-tblx4\" (UID: \"3655d186-9110-4e00-b952-c587afca8c0f\") " pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.984273 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3655d186-9110-4e00-b952-c587afca8c0f-combined-ca-bundle\") pod \"barbican-keystone-listener-6bd66988c8-tblx4\" (UID: \"3655d186-9110-4e00-b952-c587afca8c0f\") " pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" Jan 22 13:28:24 crc kubenswrapper[4773]: I0122 13:28:24.996718 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10dffa1a-2b65-4a28-a990-5ed8a0db0943-config-data\") pod 
\"barbican-worker-5ff89786dc-wphr7\" (UID: \"10dffa1a-2b65-4a28-a990-5ed8a0db0943\") " pod="openstack/barbican-worker-5ff89786dc-wphr7" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.011275 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10dffa1a-2b65-4a28-a990-5ed8a0db0943-combined-ca-bundle\") pod \"barbican-worker-5ff89786dc-wphr7\" (UID: \"10dffa1a-2b65-4a28-a990-5ed8a0db0943\") " pod="openstack/barbican-worker-5ff89786dc-wphr7" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.012097 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-85bff9f988-hdktq"] Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.016276 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.023082 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bnggw\" (UniqueName: \"kubernetes.io/projected/3655d186-9110-4e00-b952-c587afca8c0f-kube-api-access-bnggw\") pod \"barbican-keystone-listener-6bd66988c8-tblx4\" (UID: \"3655d186-9110-4e00-b952-c587afca8c0f\") " pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.023829 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6t66d\" (UniqueName: \"kubernetes.io/projected/10dffa1a-2b65-4a28-a990-5ed8a0db0943-kube-api-access-6t66d\") pod \"barbican-worker-5ff89786dc-wphr7\" (UID: \"10dffa1a-2b65-4a28-a990-5ed8a0db0943\") " pod="openstack/barbican-worker-5ff89786dc-wphr7" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.024303 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.026753 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-85bff9f988-hdktq"] Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.071386 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/847ed33a-6b79-402b-9dc9-d60eedc29753-combined-ca-bundle\") pod \"barbican-api-85bff9f988-hdktq\" (UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.071432 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-ovsdbserver-sb\") pod \"dnsmasq-dns-7bcd8f6955-c5dwc\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.071493 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8dmt\" (UniqueName: \"kubernetes.io/projected/b1748b82-0bd9-4c75-9291-77d4ef414d48-kube-api-access-t8dmt\") pod \"dnsmasq-dns-7bcd8f6955-c5dwc\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.071525 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/847ed33a-6b79-402b-9dc9-d60eedc29753-config-data\") pod \"barbican-api-85bff9f988-hdktq\" 
(UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.071542 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/847ed33a-6b79-402b-9dc9-d60eedc29753-logs\") pod \"barbican-api-85bff9f988-hdktq\" (UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.071565 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-config\") pod \"dnsmasq-dns-7bcd8f6955-c5dwc\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.071582 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-dns-svc\") pod \"dnsmasq-dns-7bcd8f6955-c5dwc\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.071611 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpl9d\" (UniqueName: \"kubernetes.io/projected/847ed33a-6b79-402b-9dc9-d60eedc29753-kube-api-access-gpl9d\") pod \"barbican-api-85bff9f988-hdktq\" (UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.071658 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-ovsdbserver-nb\") pod \"dnsmasq-dns-7bcd8f6955-c5dwc\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.071673 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/847ed33a-6b79-402b-9dc9-d60eedc29753-config-data-custom\") pod \"barbican-api-85bff9f988-hdktq\" (UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.072594 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-ovsdbserver-sb\") pod \"dnsmasq-dns-7bcd8f6955-c5dwc\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.073445 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-config\") pod \"dnsmasq-dns-7bcd8f6955-c5dwc\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.074741 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-dns-svc\") pod \"dnsmasq-dns-7bcd8f6955-c5dwc\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " 
pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.078334 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-ovsdbserver-nb\") pod \"dnsmasq-dns-7bcd8f6955-c5dwc\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.096850 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5ff89786dc-wphr7" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.105210 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8dmt\" (UniqueName: \"kubernetes.io/projected/b1748b82-0bd9-4c75-9291-77d4ef414d48-kube-api-access-t8dmt\") pod \"dnsmasq-dns-7bcd8f6955-c5dwc\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.117818 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.173429 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/847ed33a-6b79-402b-9dc9-d60eedc29753-config-data\") pod \"barbican-api-85bff9f988-hdktq\" (UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.173470 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/847ed33a-6b79-402b-9dc9-d60eedc29753-logs\") pod \"barbican-api-85bff9f988-hdktq\" (UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.173507 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpl9d\" (UniqueName: \"kubernetes.io/projected/847ed33a-6b79-402b-9dc9-d60eedc29753-kube-api-access-gpl9d\") pod \"barbican-api-85bff9f988-hdktq\" (UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.173558 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/847ed33a-6b79-402b-9dc9-d60eedc29753-config-data-custom\") pod \"barbican-api-85bff9f988-hdktq\" (UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.173603 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/847ed33a-6b79-402b-9dc9-d60eedc29753-combined-ca-bundle\") pod \"barbican-api-85bff9f988-hdktq\" (UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.174257 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/847ed33a-6b79-402b-9dc9-d60eedc29753-logs\") pod \"barbican-api-85bff9f988-hdktq\" (UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:25 crc 
kubenswrapper[4773]: I0122 13:28:25.181014 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/847ed33a-6b79-402b-9dc9-d60eedc29753-config-data\") pod \"barbican-api-85bff9f988-hdktq\" (UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.188027 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/847ed33a-6b79-402b-9dc9-d60eedc29753-config-data-custom\") pod \"barbican-api-85bff9f988-hdktq\" (UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.190349 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/847ed33a-6b79-402b-9dc9-d60eedc29753-combined-ca-bundle\") pod \"barbican-api-85bff9f988-hdktq\" (UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.205504 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpl9d\" (UniqueName: \"kubernetes.io/projected/847ed33a-6b79-402b-9dc9-d60eedc29753-kube-api-access-gpl9d\") pod \"barbican-api-85bff9f988-hdktq\" (UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.232465 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.413753 4773 util.go:30] "No sandbox for pod can be found. 
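The burst of reconciler_common and operation_generator entries above is the volume manager's reconciler walking its desired state (every volume each admitted pod needs) against its actual state (what is attached and mounted), hence the fixed three-step cadence per volume: VerifyControllerAttachedVolume started, MountVolume started, MountVolume.SetUp succeeded. A compressed, illustrative Go sketch of that loop shape; names and types here are assumptions, not the real reconciler:

```go
package main

import "fmt"

type volume struct{ name, pod string }

// reconcile mounts every volume the desired state wants but the actual
// state lacks, mirroring the "MountVolume started" / "MountVolume.SetUp
// succeeded" pairs above. The real reconciler also attaches, verifies,
// and unmounts; this sketch shows only the mount direction.
func reconcile(desired []volume, actual map[string]bool, setUp func(volume) error) {
	for _, v := range desired {
		if actual[v.name] {
			continue
		}
		fmt.Printf("operationExecutor.MountVolume started for volume %q pod %q\n", v.name, v.pod)
		if err := setUp(v); err != nil {
			fmt.Printf("MountVolume.SetUp failed for volume %q: %v\n", v.name, err)
			continue
		}
		actual[v.name] = true
		fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", v.name)
	}
}

func main() {
	desired := []volume{
		{"logs", "openstack/barbican-worker-5ff89786dc-wphr7"},
		{"config-data", "openstack/barbican-worker-5ff89786dc-wphr7"},
	}
	reconcile(desired, map[string]bool{}, func(volume) error { return nil })
}
```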
Need to start a new one" pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.640839 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5ff89786dc-wphr7"] Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.658006 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bcd8f6955-c5dwc"] Jan 22 13:28:25 crc kubenswrapper[4773]: W0122 13:28:25.701652 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1748b82_0bd9_4c75_9291_77d4ef414d48.slice/crio-4b479532bb9817bef3f9e749c937d8b3525d467e8c5f1f0108b86cc88487f635 WatchSource:0}: Error finding container 4b479532bb9817bef3f9e749c937d8b3525d467e8c5f1f0108b86cc88487f635: Status 404 returned error can't find the container with id 4b479532bb9817bef3f9e749c937d8b3525d467e8c5f1f0108b86cc88487f635 Jan 22 13:28:25 crc kubenswrapper[4773]: I0122 13:28:25.831971 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6bd66988c8-tblx4"] Jan 22 13:28:25 crc kubenswrapper[4773]: W0122 13:28:25.858032 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3655d186_9110_4e00_b952_c587afca8c0f.slice/crio-4a8d9256fa8dff0e22c21debb20e78e51e433349a1318501369e364c86ac3978 WatchSource:0}: Error finding container 4a8d9256fa8dff0e22c21debb20e78e51e433349a1318501369e364c86ac3978: Status 404 returned error can't find the container with id 4a8d9256fa8dff0e22c21debb20e78e51e433349a1318501369e364c86ac3978 Jan 22 13:28:26 crc kubenswrapper[4773]: I0122 13:28:26.074737 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-85bff9f988-hdktq"] Jan 22 13:28:26 crc kubenswrapper[4773]: W0122 13:28:26.085910 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod847ed33a_6b79_402b_9dc9_d60eedc29753.slice/crio-bdda60c5d7790f4467a76e68000e305e3936b95f7d5f84cdd8b8fb5fa3cea25e WatchSource:0}: Error finding container bdda60c5d7790f4467a76e68000e305e3936b95f7d5f84cdd8b8fb5fa3cea25e: Status 404 returned error can't find the container with id bdda60c5d7790f4467a76e68000e305e3936b95f7d5f84cdd8b8fb5fa3cea25e Jan 22 13:28:26 crc kubenswrapper[4773]: I0122 13:28:26.561079 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" event={"ID":"3655d186-9110-4e00-b952-c587afca8c0f","Type":"ContainerStarted","Data":"5ca70cca41843b5d99d3f4d1c077ef5748b93dedb853fe14ca218ae9fb76bc0b"} Jan 22 13:28:26 crc kubenswrapper[4773]: I0122 13:28:26.561439 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" event={"ID":"3655d186-9110-4e00-b952-c587afca8c0f","Type":"ContainerStarted","Data":"f09521c92047e9ca1f972dbd2a92ddd33fac9a86fe16dbdd5d582b41078f2871"} Jan 22 13:28:26 crc kubenswrapper[4773]: I0122 13:28:26.561456 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" event={"ID":"3655d186-9110-4e00-b952-c587afca8c0f","Type":"ContainerStarted","Data":"4a8d9256fa8dff0e22c21debb20e78e51e433349a1318501369e364c86ac3978"} Jan 22 13:28:26 crc kubenswrapper[4773]: I0122 13:28:26.563180 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5ff89786dc-wphr7" 
event={"ID":"10dffa1a-2b65-4a28-a990-5ed8a0db0943","Type":"ContainerStarted","Data":"edda0671db64ef1d19e5e16d3ab7c946664efdfa044878db1afab829d0dffcbe"} Jan 22 13:28:26 crc kubenswrapper[4773]: I0122 13:28:26.563216 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5ff89786dc-wphr7" event={"ID":"10dffa1a-2b65-4a28-a990-5ed8a0db0943","Type":"ContainerStarted","Data":"7c7a9c0a12d2b48c1d2cbafd193211a6d5ea80021c91c14ab6e55862307b7a1f"} Jan 22 13:28:26 crc kubenswrapper[4773]: I0122 13:28:26.563228 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5ff89786dc-wphr7" event={"ID":"10dffa1a-2b65-4a28-a990-5ed8a0db0943","Type":"ContainerStarted","Data":"02273b9da753e6668d484fb634dac69e7ce8234820b711e1f77720ee025397e6"} Jan 22 13:28:26 crc kubenswrapper[4773]: I0122 13:28:26.565872 4773 generic.go:334] "Generic (PLEG): container finished" podID="b1748b82-0bd9-4c75-9291-77d4ef414d48" containerID="2b2c87b3fdf746d3f2dec3fe7ba7a8d9353f7e371fd2acca35efd04d91d02264" exitCode=0 Jan 22 13:28:26 crc kubenswrapper[4773]: I0122 13:28:26.565921 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" event={"ID":"b1748b82-0bd9-4c75-9291-77d4ef414d48","Type":"ContainerDied","Data":"2b2c87b3fdf746d3f2dec3fe7ba7a8d9353f7e371fd2acca35efd04d91d02264"} Jan 22 13:28:26 crc kubenswrapper[4773]: I0122 13:28:26.565939 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" event={"ID":"b1748b82-0bd9-4c75-9291-77d4ef414d48","Type":"ContainerStarted","Data":"4b479532bb9817bef3f9e749c937d8b3525d467e8c5f1f0108b86cc88487f635"} Jan 22 13:28:26 crc kubenswrapper[4773]: I0122 13:28:26.569410 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-85bff9f988-hdktq" event={"ID":"847ed33a-6b79-402b-9dc9-d60eedc29753","Type":"ContainerStarted","Data":"ef6322d5b78e1af70abe3d0712134e0ec7c71262555e9f4f55fb51dc069f7f73"} Jan 22 13:28:26 crc kubenswrapper[4773]: I0122 13:28:26.569469 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-85bff9f988-hdktq" event={"ID":"847ed33a-6b79-402b-9dc9-d60eedc29753","Type":"ContainerStarted","Data":"bdda60c5d7790f4467a76e68000e305e3936b95f7d5f84cdd8b8fb5fa3cea25e"} Jan 22 13:28:26 crc kubenswrapper[4773]: I0122 13:28:26.583360 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-6bd66988c8-tblx4" podStartSLOduration=2.583340529 podStartE2EDuration="2.583340529s" podCreationTimestamp="2026-01-22 13:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:28:26.579935853 +0000 UTC m=+5614.158051668" watchObservedRunningTime="2026-01-22 13:28:26.583340529 +0000 UTC m=+5614.161456514" Jan 22 13:28:26 crc kubenswrapper[4773]: I0122 13:28:26.605514 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5ff89786dc-wphr7" podStartSLOduration=2.605490734 podStartE2EDuration="2.605490734s" podCreationTimestamp="2026-01-22 13:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:28:26.602242082 +0000 UTC m=+5614.180357907" watchObservedRunningTime="2026-01-22 13:28:26.605490734 +0000 UTC m=+5614.183606559" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.149727 4773 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/barbican-api-7f865655b4-mkvnv"] Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.154056 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.157356 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.157725 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.161066 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7f865655b4-mkvnv"] Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.225789 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v52p6\" (UniqueName: \"kubernetes.io/projected/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-kube-api-access-v52p6\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.225837 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-logs\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.225886 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-config-data-custom\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.225922 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-combined-ca-bundle\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.225963 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-public-tls-certs\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.226005 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-config-data\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.226026 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-internal-tls-certs\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: 
\"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.327303 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-config-data-custom\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.327568 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-combined-ca-bundle\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.327676 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-public-tls-certs\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.327776 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-config-data\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.327854 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-internal-tls-certs\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.327975 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v52p6\" (UniqueName: \"kubernetes.io/projected/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-kube-api-access-v52p6\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.328054 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-logs\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.328906 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-logs\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.337256 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-config-data-custom\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " 
pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.338200 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-config-data\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.338484 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-public-tls-certs\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.341876 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-combined-ca-bundle\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.348944 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-internal-tls-certs\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.357126 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v52p6\" (UniqueName: \"kubernetes.io/projected/2af597d8-3c79-478e-aa8d-1bdeea5c2ba3-kube-api-access-v52p6\") pod \"barbican-api-7f865655b4-mkvnv\" (UID: \"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3\") " pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.481573 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.607501 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" event={"ID":"b1748b82-0bd9-4c75-9291-77d4ef414d48","Type":"ContainerStarted","Data":"9fc64e39289e259d8a2b9160916a03bedc174d8e61615a18ec7881231bb6e63e"} Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.607722 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.627668 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-85bff9f988-hdktq" event={"ID":"847ed33a-6b79-402b-9dc9-d60eedc29753","Type":"ContainerStarted","Data":"8e9e0459ce1ca388b60d8c093ba87272543be447c572cc2ee660fe09a782a585"} Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.630268 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.651924 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" podStartSLOduration=3.651892958 podStartE2EDuration="3.651892958s" podCreationTimestamp="2026-01-22 13:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:28:27.643042458 +0000 UTC m=+5615.221158283" watchObservedRunningTime="2026-01-22 13:28:27.651892958 +0000 UTC m=+5615.230008793" Jan 22 13:28:27 crc kubenswrapper[4773]: I0122 13:28:27.676294 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-85bff9f988-hdktq" podStartSLOduration=3.6762530460000002 podStartE2EDuration="3.676253046s" podCreationTimestamp="2026-01-22 13:28:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:28:27.659979176 +0000 UTC m=+5615.238095001" watchObservedRunningTime="2026-01-22 13:28:27.676253046 +0000 UTC m=+5615.254368881" Jan 22 13:28:28 crc kubenswrapper[4773]: W0122 13:28:28.024491 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2af597d8_3c79_478e_aa8d_1bdeea5c2ba3.slice/crio-b7e8bf7c3e6b13c19609f0d989ef920c7eced9dbdb81e023bd71a848729b96fe WatchSource:0}: Error finding container b7e8bf7c3e6b13c19609f0d989ef920c7eced9dbdb81e023bd71a848729b96fe: Status 404 returned error can't find the container with id b7e8bf7c3e6b13c19609f0d989ef920c7eced9dbdb81e023bd71a848729b96fe Jan 22 13:28:28 crc kubenswrapper[4773]: I0122 13:28:28.025923 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7f865655b4-mkvnv"] Jan 22 13:28:28 crc kubenswrapper[4773]: I0122 13:28:28.639278 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7f865655b4-mkvnv" event={"ID":"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3","Type":"ContainerStarted","Data":"56c4c1178dbc32d66dbdc696dc4a946123f0ef6c1ccaf854e2f0346ca0a50517"} Jan 22 13:28:28 crc kubenswrapper[4773]: I0122 13:28:28.639663 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7f865655b4-mkvnv" event={"ID":"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3","Type":"ContainerStarted","Data":"eb9a2bb3ac3a600779cbb033b8a446a4baac9b6804912fa4748c8d8a79c54785"} Jan 22 
13:28:28 crc kubenswrapper[4773]: I0122 13:28:28.639684 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7f865655b4-mkvnv" event={"ID":"2af597d8-3c79-478e-aa8d-1bdeea5c2ba3","Type":"ContainerStarted","Data":"b7e8bf7c3e6b13c19609f0d989ef920c7eced9dbdb81e023bd71a848729b96fe"} Jan 22 13:28:28 crc kubenswrapper[4773]: I0122 13:28:28.640147 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:28 crc kubenswrapper[4773]: I0122 13:28:28.640194 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:28 crc kubenswrapper[4773]: I0122 13:28:28.640211 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:35 crc kubenswrapper[4773]: I0122 13:28:35.234500 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:28:35 crc kubenswrapper[4773]: I0122 13:28:35.265723 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7f865655b4-mkvnv" podStartSLOduration=8.265691598 podStartE2EDuration="8.265691598s" podCreationTimestamp="2026-01-22 13:28:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:28:28.672037672 +0000 UTC m=+5616.250153497" watchObservedRunningTime="2026-01-22 13:28:35.265691598 +0000 UTC m=+5622.843807453" Jan 22 13:28:35 crc kubenswrapper[4773]: I0122 13:28:35.292920 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c84f5549f-bw82x"] Jan 22 13:28:35 crc kubenswrapper[4773]: I0122 13:28:35.293247 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-c84f5549f-bw82x" podUID="3ae9528a-e508-435e-b609-cb8108e195d9" containerName="dnsmasq-dns" containerID="cri-o://af03ccc60cd307473b7d0c13da8e6c55ad53bf94aed3f875560bf5e204324481" gracePeriod=10 Jan 22 13:28:35 crc kubenswrapper[4773]: I0122 13:28:35.711426 4773 generic.go:334] "Generic (PLEG): container finished" podID="3ae9528a-e508-435e-b609-cb8108e195d9" containerID="af03ccc60cd307473b7d0c13da8e6c55ad53bf94aed3f875560bf5e204324481" exitCode=0 Jan 22 13:28:35 crc kubenswrapper[4773]: I0122 13:28:35.711910 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c84f5549f-bw82x" event={"ID":"3ae9528a-e508-435e-b609-cb8108e195d9","Type":"ContainerDied","Data":"af03ccc60cd307473b7d0c13da8e6c55ad53bf94aed3f875560bf5e204324481"} Jan 22 13:28:35 crc kubenswrapper[4773]: I0122 13:28:35.904634 4773 util.go:48] "No ready sandbox for pod can be found. 
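The sequence above is a rolling replacement: only after the new dnsmasq-dns-7bcd8f6955-c5dwc pod reports readiness does the API issue SyncLoop DELETE for the old c84f5549f pod, and the kubelet then kills its container with gracePeriod=10, meaning SIGTERM first and SIGKILL only if the container outlives the grace period (here it exits 0 well within it). A minimal local sketch of that escalation pattern for a plain process; this is an assumed stand-in, since the kubelet performs the stop through the CRI rather than by signalling directly:

```go
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// killWithGracePeriod mirrors the "Killing container with a grace period"
// entry above: SIGTERM first, escalating to SIGKILL if the process is
// still alive after the grace period.
func killWithGracePeriod(cmd *exec.Cmd, grace time.Duration) error {
	if err := cmd.Process.Signal(syscall.SIGTERM); err != nil {
		return err
	}
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	select {
	case err := <-done:
		return err // exited within the grace period, like the dnsmasq container
	case <-time.After(grace):
		fmt.Println("grace period expired, sending SIGKILL")
		return cmd.Process.Kill()
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	// gracePeriod=10 in the log; sleep dies on SIGTERM, so this takes the
	// in-grace path just as the dnsmasq container did.
	fmt.Println(killWithGracePeriod(cmd, 2*time.Second))
}
```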
Need to start a new one" pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:28:35 crc kubenswrapper[4773]: I0122 13:28:35.923645 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-ovsdbserver-sb\") pod \"3ae9528a-e508-435e-b609-cb8108e195d9\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " Jan 22 13:28:35 crc kubenswrapper[4773]: I0122 13:28:35.923721 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-ovsdbserver-nb\") pod \"3ae9528a-e508-435e-b609-cb8108e195d9\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " Jan 22 13:28:35 crc kubenswrapper[4773]: I0122 13:28:35.923937 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-config\") pod \"3ae9528a-e508-435e-b609-cb8108e195d9\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " Jan 22 13:28:35 crc kubenswrapper[4773]: I0122 13:28:35.923987 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-dns-svc\") pod \"3ae9528a-e508-435e-b609-cb8108e195d9\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " Jan 22 13:28:35 crc kubenswrapper[4773]: I0122 13:28:35.924062 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zlfxw\" (UniqueName: \"kubernetes.io/projected/3ae9528a-e508-435e-b609-cb8108e195d9-kube-api-access-zlfxw\") pod \"3ae9528a-e508-435e-b609-cb8108e195d9\" (UID: \"3ae9528a-e508-435e-b609-cb8108e195d9\") " Jan 22 13:28:35 crc kubenswrapper[4773]: I0122 13:28:35.935803 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ae9528a-e508-435e-b609-cb8108e195d9-kube-api-access-zlfxw" (OuterVolumeSpecName: "kube-api-access-zlfxw") pod "3ae9528a-e508-435e-b609-cb8108e195d9" (UID: "3ae9528a-e508-435e-b609-cb8108e195d9"). InnerVolumeSpecName "kube-api-access-zlfxw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:28:36 crc kubenswrapper[4773]: I0122 13:28:36.002382 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-config" (OuterVolumeSpecName: "config") pod "3ae9528a-e508-435e-b609-cb8108e195d9" (UID: "3ae9528a-e508-435e-b609-cb8108e195d9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:28:36 crc kubenswrapper[4773]: I0122 13:28:36.011866 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3ae9528a-e508-435e-b609-cb8108e195d9" (UID: "3ae9528a-e508-435e-b609-cb8108e195d9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:28:36 crc kubenswrapper[4773]: I0122 13:28:36.012367 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3ae9528a-e508-435e-b609-cb8108e195d9" (UID: "3ae9528a-e508-435e-b609-cb8108e195d9"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:28:36 crc kubenswrapper[4773]: I0122 13:28:36.026754 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:36 crc kubenswrapper[4773]: I0122 13:28:36.026803 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:36 crc kubenswrapper[4773]: I0122 13:28:36.026820 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zlfxw\" (UniqueName: \"kubernetes.io/projected/3ae9528a-e508-435e-b609-cb8108e195d9-kube-api-access-zlfxw\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:36 crc kubenswrapper[4773]: I0122 13:28:36.026837 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:36 crc kubenswrapper[4773]: I0122 13:28:36.033952 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3ae9528a-e508-435e-b609-cb8108e195d9" (UID: "3ae9528a-e508-435e-b609-cb8108e195d9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:28:36 crc kubenswrapper[4773]: I0122 13:28:36.128912 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3ae9528a-e508-435e-b609-cb8108e195d9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:36 crc kubenswrapper[4773]: I0122 13:28:36.722965 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c84f5549f-bw82x" event={"ID":"3ae9528a-e508-435e-b609-cb8108e195d9","Type":"ContainerDied","Data":"26b69d1403deb49fa430f29b9a2d5214b450e94f3bc35f8c1ba52cb9f0027df0"} Jan 22 13:28:36 crc kubenswrapper[4773]: I0122 13:28:36.723331 4773 scope.go:117] "RemoveContainer" containerID="af03ccc60cd307473b7d0c13da8e6c55ad53bf94aed3f875560bf5e204324481" Jan 22 13:28:36 crc kubenswrapper[4773]: I0122 13:28:36.723228 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-c84f5549f-bw82x" Jan 22 13:28:36 crc kubenswrapper[4773]: I0122 13:28:36.753411 4773 scope.go:117] "RemoveContainer" containerID="75fa8f812bfbd3031dcea5554a21c495e7693e3380fef40bfba41f0b7268a17b" Jan 22 13:28:36 crc kubenswrapper[4773]: I0122 13:28:36.768169 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c84f5549f-bw82x"] Jan 22 13:28:36 crc kubenswrapper[4773]: I0122 13:28:36.797516 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-c84f5549f-bw82x"] Jan 22 13:28:37 crc kubenswrapper[4773]: I0122 13:28:37.130451 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:37 crc kubenswrapper[4773]: I0122 13:28:37.237580 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:38 crc kubenswrapper[4773]: I0122 13:28:38.673273 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ae9528a-e508-435e-b609-cb8108e195d9" path="/var/lib/kubelet/pods/3ae9528a-e508-435e-b609-cb8108e195d9/volumes" Jan 22 13:28:39 crc kubenswrapper[4773]: I0122 13:28:39.101789 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:39 crc kubenswrapper[4773]: I0122 13:28:39.241103 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7f865655b4-mkvnv" Jan 22 13:28:39 crc kubenswrapper[4773]: I0122 13:28:39.330224 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-85bff9f988-hdktq"] Jan 22 13:28:39 crc kubenswrapper[4773]: I0122 13:28:39.332198 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-85bff9f988-hdktq" podUID="847ed33a-6b79-402b-9dc9-d60eedc29753" containerName="barbican-api-log" containerID="cri-o://ef6322d5b78e1af70abe3d0712134e0ec7c71262555e9f4f55fb51dc069f7f73" gracePeriod=30 Jan 22 13:28:39 crc kubenswrapper[4773]: I0122 13:28:39.332772 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-85bff9f988-hdktq" podUID="847ed33a-6b79-402b-9dc9-d60eedc29753" containerName="barbican-api" containerID="cri-o://8e9e0459ce1ca388b60d8c093ba87272543be447c572cc2ee660fe09a782a585" gracePeriod=30 Jan 22 13:28:39 crc kubenswrapper[4773]: I0122 13:28:39.751277 4773 generic.go:334] "Generic (PLEG): container finished" podID="847ed33a-6b79-402b-9dc9-d60eedc29753" containerID="ef6322d5b78e1af70abe3d0712134e0ec7c71262555e9f4f55fb51dc069f7f73" exitCode=143 Jan 22 13:28:39 crc kubenswrapper[4773]: I0122 13:28:39.751429 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-85bff9f988-hdktq" event={"ID":"847ed33a-6b79-402b-9dc9-d60eedc29753","Type":"ContainerDied","Data":"ef6322d5b78e1af70abe3d0712134e0ec7c71262555e9f4f55fb51dc069f7f73"} Jan 22 13:28:42 crc kubenswrapper[4773]: I0122 13:28:42.624229 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-85bff9f988-hdktq" podUID="847ed33a-6b79-402b-9dc9-d60eedc29753" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.1.38:9311/healthcheck\": read tcp 10.217.0.2:54356->10.217.1.38:9311: read: connection reset by peer" Jan 22 13:28:42 crc kubenswrapper[4773]: I0122 13:28:42.624303 4773 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack/barbican-api-85bff9f988-hdktq" podUID="847ed33a-6b79-402b-9dc9-d60eedc29753" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.1.38:9311/healthcheck\": read tcp 10.217.0.2:54340->10.217.1.38:9311: read: connection reset by peer" Jan 22 13:28:42 crc kubenswrapper[4773]: I0122 13:28:42.800960 4773 generic.go:334] "Generic (PLEG): container finished" podID="847ed33a-6b79-402b-9dc9-d60eedc29753" containerID="8e9e0459ce1ca388b60d8c093ba87272543be447c572cc2ee660fe09a782a585" exitCode=0 Jan 22 13:28:42 crc kubenswrapper[4773]: I0122 13:28:42.801009 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-85bff9f988-hdktq" event={"ID":"847ed33a-6b79-402b-9dc9-d60eedc29753","Type":"ContainerDied","Data":"8e9e0459ce1ca388b60d8c093ba87272543be447c572cc2ee660fe09a782a585"} Jan 22 13:28:42 crc kubenswrapper[4773]: I0122 13:28:42.975271 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.072519 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/847ed33a-6b79-402b-9dc9-d60eedc29753-config-data\") pod \"847ed33a-6b79-402b-9dc9-d60eedc29753\" (UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.072634 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/847ed33a-6b79-402b-9dc9-d60eedc29753-combined-ca-bundle\") pod \"847ed33a-6b79-402b-9dc9-d60eedc29753\" (UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.072686 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpl9d\" (UniqueName: \"kubernetes.io/projected/847ed33a-6b79-402b-9dc9-d60eedc29753-kube-api-access-gpl9d\") pod \"847ed33a-6b79-402b-9dc9-d60eedc29753\" (UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.072716 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/847ed33a-6b79-402b-9dc9-d60eedc29753-config-data-custom\") pod \"847ed33a-6b79-402b-9dc9-d60eedc29753\" (UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.072778 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/847ed33a-6b79-402b-9dc9-d60eedc29753-logs\") pod \"847ed33a-6b79-402b-9dc9-d60eedc29753\" (UID: \"847ed33a-6b79-402b-9dc9-d60eedc29753\") " Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.074034 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/847ed33a-6b79-402b-9dc9-d60eedc29753-logs" (OuterVolumeSpecName: "logs") pod "847ed33a-6b79-402b-9dc9-d60eedc29753" (UID: "847ed33a-6b79-402b-9dc9-d60eedc29753"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.075244 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/847ed33a-6b79-402b-9dc9-d60eedc29753-logs\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.086658 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/847ed33a-6b79-402b-9dc9-d60eedc29753-kube-api-access-gpl9d" (OuterVolumeSpecName: "kube-api-access-gpl9d") pod "847ed33a-6b79-402b-9dc9-d60eedc29753" (UID: "847ed33a-6b79-402b-9dc9-d60eedc29753"). InnerVolumeSpecName "kube-api-access-gpl9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.094460 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/847ed33a-6b79-402b-9dc9-d60eedc29753-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "847ed33a-6b79-402b-9dc9-d60eedc29753" (UID: "847ed33a-6b79-402b-9dc9-d60eedc29753"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.137811 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/847ed33a-6b79-402b-9dc9-d60eedc29753-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "847ed33a-6b79-402b-9dc9-d60eedc29753" (UID: "847ed33a-6b79-402b-9dc9-d60eedc29753"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.185082 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/847ed33a-6b79-402b-9dc9-d60eedc29753-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.185123 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpl9d\" (UniqueName: \"kubernetes.io/projected/847ed33a-6b79-402b-9dc9-d60eedc29753-kube-api-access-gpl9d\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.185134 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/847ed33a-6b79-402b-9dc9-d60eedc29753-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.218460 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/847ed33a-6b79-402b-9dc9-d60eedc29753-config-data" (OuterVolumeSpecName: "config-data") pod "847ed33a-6b79-402b-9dc9-d60eedc29753" (UID: "847ed33a-6b79-402b-9dc9-d60eedc29753"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.287021 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/847ed33a-6b79-402b-9dc9-d60eedc29753-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.812267 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-85bff9f988-hdktq" event={"ID":"847ed33a-6b79-402b-9dc9-d60eedc29753","Type":"ContainerDied","Data":"bdda60c5d7790f4467a76e68000e305e3936b95f7d5f84cdd8b8fb5fa3cea25e"} Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.812343 4773 scope.go:117] "RemoveContainer" containerID="8e9e0459ce1ca388b60d8c093ba87272543be447c572cc2ee660fe09a782a585" Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.812463 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-85bff9f988-hdktq" Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.850135 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-85bff9f988-hdktq"] Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.851141 4773 scope.go:117] "RemoveContainer" containerID="ef6322d5b78e1af70abe3d0712134e0ec7c71262555e9f4f55fb51dc069f7f73" Jan 22 13:28:43 crc kubenswrapper[4773]: I0122 13:28:43.857192 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-85bff9f988-hdktq"] Jan 22 13:28:44 crc kubenswrapper[4773]: I0122 13:28:44.668089 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="847ed33a-6b79-402b-9dc9-d60eedc29753" path="/var/lib/kubelet/pods/847ed33a-6b79-402b-9dc9-d60eedc29753/volumes" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.535818 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-mxljw"] Jan 22 13:28:46 crc kubenswrapper[4773]: E0122 13:28:46.536393 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="847ed33a-6b79-402b-9dc9-d60eedc29753" containerName="barbican-api" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.536407 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="847ed33a-6b79-402b-9dc9-d60eedc29753" containerName="barbican-api" Jan 22 13:28:46 crc kubenswrapper[4773]: E0122 13:28:46.536440 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ae9528a-e508-435e-b609-cb8108e195d9" containerName="dnsmasq-dns" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.536446 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ae9528a-e508-435e-b609-cb8108e195d9" containerName="dnsmasq-dns" Jan 22 13:28:46 crc kubenswrapper[4773]: E0122 13:28:46.536454 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="847ed33a-6b79-402b-9dc9-d60eedc29753" containerName="barbican-api-log" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.536459 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="847ed33a-6b79-402b-9dc9-d60eedc29753" containerName="barbican-api-log" Jan 22 13:28:46 crc kubenswrapper[4773]: E0122 13:28:46.536478 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ae9528a-e508-435e-b609-cb8108e195d9" containerName="init" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.536483 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ae9528a-e508-435e-b609-cb8108e195d9" containerName="init" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.536634 4773 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="3ae9528a-e508-435e-b609-cb8108e195d9" containerName="dnsmasq-dns" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.536655 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="847ed33a-6b79-402b-9dc9-d60eedc29753" containerName="barbican-api" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.536664 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="847ed33a-6b79-402b-9dc9-d60eedc29753" containerName="barbican-api-log" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.537264 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-mxljw" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.548677 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-mxljw"] Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.644760 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7abc-account-create-update-qx5vc"] Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.646179 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7abc-account-create-update-qx5vc" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.648748 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.651724 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7abc-account-create-update-qx5vc"] Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.692868 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b4f8a4ca-4ec1-41ee-ac71-a319e1590452-operator-scripts\") pod \"neutron-db-create-mxljw\" (UID: \"b4f8a4ca-4ec1-41ee-ac71-a319e1590452\") " pod="openstack/neutron-db-create-mxljw" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.692926 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9cq4\" (UniqueName: \"kubernetes.io/projected/b4f8a4ca-4ec1-41ee-ac71-a319e1590452-kube-api-access-m9cq4\") pod \"neutron-db-create-mxljw\" (UID: \"b4f8a4ca-4ec1-41ee-ac71-a319e1590452\") " pod="openstack/neutron-db-create-mxljw" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.794956 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmzdc\" (UniqueName: \"kubernetes.io/projected/64ceda11-8e56-4d64-9940-8cca049a03b6-kube-api-access-hmzdc\") pod \"neutron-7abc-account-create-update-qx5vc\" (UID: \"64ceda11-8e56-4d64-9940-8cca049a03b6\") " pod="openstack/neutron-7abc-account-create-update-qx5vc" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.795056 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64ceda11-8e56-4d64-9940-8cca049a03b6-operator-scripts\") pod \"neutron-7abc-account-create-update-qx5vc\" (UID: \"64ceda11-8e56-4d64-9940-8cca049a03b6\") " pod="openstack/neutron-7abc-account-create-update-qx5vc" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.795095 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b4f8a4ca-4ec1-41ee-ac71-a319e1590452-operator-scripts\") pod \"neutron-db-create-mxljw\" (UID: 
\"b4f8a4ca-4ec1-41ee-ac71-a319e1590452\") " pod="openstack/neutron-db-create-mxljw" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.795168 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9cq4\" (UniqueName: \"kubernetes.io/projected/b4f8a4ca-4ec1-41ee-ac71-a319e1590452-kube-api-access-m9cq4\") pod \"neutron-db-create-mxljw\" (UID: \"b4f8a4ca-4ec1-41ee-ac71-a319e1590452\") " pod="openstack/neutron-db-create-mxljw" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.796582 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b4f8a4ca-4ec1-41ee-ac71-a319e1590452-operator-scripts\") pod \"neutron-db-create-mxljw\" (UID: \"b4f8a4ca-4ec1-41ee-ac71-a319e1590452\") " pod="openstack/neutron-db-create-mxljw" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.813974 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9cq4\" (UniqueName: \"kubernetes.io/projected/b4f8a4ca-4ec1-41ee-ac71-a319e1590452-kube-api-access-m9cq4\") pod \"neutron-db-create-mxljw\" (UID: \"b4f8a4ca-4ec1-41ee-ac71-a319e1590452\") " pod="openstack/neutron-db-create-mxljw" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.893057 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-mxljw" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.897629 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64ceda11-8e56-4d64-9940-8cca049a03b6-operator-scripts\") pod \"neutron-7abc-account-create-update-qx5vc\" (UID: \"64ceda11-8e56-4d64-9940-8cca049a03b6\") " pod="openstack/neutron-7abc-account-create-update-qx5vc" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.897801 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmzdc\" (UniqueName: \"kubernetes.io/projected/64ceda11-8e56-4d64-9940-8cca049a03b6-kube-api-access-hmzdc\") pod \"neutron-7abc-account-create-update-qx5vc\" (UID: \"64ceda11-8e56-4d64-9940-8cca049a03b6\") " pod="openstack/neutron-7abc-account-create-update-qx5vc" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.898911 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64ceda11-8e56-4d64-9940-8cca049a03b6-operator-scripts\") pod \"neutron-7abc-account-create-update-qx5vc\" (UID: \"64ceda11-8e56-4d64-9940-8cca049a03b6\") " pod="openstack/neutron-7abc-account-create-update-qx5vc" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.926432 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmzdc\" (UniqueName: \"kubernetes.io/projected/64ceda11-8e56-4d64-9940-8cca049a03b6-kube-api-access-hmzdc\") pod \"neutron-7abc-account-create-update-qx5vc\" (UID: \"64ceda11-8e56-4d64-9940-8cca049a03b6\") " pod="openstack/neutron-7abc-account-create-update-qx5vc" Jan 22 13:28:46 crc kubenswrapper[4773]: I0122 13:28:46.959871 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7abc-account-create-update-qx5vc" Jan 22 13:28:47 crc kubenswrapper[4773]: I0122 13:28:47.535126 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7abc-account-create-update-qx5vc"] Jan 22 13:28:47 crc kubenswrapper[4773]: W0122 13:28:47.543937 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64ceda11_8e56_4d64_9940_8cca049a03b6.slice/crio-34e5534b669a515c8a281b430a2f00cfbefdb2b5b36dc5d7ca7efc0f9e0c0ccb WatchSource:0}: Error finding container 34e5534b669a515c8a281b430a2f00cfbefdb2b5b36dc5d7ca7efc0f9e0c0ccb: Status 404 returned error can't find the container with id 34e5534b669a515c8a281b430a2f00cfbefdb2b5b36dc5d7ca7efc0f9e0c0ccb Jan 22 13:28:47 crc kubenswrapper[4773]: I0122 13:28:47.654979 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-mxljw"] Jan 22 13:28:47 crc kubenswrapper[4773]: I0122 13:28:47.852089 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7abc-account-create-update-qx5vc" event={"ID":"64ceda11-8e56-4d64-9940-8cca049a03b6","Type":"ContainerStarted","Data":"34e5534b669a515c8a281b430a2f00cfbefdb2b5b36dc5d7ca7efc0f9e0c0ccb"} Jan 22 13:28:47 crc kubenswrapper[4773]: I0122 13:28:47.854829 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mxljw" event={"ID":"b4f8a4ca-4ec1-41ee-ac71-a319e1590452","Type":"ContainerStarted","Data":"252dbb1c3871da107c4e8cc6291b7f490ff6878ffa953a913ba28302e03db619"} Jan 22 13:28:48 crc kubenswrapper[4773]: I0122 13:28:48.974314 4773 generic.go:334] "Generic (PLEG): container finished" podID="64ceda11-8e56-4d64-9940-8cca049a03b6" containerID="5d92204b8da9af240262eed1d62e22d7c9f26a08d661e04bed96f179b763132f" exitCode=0 Jan 22 13:28:48 crc kubenswrapper[4773]: I0122 13:28:48.974508 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7abc-account-create-update-qx5vc" event={"ID":"64ceda11-8e56-4d64-9940-8cca049a03b6","Type":"ContainerDied","Data":"5d92204b8da9af240262eed1d62e22d7c9f26a08d661e04bed96f179b763132f"} Jan 22 13:28:48 crc kubenswrapper[4773]: I0122 13:28:48.976670 4773 generic.go:334] "Generic (PLEG): container finished" podID="b4f8a4ca-4ec1-41ee-ac71-a319e1590452" containerID="0643999f89bb71dceba31e605d43e2cca362efaa3b63b043692b81eff1bba382" exitCode=0 Jan 22 13:28:48 crc kubenswrapper[4773]: I0122 13:28:48.976716 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mxljw" event={"ID":"b4f8a4ca-4ec1-41ee-ac71-a319e1590452","Type":"ContainerDied","Data":"0643999f89bb71dceba31e605d43e2cca362efaa3b63b043692b81eff1bba382"} Jan 22 13:28:50 crc kubenswrapper[4773]: I0122 13:28:50.402171 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7abc-account-create-update-qx5vc" Jan 22 13:28:50 crc kubenswrapper[4773]: I0122 13:28:50.408539 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-mxljw" Jan 22 13:28:50 crc kubenswrapper[4773]: I0122 13:28:50.579876 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9cq4\" (UniqueName: \"kubernetes.io/projected/b4f8a4ca-4ec1-41ee-ac71-a319e1590452-kube-api-access-m9cq4\") pod \"b4f8a4ca-4ec1-41ee-ac71-a319e1590452\" (UID: \"b4f8a4ca-4ec1-41ee-ac71-a319e1590452\") " Jan 22 13:28:50 crc kubenswrapper[4773]: I0122 13:28:50.579979 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64ceda11-8e56-4d64-9940-8cca049a03b6-operator-scripts\") pod \"64ceda11-8e56-4d64-9940-8cca049a03b6\" (UID: \"64ceda11-8e56-4d64-9940-8cca049a03b6\") " Jan 22 13:28:50 crc kubenswrapper[4773]: I0122 13:28:50.580003 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hmzdc\" (UniqueName: \"kubernetes.io/projected/64ceda11-8e56-4d64-9940-8cca049a03b6-kube-api-access-hmzdc\") pod \"64ceda11-8e56-4d64-9940-8cca049a03b6\" (UID: \"64ceda11-8e56-4d64-9940-8cca049a03b6\") " Jan 22 13:28:50 crc kubenswrapper[4773]: I0122 13:28:50.580064 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b4f8a4ca-4ec1-41ee-ac71-a319e1590452-operator-scripts\") pod \"b4f8a4ca-4ec1-41ee-ac71-a319e1590452\" (UID: \"b4f8a4ca-4ec1-41ee-ac71-a319e1590452\") " Jan 22 13:28:50 crc kubenswrapper[4773]: I0122 13:28:50.580971 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64ceda11-8e56-4d64-9940-8cca049a03b6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "64ceda11-8e56-4d64-9940-8cca049a03b6" (UID: "64ceda11-8e56-4d64-9940-8cca049a03b6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:28:50 crc kubenswrapper[4773]: I0122 13:28:50.581029 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4f8a4ca-4ec1-41ee-ac71-a319e1590452-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b4f8a4ca-4ec1-41ee-ac71-a319e1590452" (UID: "b4f8a4ca-4ec1-41ee-ac71-a319e1590452"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:28:50 crc kubenswrapper[4773]: I0122 13:28:50.586299 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64ceda11-8e56-4d64-9940-8cca049a03b6-kube-api-access-hmzdc" (OuterVolumeSpecName: "kube-api-access-hmzdc") pod "64ceda11-8e56-4d64-9940-8cca049a03b6" (UID: "64ceda11-8e56-4d64-9940-8cca049a03b6"). InnerVolumeSpecName "kube-api-access-hmzdc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:28:50 crc kubenswrapper[4773]: I0122 13:28:50.590512 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4f8a4ca-4ec1-41ee-ac71-a319e1590452-kube-api-access-m9cq4" (OuterVolumeSpecName: "kube-api-access-m9cq4") pod "b4f8a4ca-4ec1-41ee-ac71-a319e1590452" (UID: "b4f8a4ca-4ec1-41ee-ac71-a319e1590452"). InnerVolumeSpecName "kube-api-access-m9cq4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:28:50 crc kubenswrapper[4773]: I0122 13:28:50.681712 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9cq4\" (UniqueName: \"kubernetes.io/projected/b4f8a4ca-4ec1-41ee-ac71-a319e1590452-kube-api-access-m9cq4\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:50 crc kubenswrapper[4773]: I0122 13:28:50.681746 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64ceda11-8e56-4d64-9940-8cca049a03b6-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:50 crc kubenswrapper[4773]: I0122 13:28:50.681759 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hmzdc\" (UniqueName: \"kubernetes.io/projected/64ceda11-8e56-4d64-9940-8cca049a03b6-kube-api-access-hmzdc\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:50 crc kubenswrapper[4773]: I0122 13:28:50.681772 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b4f8a4ca-4ec1-41ee-ac71-a319e1590452-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:28:50 crc kubenswrapper[4773]: I0122 13:28:50.998519 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mxljw" event={"ID":"b4f8a4ca-4ec1-41ee-ac71-a319e1590452","Type":"ContainerDied","Data":"252dbb1c3871da107c4e8cc6291b7f490ff6878ffa953a913ba28302e03db619"} Jan 22 13:28:50 crc kubenswrapper[4773]: I0122 13:28:50.998733 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="252dbb1c3871da107c4e8cc6291b7f490ff6878ffa953a913ba28302e03db619" Jan 22 13:28:50 crc kubenswrapper[4773]: I0122 13:28:50.998661 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-mxljw" Jan 22 13:28:51 crc kubenswrapper[4773]: I0122 13:28:51.000193 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7abc-account-create-update-qx5vc" event={"ID":"64ceda11-8e56-4d64-9940-8cca049a03b6","Type":"ContainerDied","Data":"34e5534b669a515c8a281b430a2f00cfbefdb2b5b36dc5d7ca7efc0f9e0c0ccb"} Jan 22 13:28:51 crc kubenswrapper[4773]: I0122 13:28:51.000210 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7abc-account-create-update-qx5vc" Jan 22 13:28:51 crc kubenswrapper[4773]: I0122 13:28:51.000237 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34e5534b669a515c8a281b430a2f00cfbefdb2b5b36dc5d7ca7efc0f9e0c0ccb" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.209165 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-s85mn"] Jan 22 13:28:52 crc kubenswrapper[4773]: E0122 13:28:52.209605 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64ceda11-8e56-4d64-9940-8cca049a03b6" containerName="mariadb-account-create-update" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.209623 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="64ceda11-8e56-4d64-9940-8cca049a03b6" containerName="mariadb-account-create-update" Jan 22 13:28:52 crc kubenswrapper[4773]: E0122 13:28:52.209655 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4f8a4ca-4ec1-41ee-ac71-a319e1590452" containerName="mariadb-database-create" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.209665 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4f8a4ca-4ec1-41ee-ac71-a319e1590452" containerName="mariadb-database-create" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.209898 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="64ceda11-8e56-4d64-9940-8cca049a03b6" containerName="mariadb-account-create-update" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.209923 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4f8a4ca-4ec1-41ee-ac71-a319e1590452" containerName="mariadb-database-create" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.210655 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-s85mn" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.217380 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-cql4v" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.217467 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.218964 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.223540 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-s85mn"] Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.408947 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d-config\") pod \"neutron-db-sync-s85mn\" (UID: \"b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d\") " pod="openstack/neutron-db-sync-s85mn" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.409369 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bffjf\" (UniqueName: \"kubernetes.io/projected/b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d-kube-api-access-bffjf\") pod \"neutron-db-sync-s85mn\" (UID: \"b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d\") " pod="openstack/neutron-db-sync-s85mn" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.409427 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d-combined-ca-bundle\") pod \"neutron-db-sync-s85mn\" (UID: \"b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d\") " pod="openstack/neutron-db-sync-s85mn" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.510721 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bffjf\" (UniqueName: \"kubernetes.io/projected/b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d-kube-api-access-bffjf\") pod \"neutron-db-sync-s85mn\" (UID: \"b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d\") " pod="openstack/neutron-db-sync-s85mn" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.510806 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d-combined-ca-bundle\") pod \"neutron-db-sync-s85mn\" (UID: \"b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d\") " pod="openstack/neutron-db-sync-s85mn" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.510976 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d-config\") pod \"neutron-db-sync-s85mn\" (UID: \"b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d\") " pod="openstack/neutron-db-sync-s85mn" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.516525 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d-config\") pod \"neutron-db-sync-s85mn\" (UID: \"b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d\") " pod="openstack/neutron-db-sync-s85mn" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.516646 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d-combined-ca-bundle\") pod \"neutron-db-sync-s85mn\" (UID: \"b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d\") " pod="openstack/neutron-db-sync-s85mn" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.529624 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bffjf\" (UniqueName: \"kubernetes.io/projected/b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d-kube-api-access-bffjf\") pod \"neutron-db-sync-s85mn\" (UID: \"b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d\") " pod="openstack/neutron-db-sync-s85mn" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.530419 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-s85mn" Jan 22 13:28:52 crc kubenswrapper[4773]: I0122 13:28:52.972342 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-s85mn"] Jan 22 13:28:52 crc kubenswrapper[4773]: W0122 13:28:52.977576 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb264c7ce_d6d4_4c9b_9cc4_9567db7fd96d.slice/crio-20625ed9f1fca4baf3af659cc99d729cd577ca2dcf5183356c5e208cb1638fae WatchSource:0}: Error finding container 20625ed9f1fca4baf3af659cc99d729cd577ca2dcf5183356c5e208cb1638fae: Status 404 returned error can't find the container with id 20625ed9f1fca4baf3af659cc99d729cd577ca2dcf5183356c5e208cb1638fae Jan 22 13:28:53 crc kubenswrapper[4773]: I0122 13:28:53.017015 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-s85mn" event={"ID":"b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d","Type":"ContainerStarted","Data":"20625ed9f1fca4baf3af659cc99d729cd577ca2dcf5183356c5e208cb1638fae"} Jan 22 13:28:54 crc kubenswrapper[4773]: I0122 13:28:54.026527 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-s85mn" event={"ID":"b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d","Type":"ContainerStarted","Data":"29157dfab8645a526b3459aa4ae2b08b8d91a86f031a944e164fa54b04fefca7"} Jan 22 13:28:54 crc kubenswrapper[4773]: I0122 13:28:54.050301 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-s85mn" podStartSLOduration=2.050262941 podStartE2EDuration="2.050262941s" podCreationTimestamp="2026-01-22 13:28:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:28:54.049397576 +0000 UTC m=+5641.627513401" watchObservedRunningTime="2026-01-22 13:28:54.050262941 +0000 UTC m=+5641.628378766" Jan 22 13:29:03 crc kubenswrapper[4773]: I0122 13:29:03.147938 4773 generic.go:334] "Generic (PLEG): container finished" podID="b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d" containerID="29157dfab8645a526b3459aa4ae2b08b8d91a86f031a944e164fa54b04fefca7" exitCode=0 Jan 22 13:29:03 crc kubenswrapper[4773]: I0122 13:29:03.148227 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-s85mn" event={"ID":"b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d","Type":"ContainerDied","Data":"29157dfab8645a526b3459aa4ae2b08b8d91a86f031a944e164fa54b04fefca7"} Jan 22 13:29:04 crc kubenswrapper[4773]: I0122 13:29:04.074573 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 
22 13:29:04 crc kubenswrapper[4773]: I0122 13:29:04.074684 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:29:04 crc kubenswrapper[4773]: I0122 13:29:04.516497 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-s85mn" Jan 22 13:29:04 crc kubenswrapper[4773]: I0122 13:29:04.629611 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d-combined-ca-bundle\") pod \"b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d\" (UID: \"b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d\") " Jan 22 13:29:04 crc kubenswrapper[4773]: I0122 13:29:04.629672 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bffjf\" (UniqueName: \"kubernetes.io/projected/b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d-kube-api-access-bffjf\") pod \"b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d\" (UID: \"b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d\") " Jan 22 13:29:04 crc kubenswrapper[4773]: I0122 13:29:04.629813 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d-config\") pod \"b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d\" (UID: \"b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d\") " Jan 22 13:29:04 crc kubenswrapper[4773]: I0122 13:29:04.635929 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d-kube-api-access-bffjf" (OuterVolumeSpecName: "kube-api-access-bffjf") pod "b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d" (UID: "b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d"). InnerVolumeSpecName "kube-api-access-bffjf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:29:04 crc kubenswrapper[4773]: I0122 13:29:04.656982 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d-config" (OuterVolumeSpecName: "config") pod "b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d" (UID: "b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:29:04 crc kubenswrapper[4773]: I0122 13:29:04.671100 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d" (UID: "b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:29:04 crc kubenswrapper[4773]: I0122 13:29:04.732013 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:04 crc kubenswrapper[4773]: I0122 13:29:04.732050 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:04 crc kubenswrapper[4773]: I0122 13:29:04.732062 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bffjf\" (UniqueName: \"kubernetes.io/projected/b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d-kube-api-access-bffjf\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.164218 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-s85mn" event={"ID":"b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d","Type":"ContainerDied","Data":"20625ed9f1fca4baf3af659cc99d729cd577ca2dcf5183356c5e208cb1638fae"} Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.164271 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="20625ed9f1fca4baf3af659cc99d729cd577ca2dcf5183356c5e208cb1638fae" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.164340 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-s85mn" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.505641 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6db946c6ff-qsl54"] Jan 22 13:29:05 crc kubenswrapper[4773]: E0122 13:29:05.506055 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d" containerName="neutron-db-sync" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.506079 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d" containerName="neutron-db-sync" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.506268 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d" containerName="neutron-db-sync" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.507441 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.525858 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6db946c6ff-qsl54"] Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.647932 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-ovsdbserver-nb\") pod \"dnsmasq-dns-6db946c6ff-qsl54\" (UID: \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.648276 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-dns-svc\") pod \"dnsmasq-dns-6db946c6ff-qsl54\" (UID: \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.648397 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xqth\" (UniqueName: \"kubernetes.io/projected/9adc3ff0-321e-4763-8e3d-b9767a3b976e-kube-api-access-4xqth\") pod \"dnsmasq-dns-6db946c6ff-qsl54\" (UID: \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.648546 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-config\") pod \"dnsmasq-dns-6db946c6ff-qsl54\" (UID: \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.648711 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-ovsdbserver-sb\") pod \"dnsmasq-dns-6db946c6ff-qsl54\" (UID: \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.649267 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6d7b6dd764-m9498"] Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.650625 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.652967 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.654237 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.654560 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.655255 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-cql4v" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.680949 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6d7b6dd764-m9498"] Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.750101 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-ovsdbserver-sb\") pod \"dnsmasq-dns-6db946c6ff-qsl54\" (UID: \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.750239 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-ovndb-tls-certs\") pod \"neutron-6d7b6dd764-m9498\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.750286 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-ovsdbserver-nb\") pod \"dnsmasq-dns-6db946c6ff-qsl54\" (UID: \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.750332 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-dns-svc\") pod \"dnsmasq-dns-6db946c6ff-qsl54\" (UID: \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.750390 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-config\") pod \"neutron-6d7b6dd764-m9498\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.750445 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-httpd-config\") pod \"neutron-6d7b6dd764-m9498\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.750478 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xqth\" (UniqueName: \"kubernetes.io/projected/9adc3ff0-321e-4763-8e3d-b9767a3b976e-kube-api-access-4xqth\") pod \"dnsmasq-dns-6db946c6ff-qsl54\" (UID: 
\"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.750568 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sv497\" (UniqueName: \"kubernetes.io/projected/6dd3c78c-e886-415d-85c2-3f41dbc80e76-kube-api-access-sv497\") pod \"neutron-6d7b6dd764-m9498\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.750647 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-combined-ca-bundle\") pod \"neutron-6d7b6dd764-m9498\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.750701 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-config\") pod \"dnsmasq-dns-6db946c6ff-qsl54\" (UID: \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.751018 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-ovsdbserver-sb\") pod \"dnsmasq-dns-6db946c6ff-qsl54\" (UID: \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.751352 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-dns-svc\") pod \"dnsmasq-dns-6db946c6ff-qsl54\" (UID: \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.751365 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-config\") pod \"dnsmasq-dns-6db946c6ff-qsl54\" (UID: \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.751706 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-ovsdbserver-nb\") pod \"dnsmasq-dns-6db946c6ff-qsl54\" (UID: \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.780359 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xqth\" (UniqueName: \"kubernetes.io/projected/9adc3ff0-321e-4763-8e3d-b9767a3b976e-kube-api-access-4xqth\") pod \"dnsmasq-dns-6db946c6ff-qsl54\" (UID: \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.827247 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.853888 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-ovndb-tls-certs\") pod \"neutron-6d7b6dd764-m9498\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.854002 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-config\") pod \"neutron-6d7b6dd764-m9498\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.854056 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-httpd-config\") pod \"neutron-6d7b6dd764-m9498\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.854093 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sv497\" (UniqueName: \"kubernetes.io/projected/6dd3c78c-e886-415d-85c2-3f41dbc80e76-kube-api-access-sv497\") pod \"neutron-6d7b6dd764-m9498\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.854125 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-combined-ca-bundle\") pod \"neutron-6d7b6dd764-m9498\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.859093 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-combined-ca-bundle\") pod \"neutron-6d7b6dd764-m9498\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.859640 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-httpd-config\") pod \"neutron-6d7b6dd764-m9498\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.859955 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-config\") pod \"neutron-6d7b6dd764-m9498\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.875029 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-ovndb-tls-certs\") pod \"neutron-6d7b6dd764-m9498\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.879069 4773 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-sv497\" (UniqueName: \"kubernetes.io/projected/6dd3c78c-e886-415d-85c2-3f41dbc80e76-kube-api-access-sv497\") pod \"neutron-6d7b6dd764-m9498\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:05 crc kubenswrapper[4773]: I0122 13:29:05.968623 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:06 crc kubenswrapper[4773]: I0122 13:29:06.136382 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6db946c6ff-qsl54"] Jan 22 13:29:06 crc kubenswrapper[4773]: I0122 13:29:06.180865 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" event={"ID":"9adc3ff0-321e-4763-8e3d-b9767a3b976e","Type":"ContainerStarted","Data":"83c2421b983d2b95814531eed7250098b115318c825e97a5394ddef2be19aa46"} Jan 22 13:29:06 crc kubenswrapper[4773]: I0122 13:29:06.578761 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6d7b6dd764-m9498"] Jan 22 13:29:06 crc kubenswrapper[4773]: W0122 13:29:06.590864 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6dd3c78c_e886_415d_85c2_3f41dbc80e76.slice/crio-9ccdb6f5e7814a642cda4a82972abbae5173eef4870bbacf7c9a287b2134630c WatchSource:0}: Error finding container 9ccdb6f5e7814a642cda4a82972abbae5173eef4870bbacf7c9a287b2134630c: Status 404 returned error can't find the container with id 9ccdb6f5e7814a642cda4a82972abbae5173eef4870bbacf7c9a287b2134630c Jan 22 13:29:07 crc kubenswrapper[4773]: I0122 13:29:07.193453 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d7b6dd764-m9498" event={"ID":"6dd3c78c-e886-415d-85c2-3f41dbc80e76","Type":"ContainerStarted","Data":"c4e7cea8e461b19111f5513f58ad1a94ca7121238f93d2c3fb337945617d3649"} Jan 22 13:29:07 crc kubenswrapper[4773]: I0122 13:29:07.193818 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d7b6dd764-m9498" event={"ID":"6dd3c78c-e886-415d-85c2-3f41dbc80e76","Type":"ContainerStarted","Data":"9ccdb6f5e7814a642cda4a82972abbae5173eef4870bbacf7c9a287b2134630c"} Jan 22 13:29:07 crc kubenswrapper[4773]: I0122 13:29:07.194878 4773 generic.go:334] "Generic (PLEG): container finished" podID="9adc3ff0-321e-4763-8e3d-b9767a3b976e" containerID="3bdc0a59682c6e90b4cdc0e7dee9d916e6fbb9d0e3dc2f46242db6c6ddf2cb02" exitCode=0 Jan 22 13:29:07 crc kubenswrapper[4773]: I0122 13:29:07.194908 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" event={"ID":"9adc3ff0-321e-4763-8e3d-b9767a3b976e","Type":"ContainerDied","Data":"3bdc0a59682c6e90b4cdc0e7dee9d916e6fbb9d0e3dc2f46242db6c6ddf2cb02"} Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.210436 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d7b6dd764-m9498" event={"ID":"6dd3c78c-e886-415d-85c2-3f41dbc80e76","Type":"ContainerStarted","Data":"2fec4985a50f65fd524dd8aa7e9660bb6bd58047c22583ca78054fc0a34a0239"} Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.210774 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.213724 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" 
event={"ID":"9adc3ff0-321e-4763-8e3d-b9767a3b976e","Type":"ContainerStarted","Data":"f23456267e2e77f7676c01877d773043ccc47ddb72615026e0a7ad0a779581c1"} Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.213976 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.226470 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5c649475bf-5dr4c"] Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.228501 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.230692 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.234764 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.252883 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5c649475bf-5dr4c"] Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.258764 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6d7b6dd764-m9498" podStartSLOduration=3.258732621 podStartE2EDuration="3.258732621s" podCreationTimestamp="2026-01-22 13:29:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:29:08.235084856 +0000 UTC m=+5655.813200681" watchObservedRunningTime="2026-01-22 13:29:08.258732621 +0000 UTC m=+5655.836848446" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.292851 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" podStartSLOduration=3.292830759 podStartE2EDuration="3.292830759s" podCreationTimestamp="2026-01-22 13:29:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:29:08.285523274 +0000 UTC m=+5655.863639099" watchObservedRunningTime="2026-01-22 13:29:08.292830759 +0000 UTC m=+5655.870946584" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.399717 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1a2c7ca2-94cf-4302-8bad-8593c9906521-config\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.399765 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a2c7ca2-94cf-4302-8bad-8593c9906521-ovndb-tls-certs\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.399807 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ph57p\" (UniqueName: \"kubernetes.io/projected/1a2c7ca2-94cf-4302-8bad-8593c9906521-kube-api-access-ph57p\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 
13:29:08.399838 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1a2c7ca2-94cf-4302-8bad-8593c9906521-httpd-config\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.399866 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a2c7ca2-94cf-4302-8bad-8593c9906521-public-tls-certs\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.399898 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a2c7ca2-94cf-4302-8bad-8593c9906521-combined-ca-bundle\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.399938 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a2c7ca2-94cf-4302-8bad-8593c9906521-internal-tls-certs\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.502395 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ph57p\" (UniqueName: \"kubernetes.io/projected/1a2c7ca2-94cf-4302-8bad-8593c9906521-kube-api-access-ph57p\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.502749 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1a2c7ca2-94cf-4302-8bad-8593c9906521-httpd-config\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.502794 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a2c7ca2-94cf-4302-8bad-8593c9906521-public-tls-certs\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.502831 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a2c7ca2-94cf-4302-8bad-8593c9906521-combined-ca-bundle\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.502892 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a2c7ca2-94cf-4302-8bad-8593c9906521-internal-tls-certs\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.502998 4773 
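Editor's note: in the two "Observed pod startup duration" entries above, firstStartedPulling and lastFinishedPulling are the zero time (no image pull happened), so podStartSLOduration equals podStartE2EDuration, and both are simply watchObservedRunningTime minus podCreationTimestamp. A small Go check of that arithmetic using the neutron-6d7b6dd764-m9498 values (a sketch of the bookkeeping, not kubelet's tracker):

```go
package main

import (
	"fmt"
	"time"
)

const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func main() {
	// Timestamps copied from the latency-tracker entry above.
	created, _ := time.Parse(layout, "2026-01-22 13:29:05 +0000 UTC")
	observed, _ := time.Parse(layout, "2026-01-22 13:29:08.258732621 +0000 UTC")
	// With the pull timestamps at the zero time, nothing is subtracted for
	// image pulling, so SLO and end-to-end durations coincide.
	fmt.Println(observed.Sub(created)) // 3.258732621s, matching podStartSLOduration
}
```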
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1a2c7ca2-94cf-4302-8bad-8593c9906521-config\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.503022 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a2c7ca2-94cf-4302-8bad-8593c9906521-ovndb-tls-certs\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.527161 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1a2c7ca2-94cf-4302-8bad-8593c9906521-httpd-config\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.532665 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a2c7ca2-94cf-4302-8bad-8593c9906521-ovndb-tls-certs\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.533159 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a2c7ca2-94cf-4302-8bad-8593c9906521-combined-ca-bundle\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.533224 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a2c7ca2-94cf-4302-8bad-8593c9906521-internal-tls-certs\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.550072 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ph57p\" (UniqueName: \"kubernetes.io/projected/1a2c7ca2-94cf-4302-8bad-8593c9906521-kube-api-access-ph57p\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.551044 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a2c7ca2-94cf-4302-8bad-8593c9906521-public-tls-certs\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.558234 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/1a2c7ca2-94cf-4302-8bad-8593c9906521-config\") pod \"neutron-5c649475bf-5dr4c\" (UID: \"1a2c7ca2-94cf-4302-8bad-8593c9906521\") " pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:08 crc kubenswrapper[4773]: I0122 13:29:08.558716 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:09 crc kubenswrapper[4773]: I0122 13:29:09.193858 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5c649475bf-5dr4c"] Jan 22 13:29:09 crc kubenswrapper[4773]: I0122 13:29:09.226683 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c649475bf-5dr4c" event={"ID":"1a2c7ca2-94cf-4302-8bad-8593c9906521","Type":"ContainerStarted","Data":"8383e57c5a8be58931e560d058e75fee8ff97ade03dc968f953401cdf54ece08"} Jan 22 13:29:10 crc kubenswrapper[4773]: I0122 13:29:10.062443 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-rcpnr"] Jan 22 13:29:10 crc kubenswrapper[4773]: I0122 13:29:10.071368 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-rcpnr"] Jan 22 13:29:10 crc kubenswrapper[4773]: I0122 13:29:10.238123 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c649475bf-5dr4c" event={"ID":"1a2c7ca2-94cf-4302-8bad-8593c9906521","Type":"ContainerStarted","Data":"15d9d45e9fb8305c58dc6b2d8e17a500edd03a85282d034d26d48070293a5c7b"} Jan 22 13:29:10 crc kubenswrapper[4773]: I0122 13:29:10.238179 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5c649475bf-5dr4c" event={"ID":"1a2c7ca2-94cf-4302-8bad-8593c9906521","Type":"ContainerStarted","Data":"cb040d9a9c0ee147712ceba020be0c5759e76c969ea17fa0ee64a749ce578548"} Jan 22 13:29:10 crc kubenswrapper[4773]: I0122 13:29:10.671047 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="074aa48d-e0e1-4a84-9861-3a5b207868f8" path="/var/lib/kubelet/pods/074aa48d-e0e1-4a84-9861-3a5b207868f8/volumes" Jan 22 13:29:11 crc kubenswrapper[4773]: I0122 13:29:11.246709 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:11 crc kubenswrapper[4773]: I0122 13:29:11.269527 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5c649475bf-5dr4c" podStartSLOduration=3.269507621 podStartE2EDuration="3.269507621s" podCreationTimestamp="2026-01-22 13:29:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:29:11.267195056 +0000 UTC m=+5658.845310881" watchObservedRunningTime="2026-01-22 13:29:11.269507621 +0000 UTC m=+5658.847623446" Jan 22 13:29:15 crc kubenswrapper[4773]: I0122 13:29:15.834519 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:29:15 crc kubenswrapper[4773]: I0122 13:29:15.896140 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bcd8f6955-c5dwc"] Jan 22 13:29:15 crc kubenswrapper[4773]: I0122 13:29:15.899081 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" podUID="b1748b82-0bd9-4c75-9291-77d4ef414d48" containerName="dnsmasq-dns" containerID="cri-o://9fc64e39289e259d8a2b9160916a03bedc174d8e61615a18ec7881231bb6e63e" gracePeriod=10 Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.291171 4773 generic.go:334] "Generic (PLEG): container finished" podID="b1748b82-0bd9-4c75-9291-77d4ef414d48" containerID="9fc64e39289e259d8a2b9160916a03bedc174d8e61615a18ec7881231bb6e63e" exitCode=0 Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.291567 4773 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" event={"ID":"b1748b82-0bd9-4c75-9291-77d4ef414d48","Type":"ContainerDied","Data":"9fc64e39289e259d8a2b9160916a03bedc174d8e61615a18ec7881231bb6e63e"} Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.291601 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" event={"ID":"b1748b82-0bd9-4c75-9291-77d4ef414d48","Type":"ContainerDied","Data":"4b479532bb9817bef3f9e749c937d8b3525d467e8c5f1f0108b86cc88487f635"} Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.291616 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b479532bb9817bef3f9e749c937d8b3525d467e8c5f1f0108b86cc88487f635" Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.372591 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.492258 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-dns-svc\") pod \"b1748b82-0bd9-4c75-9291-77d4ef414d48\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.492338 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-ovsdbserver-nb\") pod \"b1748b82-0bd9-4c75-9291-77d4ef414d48\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.492446 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-config\") pod \"b1748b82-0bd9-4c75-9291-77d4ef414d48\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.492465 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-ovsdbserver-sb\") pod \"b1748b82-0bd9-4c75-9291-77d4ef414d48\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.492547 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8dmt\" (UniqueName: \"kubernetes.io/projected/b1748b82-0bd9-4c75-9291-77d4ef414d48-kube-api-access-t8dmt\") pod \"b1748b82-0bd9-4c75-9291-77d4ef414d48\" (UID: \"b1748b82-0bd9-4c75-9291-77d4ef414d48\") " Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.502983 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1748b82-0bd9-4c75-9291-77d4ef414d48-kube-api-access-t8dmt" (OuterVolumeSpecName: "kube-api-access-t8dmt") pod "b1748b82-0bd9-4c75-9291-77d4ef414d48" (UID: "b1748b82-0bd9-4c75-9291-77d4ef414d48"). InnerVolumeSpecName "kube-api-access-t8dmt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.540622 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b1748b82-0bd9-4c75-9291-77d4ef414d48" (UID: "b1748b82-0bd9-4c75-9291-77d4ef414d48"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.547911 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b1748b82-0bd9-4c75-9291-77d4ef414d48" (UID: "b1748b82-0bd9-4c75-9291-77d4ef414d48"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.558181 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b1748b82-0bd9-4c75-9291-77d4ef414d48" (UID: "b1748b82-0bd9-4c75-9291-77d4ef414d48"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.563047 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-config" (OuterVolumeSpecName: "config") pod "b1748b82-0bd9-4c75-9291-77d4ef414d48" (UID: "b1748b82-0bd9-4c75-9291-77d4ef414d48"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.594499 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.594559 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.594581 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8dmt\" (UniqueName: \"kubernetes.io/projected/b1748b82-0bd9-4c75-9291-77d4ef414d48-kube-api-access-t8dmt\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.594600 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:16 crc kubenswrapper[4773]: I0122 13:29:16.594618 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b1748b82-0bd9-4c75-9291-77d4ef414d48-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:17 crc kubenswrapper[4773]: I0122 13:29:17.299978 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7bcd8f6955-c5dwc" Jan 22 13:29:17 crc kubenswrapper[4773]: I0122 13:29:17.326904 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bcd8f6955-c5dwc"] Jan 22 13:29:17 crc kubenswrapper[4773]: I0122 13:29:17.336630 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7bcd8f6955-c5dwc"] Jan 22 13:29:18 crc kubenswrapper[4773]: I0122 13:29:18.670450 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1748b82-0bd9-4c75-9291-77d4ef414d48" path="/var/lib/kubelet/pods/b1748b82-0bd9-4c75-9291-77d4ef414d48/volumes" Jan 22 13:29:30 crc kubenswrapper[4773]: I0122 13:29:30.959404 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tdn97"] Jan 22 13:29:30 crc kubenswrapper[4773]: E0122 13:29:30.960331 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1748b82-0bd9-4c75-9291-77d4ef414d48" containerName="init" Jan 22 13:29:30 crc kubenswrapper[4773]: I0122 13:29:30.960344 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1748b82-0bd9-4c75-9291-77d4ef414d48" containerName="init" Jan 22 13:29:30 crc kubenswrapper[4773]: E0122 13:29:30.960353 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1748b82-0bd9-4c75-9291-77d4ef414d48" containerName="dnsmasq-dns" Jan 22 13:29:30 crc kubenswrapper[4773]: I0122 13:29:30.960362 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1748b82-0bd9-4c75-9291-77d4ef414d48" containerName="dnsmasq-dns" Jan 22 13:29:30 crc kubenswrapper[4773]: I0122 13:29:30.960524 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1748b82-0bd9-4c75-9291-77d4ef414d48" containerName="dnsmasq-dns" Jan 22 13:29:30 crc kubenswrapper[4773]: I0122 13:29:30.961770 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tdn97" Jan 22 13:29:30 crc kubenswrapper[4773]: I0122 13:29:30.975174 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tdn97"] Jan 22 13:29:31 crc kubenswrapper[4773]: I0122 13:29:31.085974 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qxvt\" (UniqueName: \"kubernetes.io/projected/8c04dd22-513f-4508-8660-744c2bbd66f8-kube-api-access-6qxvt\") pod \"community-operators-tdn97\" (UID: \"8c04dd22-513f-4508-8660-744c2bbd66f8\") " pod="openshift-marketplace/community-operators-tdn97" Jan 22 13:29:31 crc kubenswrapper[4773]: I0122 13:29:31.086079 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c04dd22-513f-4508-8660-744c2bbd66f8-utilities\") pod \"community-operators-tdn97\" (UID: \"8c04dd22-513f-4508-8660-744c2bbd66f8\") " pod="openshift-marketplace/community-operators-tdn97" Jan 22 13:29:31 crc kubenswrapper[4773]: I0122 13:29:31.086233 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c04dd22-513f-4508-8660-744c2bbd66f8-catalog-content\") pod \"community-operators-tdn97\" (UID: \"8c04dd22-513f-4508-8660-744c2bbd66f8\") " pod="openshift-marketplace/community-operators-tdn97" Jan 22 13:29:31 crc kubenswrapper[4773]: I0122 13:29:31.187827 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c04dd22-513f-4508-8660-744c2bbd66f8-catalog-content\") pod \"community-operators-tdn97\" (UID: \"8c04dd22-513f-4508-8660-744c2bbd66f8\") " pod="openshift-marketplace/community-operators-tdn97" Jan 22 13:29:31 crc kubenswrapper[4773]: I0122 13:29:31.187877 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c04dd22-513f-4508-8660-744c2bbd66f8-catalog-content\") pod \"community-operators-tdn97\" (UID: \"8c04dd22-513f-4508-8660-744c2bbd66f8\") " pod="openshift-marketplace/community-operators-tdn97" Jan 22 13:29:31 crc kubenswrapper[4773]: I0122 13:29:31.187984 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qxvt\" (UniqueName: \"kubernetes.io/projected/8c04dd22-513f-4508-8660-744c2bbd66f8-kube-api-access-6qxvt\") pod \"community-operators-tdn97\" (UID: \"8c04dd22-513f-4508-8660-744c2bbd66f8\") " pod="openshift-marketplace/community-operators-tdn97" Jan 22 13:29:31 crc kubenswrapper[4773]: I0122 13:29:31.188009 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c04dd22-513f-4508-8660-744c2bbd66f8-utilities\") pod \"community-operators-tdn97\" (UID: \"8c04dd22-513f-4508-8660-744c2bbd66f8\") " pod="openshift-marketplace/community-operators-tdn97" Jan 22 13:29:31 crc kubenswrapper[4773]: I0122 13:29:31.188395 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c04dd22-513f-4508-8660-744c2bbd66f8-utilities\") pod \"community-operators-tdn97\" (UID: \"8c04dd22-513f-4508-8660-744c2bbd66f8\") " pod="openshift-marketplace/community-operators-tdn97" Jan 22 13:29:31 crc kubenswrapper[4773]: I0122 13:29:31.208863 4773 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6qxvt\" (UniqueName: \"kubernetes.io/projected/8c04dd22-513f-4508-8660-744c2bbd66f8-kube-api-access-6qxvt\") pod \"community-operators-tdn97\" (UID: \"8c04dd22-513f-4508-8660-744c2bbd66f8\") " pod="openshift-marketplace/community-operators-tdn97" Jan 22 13:29:31 crc kubenswrapper[4773]: I0122 13:29:31.289296 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tdn97" Jan 22 13:29:31 crc kubenswrapper[4773]: I0122 13:29:31.821692 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tdn97"] Jan 22 13:29:32 crc kubenswrapper[4773]: I0122 13:29:32.470655 4773 generic.go:334] "Generic (PLEG): container finished" podID="8c04dd22-513f-4508-8660-744c2bbd66f8" containerID="fcd9317499914cf84f5643fe1d151bc3a96662d9a994dfb59b0df3d515beb04a" exitCode=0 Jan 22 13:29:32 crc kubenswrapper[4773]: I0122 13:29:32.470758 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tdn97" event={"ID":"8c04dd22-513f-4508-8660-744c2bbd66f8","Type":"ContainerDied","Data":"fcd9317499914cf84f5643fe1d151bc3a96662d9a994dfb59b0df3d515beb04a"} Jan 22 13:29:32 crc kubenswrapper[4773]: I0122 13:29:32.471020 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tdn97" event={"ID":"8c04dd22-513f-4508-8660-744c2bbd66f8","Type":"ContainerStarted","Data":"5537cc1c8edc904868859ddaaf64cec448589e132f56fa415f9fc731c2ab88a2"} Jan 22 13:29:33 crc kubenswrapper[4773]: I0122 13:29:33.492766 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tdn97" event={"ID":"8c04dd22-513f-4508-8660-744c2bbd66f8","Type":"ContainerStarted","Data":"005b110da705e0d207a736c73dd18fe87a1ce58577dec3dd2b8a32fc2b54f5c5"} Jan 22 13:29:34 crc kubenswrapper[4773]: I0122 13:29:34.074792 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:29:34 crc kubenswrapper[4773]: I0122 13:29:34.074894 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:29:34 crc kubenswrapper[4773]: I0122 13:29:34.506529 4773 generic.go:334] "Generic (PLEG): container finished" podID="8c04dd22-513f-4508-8660-744c2bbd66f8" containerID="005b110da705e0d207a736c73dd18fe87a1ce58577dec3dd2b8a32fc2b54f5c5" exitCode=0 Jan 22 13:29:34 crc kubenswrapper[4773]: I0122 13:29:34.506574 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tdn97" event={"ID":"8c04dd22-513f-4508-8660-744c2bbd66f8","Type":"ContainerDied","Data":"005b110da705e0d207a736c73dd18fe87a1ce58577dec3dd2b8a32fc2b54f5c5"} Jan 22 13:29:35 crc kubenswrapper[4773]: I0122 13:29:35.519666 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tdn97" event={"ID":"8c04dd22-513f-4508-8660-744c2bbd66f8","Type":"ContainerStarted","Data":"81d6051c6548c0cc4419cba7540fb66265b30b108973ba4afe459892266ac359"} Jan 22 
13:29:35 crc kubenswrapper[4773]: I0122 13:29:35.544810 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tdn97" podStartSLOduration=3.01662976 podStartE2EDuration="5.544785657s" podCreationTimestamp="2026-01-22 13:29:30 +0000 UTC" firstStartedPulling="2026-01-22 13:29:32.472831927 +0000 UTC m=+5680.050947752" lastFinishedPulling="2026-01-22 13:29:35.000987824 +0000 UTC m=+5682.579103649" observedRunningTime="2026-01-22 13:29:35.535920148 +0000 UTC m=+5683.114035973" watchObservedRunningTime="2026-01-22 13:29:35.544785657 +0000 UTC m=+5683.122901482" Jan 22 13:29:35 crc kubenswrapper[4773]: I0122 13:29:35.978811 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:38 crc kubenswrapper[4773]: I0122 13:29:38.574170 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5c649475bf-5dr4c" Jan 22 13:29:38 crc kubenswrapper[4773]: I0122 13:29:38.660026 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6d7b6dd764-m9498"] Jan 22 13:29:38 crc kubenswrapper[4773]: I0122 13:29:38.660306 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6d7b6dd764-m9498" podUID="6dd3c78c-e886-415d-85c2-3f41dbc80e76" containerName="neutron-api" containerID="cri-o://c4e7cea8e461b19111f5513f58ad1a94ca7121238f93d2c3fb337945617d3649" gracePeriod=30 Jan 22 13:29:38 crc kubenswrapper[4773]: I0122 13:29:38.665438 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6d7b6dd764-m9498" podUID="6dd3c78c-e886-415d-85c2-3f41dbc80e76" containerName="neutron-httpd" containerID="cri-o://2fec4985a50f65fd524dd8aa7e9660bb6bd58047c22583ca78054fc0a34a0239" gracePeriod=30 Jan 22 13:29:39 crc kubenswrapper[4773]: I0122 13:29:39.580415 4773 generic.go:334] "Generic (PLEG): container finished" podID="6dd3c78c-e886-415d-85c2-3f41dbc80e76" containerID="2fec4985a50f65fd524dd8aa7e9660bb6bd58047c22583ca78054fc0a34a0239" exitCode=0 Jan 22 13:29:39 crc kubenswrapper[4773]: I0122 13:29:39.580458 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d7b6dd764-m9498" event={"ID":"6dd3c78c-e886-415d-85c2-3f41dbc80e76","Type":"ContainerDied","Data":"2fec4985a50f65fd524dd8aa7e9660bb6bd58047c22583ca78054fc0a34a0239"} Jan 22 13:29:41 crc kubenswrapper[4773]: I0122 13:29:41.290083 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tdn97" Jan 22 13:29:41 crc kubenswrapper[4773]: I0122 13:29:41.290355 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tdn97" Jan 22 13:29:41 crc kubenswrapper[4773]: I0122 13:29:41.332851 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tdn97" Jan 22 13:29:41 crc kubenswrapper[4773]: I0122 13:29:41.645857 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tdn97" Jan 22 13:29:41 crc kubenswrapper[4773]: I0122 13:29:41.703048 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tdn97"] Jan 22 13:29:43 crc kubenswrapper[4773]: I0122 13:29:43.626706 4773 generic.go:334] "Generic (PLEG): container finished" podID="6dd3c78c-e886-415d-85c2-3f41dbc80e76" 
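Editor's note: unlike the earlier startup entries, the community-operators-tdn97 entry above records a real image pull, and its numbers are internally consistent: the end-to-end duration is watchObservedRunningTime minus podCreationTimestamp, and the SLO duration additionally excludes the pull window (lastFinishedPulling minus firstStartedPulling). A Go sketch verifying that arithmetic with the values copied from the entry:

```go
package main

import (
	"fmt"
	"time"
)

func mustParse(v string) time.Time {
	t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", v)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	// Timestamps copied from the community-operators-tdn97 entry above.
	created := mustParse("2026-01-22 13:29:30 +0000 UTC")
	firstPull := mustParse("2026-01-22 13:29:32.472831927 +0000 UTC")
	lastPull := mustParse("2026-01-22 13:29:35.000987824 +0000 UTC")
	observed := mustParse("2026-01-22 13:29:35.544785657 +0000 UTC")

	e2e := observed.Sub(created)
	slo := e2e - lastPull.Sub(firstPull) // SLO duration excludes image pull time
	fmt.Println(e2e, slo)                // 5.544785657s 3.01662976s, matching the entry
}
```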
containerID="c4e7cea8e461b19111f5513f58ad1a94ca7121238f93d2c3fb337945617d3649" exitCode=0 Jan 22 13:29:43 crc kubenswrapper[4773]: I0122 13:29:43.627498 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tdn97" podUID="8c04dd22-513f-4508-8660-744c2bbd66f8" containerName="registry-server" containerID="cri-o://81d6051c6548c0cc4419cba7540fb66265b30b108973ba4afe459892266ac359" gracePeriod=2 Jan 22 13:29:43 crc kubenswrapper[4773]: I0122 13:29:43.627899 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d7b6dd764-m9498" event={"ID":"6dd3c78c-e886-415d-85c2-3f41dbc80e76","Type":"ContainerDied","Data":"c4e7cea8e461b19111f5513f58ad1a94ca7121238f93d2c3fb337945617d3649"} Jan 22 13:29:43 crc kubenswrapper[4773]: I0122 13:29:43.970730 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.103298 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tdn97" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.127379 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-ovndb-tls-certs\") pod \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.127538 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-combined-ca-bundle\") pod \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.127564 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-config\") pod \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.127579 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-httpd-config\") pod \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.127602 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sv497\" (UniqueName: \"kubernetes.io/projected/6dd3c78c-e886-415d-85c2-3f41dbc80e76-kube-api-access-sv497\") pod \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\" (UID: \"6dd3c78c-e886-415d-85c2-3f41dbc80e76\") " Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.133395 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dd3c78c-e886-415d-85c2-3f41dbc80e76-kube-api-access-sv497" (OuterVolumeSpecName: "kube-api-access-sv497") pod "6dd3c78c-e886-415d-85c2-3f41dbc80e76" (UID: "6dd3c78c-e886-415d-85c2-3f41dbc80e76"). InnerVolumeSpecName "kube-api-access-sv497". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.137219 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "6dd3c78c-e886-415d-85c2-3f41dbc80e76" (UID: "6dd3c78c-e886-415d-85c2-3f41dbc80e76"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.181660 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6dd3c78c-e886-415d-85c2-3f41dbc80e76" (UID: "6dd3c78c-e886-415d-85c2-3f41dbc80e76"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.187104 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-config" (OuterVolumeSpecName: "config") pod "6dd3c78c-e886-415d-85c2-3f41dbc80e76" (UID: "6dd3c78c-e886-415d-85c2-3f41dbc80e76"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.204602 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "6dd3c78c-e886-415d-85c2-3f41dbc80e76" (UID: "6dd3c78c-e886-415d-85c2-3f41dbc80e76"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.228630 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c04dd22-513f-4508-8660-744c2bbd66f8-utilities\") pod \"8c04dd22-513f-4508-8660-744c2bbd66f8\" (UID: \"8c04dd22-513f-4508-8660-744c2bbd66f8\") " Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.228928 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qxvt\" (UniqueName: \"kubernetes.io/projected/8c04dd22-513f-4508-8660-744c2bbd66f8-kube-api-access-6qxvt\") pod \"8c04dd22-513f-4508-8660-744c2bbd66f8\" (UID: \"8c04dd22-513f-4508-8660-744c2bbd66f8\") " Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.229044 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c04dd22-513f-4508-8660-744c2bbd66f8-catalog-content\") pod \"8c04dd22-513f-4508-8660-744c2bbd66f8\" (UID: \"8c04dd22-513f-4508-8660-744c2bbd66f8\") " Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.229712 4773 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.229826 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.229889 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.229948 4773 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6dd3c78c-e886-415d-85c2-3f41dbc80e76-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.230006 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sv497\" (UniqueName: \"kubernetes.io/projected/6dd3c78c-e886-415d-85c2-3f41dbc80e76-kube-api-access-sv497\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.230274 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c04dd22-513f-4508-8660-744c2bbd66f8-utilities" (OuterVolumeSpecName: "utilities") pod "8c04dd22-513f-4508-8660-744c2bbd66f8" (UID: "8c04dd22-513f-4508-8660-744c2bbd66f8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.233526 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c04dd22-513f-4508-8660-744c2bbd66f8-kube-api-access-6qxvt" (OuterVolumeSpecName: "kube-api-access-6qxvt") pod "8c04dd22-513f-4508-8660-744c2bbd66f8" (UID: "8c04dd22-513f-4508-8660-744c2bbd66f8"). InnerVolumeSpecName "kube-api-access-6qxvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.286081 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c04dd22-513f-4508-8660-744c2bbd66f8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8c04dd22-513f-4508-8660-744c2bbd66f8" (UID: "8c04dd22-513f-4508-8660-744c2bbd66f8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.331472 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c04dd22-513f-4508-8660-744c2bbd66f8-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.331502 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qxvt\" (UniqueName: \"kubernetes.io/projected/8c04dd22-513f-4508-8660-744c2bbd66f8-kube-api-access-6qxvt\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.331513 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c04dd22-513f-4508-8660-744c2bbd66f8-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.638999 4773 generic.go:334] "Generic (PLEG): container finished" podID="8c04dd22-513f-4508-8660-744c2bbd66f8" containerID="81d6051c6548c0cc4419cba7540fb66265b30b108973ba4afe459892266ac359" exitCode=0 Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.639078 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tdn97" event={"ID":"8c04dd22-513f-4508-8660-744c2bbd66f8","Type":"ContainerDied","Data":"81d6051c6548c0cc4419cba7540fb66265b30b108973ba4afe459892266ac359"} Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.639131 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tdn97" event={"ID":"8c04dd22-513f-4508-8660-744c2bbd66f8","Type":"ContainerDied","Data":"5537cc1c8edc904868859ddaaf64cec448589e132f56fa415f9fc731c2ab88a2"} Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.639161 4773 scope.go:117] "RemoveContainer" containerID="81d6051c6548c0cc4419cba7540fb66265b30b108973ba4afe459892266ac359" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.640580 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tdn97" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.641444 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d7b6dd764-m9498" event={"ID":"6dd3c78c-e886-415d-85c2-3f41dbc80e76","Type":"ContainerDied","Data":"9ccdb6f5e7814a642cda4a82972abbae5173eef4870bbacf7c9a287b2134630c"} Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.641483 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6d7b6dd764-m9498" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.663578 4773 scope.go:117] "RemoveContainer" containerID="005b110da705e0d207a736c73dd18fe87a1ce58577dec3dd2b8a32fc2b54f5c5" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.693795 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6d7b6dd764-m9498"] Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.705874 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-6d7b6dd764-m9498"] Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.712656 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tdn97"] Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.717747 4773 scope.go:117] "RemoveContainer" containerID="fcd9317499914cf84f5643fe1d151bc3a96662d9a994dfb59b0df3d515beb04a" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.719108 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tdn97"] Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.758070 4773 scope.go:117] "RemoveContainer" containerID="81d6051c6548c0cc4419cba7540fb66265b30b108973ba4afe459892266ac359" Jan 22 13:29:44 crc kubenswrapper[4773]: E0122 13:29:44.758576 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81d6051c6548c0cc4419cba7540fb66265b30b108973ba4afe459892266ac359\": container with ID starting with 81d6051c6548c0cc4419cba7540fb66265b30b108973ba4afe459892266ac359 not found: ID does not exist" containerID="81d6051c6548c0cc4419cba7540fb66265b30b108973ba4afe459892266ac359" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.758612 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81d6051c6548c0cc4419cba7540fb66265b30b108973ba4afe459892266ac359"} err="failed to get container status \"81d6051c6548c0cc4419cba7540fb66265b30b108973ba4afe459892266ac359\": rpc error: code = NotFound desc = could not find container \"81d6051c6548c0cc4419cba7540fb66265b30b108973ba4afe459892266ac359\": container with ID starting with 81d6051c6548c0cc4419cba7540fb66265b30b108973ba4afe459892266ac359 not found: ID does not exist" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.758636 4773 scope.go:117] "RemoveContainer" containerID="005b110da705e0d207a736c73dd18fe87a1ce58577dec3dd2b8a32fc2b54f5c5" Jan 22 13:29:44 crc kubenswrapper[4773]: E0122 13:29:44.759030 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"005b110da705e0d207a736c73dd18fe87a1ce58577dec3dd2b8a32fc2b54f5c5\": container with ID starting with 005b110da705e0d207a736c73dd18fe87a1ce58577dec3dd2b8a32fc2b54f5c5 not found: ID does not exist" containerID="005b110da705e0d207a736c73dd18fe87a1ce58577dec3dd2b8a32fc2b54f5c5" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.759054 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"005b110da705e0d207a736c73dd18fe87a1ce58577dec3dd2b8a32fc2b54f5c5"} err="failed to get container status \"005b110da705e0d207a736c73dd18fe87a1ce58577dec3dd2b8a32fc2b54f5c5\": rpc error: code = NotFound desc = could not find container \"005b110da705e0d207a736c73dd18fe87a1ce58577dec3dd2b8a32fc2b54f5c5\": container with ID starting with 005b110da705e0d207a736c73dd18fe87a1ce58577dec3dd2b8a32fc2b54f5c5 not found: ID does 
not exist" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.759071 4773 scope.go:117] "RemoveContainer" containerID="fcd9317499914cf84f5643fe1d151bc3a96662d9a994dfb59b0df3d515beb04a" Jan 22 13:29:44 crc kubenswrapper[4773]: E0122 13:29:44.759324 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcd9317499914cf84f5643fe1d151bc3a96662d9a994dfb59b0df3d515beb04a\": container with ID starting with fcd9317499914cf84f5643fe1d151bc3a96662d9a994dfb59b0df3d515beb04a not found: ID does not exist" containerID="fcd9317499914cf84f5643fe1d151bc3a96662d9a994dfb59b0df3d515beb04a" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.759359 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcd9317499914cf84f5643fe1d151bc3a96662d9a994dfb59b0df3d515beb04a"} err="failed to get container status \"fcd9317499914cf84f5643fe1d151bc3a96662d9a994dfb59b0df3d515beb04a\": rpc error: code = NotFound desc = could not find container \"fcd9317499914cf84f5643fe1d151bc3a96662d9a994dfb59b0df3d515beb04a\": container with ID starting with fcd9317499914cf84f5643fe1d151bc3a96662d9a994dfb59b0df3d515beb04a not found: ID does not exist" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.759384 4773 scope.go:117] "RemoveContainer" containerID="2fec4985a50f65fd524dd8aa7e9660bb6bd58047c22583ca78054fc0a34a0239" Jan 22 13:29:44 crc kubenswrapper[4773]: I0122 13:29:44.806806 4773 scope.go:117] "RemoveContainer" containerID="c4e7cea8e461b19111f5513f58ad1a94ca7121238f93d2c3fb337945617d3649" Jan 22 13:29:46 crc kubenswrapper[4773]: I0122 13:29:46.668777 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6dd3c78c-e886-415d-85c2-3f41dbc80e76" path="/var/lib/kubelet/pods/6dd3c78c-e886-415d-85c2-3f41dbc80e76/volumes" Jan 22 13:29:46 crc kubenswrapper[4773]: I0122 13:29:46.670189 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c04dd22-513f-4508-8660-744c2bbd66f8" path="/var/lib/kubelet/pods/8c04dd22-513f-4508-8660-744c2bbd66f8/volumes" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.466130 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-jmvhm"] Jan 22 13:29:57 crc kubenswrapper[4773]: E0122 13:29:57.471364 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dd3c78c-e886-415d-85c2-3f41dbc80e76" containerName="neutron-api" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.471411 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dd3c78c-e886-415d-85c2-3f41dbc80e76" containerName="neutron-api" Jan 22 13:29:57 crc kubenswrapper[4773]: E0122 13:29:57.471462 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c04dd22-513f-4508-8660-744c2bbd66f8" containerName="extract-content" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.471475 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c04dd22-513f-4508-8660-744c2bbd66f8" containerName="extract-content" Jan 22 13:29:57 crc kubenswrapper[4773]: E0122 13:29:57.471493 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dd3c78c-e886-415d-85c2-3f41dbc80e76" containerName="neutron-httpd" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.471502 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dd3c78c-e886-415d-85c2-3f41dbc80e76" containerName="neutron-httpd" Jan 22 13:29:57 crc kubenswrapper[4773]: E0122 13:29:57.471537 4773 cpu_manager.go:410] "RemoveStaleState: 
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.466130 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-jmvhm"]
Jan 22 13:29:57 crc kubenswrapper[4773]: E0122 13:29:57.471364 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dd3c78c-e886-415d-85c2-3f41dbc80e76" containerName="neutron-api"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.471411 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dd3c78c-e886-415d-85c2-3f41dbc80e76" containerName="neutron-api"
Jan 22 13:29:57 crc kubenswrapper[4773]: E0122 13:29:57.471462 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c04dd22-513f-4508-8660-744c2bbd66f8" containerName="extract-content"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.471475 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c04dd22-513f-4508-8660-744c2bbd66f8" containerName="extract-content"
Jan 22 13:29:57 crc kubenswrapper[4773]: E0122 13:29:57.471493 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dd3c78c-e886-415d-85c2-3f41dbc80e76" containerName="neutron-httpd"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.471502 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dd3c78c-e886-415d-85c2-3f41dbc80e76" containerName="neutron-httpd"
Jan 22 13:29:57 crc kubenswrapper[4773]: E0122 13:29:57.471537 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c04dd22-513f-4508-8660-744c2bbd66f8" containerName="extract-utilities"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.471548 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c04dd22-513f-4508-8660-744c2bbd66f8" containerName="extract-utilities"
Jan 22 13:29:57 crc kubenswrapper[4773]: E0122 13:29:57.471567 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c04dd22-513f-4508-8660-744c2bbd66f8" containerName="registry-server"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.471574 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c04dd22-513f-4508-8660-744c2bbd66f8" containerName="registry-server"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.471912 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dd3c78c-e886-415d-85c2-3f41dbc80e76" containerName="neutron-httpd"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.471938 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dd3c78c-e886-415d-85c2-3f41dbc80e76" containerName="neutron-api"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.471954 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c04dd22-513f-4508-8660-744c2bbd66f8" containerName="registry-server"
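Before the new swift-ring-rebalance pod is admitted, the CPU and memory managers sweep their checkpointed per-container assignments and purge entries belonging to pods the API no longer knows about (the deleted neutron and community-operators pods); the E-level lines are that sweep flagging each stale entry before state_mem deletes it. The reconciliation amounts to a set difference over (podUID, containerName) keys; a self-contained sketch with illustrative types and values:

```go
// Sketch of a RemoveStaleState-style sweep: drop resource assignments for
// pods that are no longer active. Types and CPU-set strings are illustrative.
package main

import "fmt"

// assignments: podUID -> containerName -> pinned CPU set (as a string here).
type assignments map[string]map[string]string

func removeStaleState(state assignments, activePods map[string]bool) {
	for podUID, containers := range state {
		if activePods[podUID] {
			continue // pod still exists; keep its assignments
		}
		for name := range containers {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", podUID, name)
		}
		delete(state, podUID) // the "Deleted CPUSet assignment" step
	}
}

func main() {
	state := assignments{
		"6dd3c78c-e886-415d-85c2-3f41dbc80e76": {"neutron-api": "0-1", "neutron-httpd": "2-3"},
		"8c04dd22-513f-4508-8660-744c2bbd66f8": {"registry-server": "4"},
	}
	removeStaleState(state, map[string]bool{}) // neither pod is active anymore
	fmt.Println("remaining entries:", len(state))
}
```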
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.472731 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-jmvhm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.480048 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.480256 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.480389 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.481408 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-xk4wd"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.482597 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.519104 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-jmvhm"]
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.562488 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-2k9nw"]
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.563987 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-2k9nw"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.572686 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0f853f59-ea9e-4dd0-84b5-fa2051f28396-scripts\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.572737 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0f853f59-ea9e-4dd0-84b5-fa2051f28396-swiftconf\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.572754 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0f853f59-ea9e-4dd0-84b5-fa2051f28396-ring-data-devices\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.572810 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0f853f59-ea9e-4dd0-84b5-fa2051f28396-dispersionconf\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.572827 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhcss\" (UniqueName: \"kubernetes.io/projected/0f853f59-ea9e-4dd0-84b5-fa2051f28396-kube-api-access-nhcss\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.572851 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0f853f59-ea9e-4dd0-84b5-fa2051f28396-etc-swift\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.572924 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f853f59-ea9e-4dd0-84b5-fa2051f28396-combined-ca-bundle\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.603592 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-jmvhm"]
Jan 22 13:29:57 crc kubenswrapper[4773]: E0122 13:29:57.604252 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-nhcss ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/swift-ring-rebalance-jmvhm" podUID="0f853f59-ea9e-4dd0-84b5-fa2051f28396"
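Note the ordering here: swift-ring-rebalance-jmvhm is deleted by the API while its volumes are still being processed, so the pod worker's context is cancelled mid-sync and the sync exits with the full list of still-unmounted volumes and "context canceled". That error is expected noise for a pod replaced in flight (2k9nw carries on). The mechanism is ordinary Go context plumbing; a minimal sketch with stand-in volume names and timings:

```go
// Sketch: a setup loop that aborts cleanly when the pod worker's context
// is cancelled (e.g. by a SyncLoop DELETE). Names and delays are stand-ins.
package main

import (
	"context"
	"fmt"
	"time"
)

func mountAll(ctx context.Context, volumes []string) error {
	for i, v := range volumes {
		select {
		case <-ctx.Done():
			return fmt.Errorf("unmounted volumes=%v: %w", volumes[i:], ctx.Err())
		case <-time.After(50 * time.Millisecond): // stand-in for real mount work
			fmt.Println("MountVolume.SetUp succeeded for volume", v)
		}
	}
	return nil
}

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	go func() { time.Sleep(120 * time.Millisecond); cancel() }() // the DELETE arrives
	err := mountAll(ctx, []string{"scripts", "swiftconf", "ring-data-devices", "dispersionconf"})
	if err != nil {
		fmt.Println("Error syncing pod, skipping:", err)
	}
}
```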
source="api" pods=["openstack/swift-ring-rebalance-2k9nw"] Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.676879 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2345af62-3a63-4f5d-8a09-e33ee507c372-swiftconf\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.676950 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f853f59-ea9e-4dd0-84b5-fa2051f28396-combined-ca-bundle\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.676986 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2345af62-3a63-4f5d-8a09-e33ee507c372-combined-ca-bundle\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.677039 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2345af62-3a63-4f5d-8a09-e33ee507c372-dispersionconf\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.677069 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0f853f59-ea9e-4dd0-84b5-fa2051f28396-scripts\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.677114 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0f853f59-ea9e-4dd0-84b5-fa2051f28396-swiftconf\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.677149 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0f853f59-ea9e-4dd0-84b5-fa2051f28396-ring-data-devices\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.677181 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2345af62-3a63-4f5d-8a09-e33ee507c372-etc-swift\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.677223 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2345af62-3a63-4f5d-8a09-e33ee507c372-ring-data-devices\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " 
pod="openstack/swift-ring-rebalance-2k9nw" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.677254 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2345af62-3a63-4f5d-8a09-e33ee507c372-scripts\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.677304 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0f853f59-ea9e-4dd0-84b5-fa2051f28396-dispersionconf\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.677333 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhcss\" (UniqueName: \"kubernetes.io/projected/0f853f59-ea9e-4dd0-84b5-fa2051f28396-kube-api-access-nhcss\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.677366 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0f853f59-ea9e-4dd0-84b5-fa2051f28396-etc-swift\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.677406 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98msh\" (UniqueName: \"kubernetes.io/projected/2345af62-3a63-4f5d-8a09-e33ee507c372-kube-api-access-98msh\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.680009 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0f853f59-ea9e-4dd0-84b5-fa2051f28396-etc-swift\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.680597 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0f853f59-ea9e-4dd0-84b5-fa2051f28396-ring-data-devices\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.681044 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0f853f59-ea9e-4dd0-84b5-fa2051f28396-scripts\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.687908 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0f853f59-ea9e-4dd0-84b5-fa2051f28396-dispersionconf\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.688011 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-846f6dc6f9-zqwqm"]
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.689654 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.690017 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0f853f59-ea9e-4dd0-84b5-fa2051f28396-swiftconf\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.695840 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-846f6dc6f9-zqwqm"]
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.697775 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f853f59-ea9e-4dd0-84b5-fa2051f28396-combined-ca-bundle\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.707974 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhcss\" (UniqueName: \"kubernetes.io/projected/0f853f59-ea9e-4dd0-84b5-fa2051f28396-kube-api-access-nhcss\") pod \"swift-ring-rebalance-jmvhm\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") " pod="openstack/swift-ring-rebalance-jmvhm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.776423 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-jmvhm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.779346 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2345af62-3a63-4f5d-8a09-e33ee507c372-etc-swift\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.779392 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2345af62-3a63-4f5d-8a09-e33ee507c372-ring-data-devices\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.779416 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2345af62-3a63-4f5d-8a09-e33ee507c372-scripts\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.779502 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98msh\" (UniqueName: \"kubernetes.io/projected/2345af62-3a63-4f5d-8a09-e33ee507c372-kube-api-access-98msh\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw"
\"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-ovsdbserver-sb\") pod \"dnsmasq-dns-846f6dc6f9-zqwqm\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.779549 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-dns-svc\") pod \"dnsmasq-dns-846f6dc6f9-zqwqm\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.779567 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nrrn\" (UniqueName: \"kubernetes.io/projected/3c871e72-423b-4390-930e-843a1f24c2ef-kube-api-access-2nrrn\") pod \"dnsmasq-dns-846f6dc6f9-zqwqm\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.779585 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-ovsdbserver-nb\") pod \"dnsmasq-dns-846f6dc6f9-zqwqm\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.779626 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2345af62-3a63-4f5d-8a09-e33ee507c372-swiftconf\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.779652 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2345af62-3a63-4f5d-8a09-e33ee507c372-combined-ca-bundle\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.779675 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-config\") pod \"dnsmasq-dns-846f6dc6f9-zqwqm\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.779707 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2345af62-3a63-4f5d-8a09-e33ee507c372-dispersionconf\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.782418 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2345af62-3a63-4f5d-8a09-e33ee507c372-etc-swift\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.783159 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/2345af62-3a63-4f5d-8a09-e33ee507c372-scripts\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.783193 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2345af62-3a63-4f5d-8a09-e33ee507c372-ring-data-devices\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.784755 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2345af62-3a63-4f5d-8a09-e33ee507c372-dispersionconf\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.792950 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2345af62-3a63-4f5d-8a09-e33ee507c372-combined-ca-bundle\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.798159 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2345af62-3a63-4f5d-8a09-e33ee507c372-swiftconf\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.811868 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98msh\" (UniqueName: \"kubernetes.io/projected/2345af62-3a63-4f5d-8a09-e33ee507c372-kube-api-access-98msh\") pod \"swift-ring-rebalance-2k9nw\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") " pod="openstack/swift-ring-rebalance-2k9nw" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.873509 4773 util.go:30] "No sandbox for pod can be found. 
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.873509 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-jmvhm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.881718 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-config\") pod \"dnsmasq-dns-846f6dc6f9-zqwqm\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.881846 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-ovsdbserver-sb\") pod \"dnsmasq-dns-846f6dc6f9-zqwqm\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.881863 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-dns-svc\") pod \"dnsmasq-dns-846f6dc6f9-zqwqm\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.881886 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nrrn\" (UniqueName: \"kubernetes.io/projected/3c871e72-423b-4390-930e-843a1f24c2ef-kube-api-access-2nrrn\") pod \"dnsmasq-dns-846f6dc6f9-zqwqm\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.881904 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-ovsdbserver-nb\") pod \"dnsmasq-dns-846f6dc6f9-zqwqm\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.882963 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-ovsdbserver-nb\") pod \"dnsmasq-dns-846f6dc6f9-zqwqm\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.883532 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-config\") pod \"dnsmasq-dns-846f6dc6f9-zqwqm\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.884202 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-dns-svc\") pod \"dnsmasq-dns-846f6dc6f9-zqwqm\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.884441 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-ovsdbserver-sb\") pod \"dnsmasq-dns-846f6dc6f9-zqwqm\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.895937 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-2k9nw"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.948261 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nrrn\" (UniqueName: \"kubernetes.io/projected/3c871e72-423b-4390-930e-843a1f24c2ef-kube-api-access-2nrrn\") pod \"dnsmasq-dns-846f6dc6f9-zqwqm\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm"
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.985887 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f853f59-ea9e-4dd0-84b5-fa2051f28396-combined-ca-bundle\") pod \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") "
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.985957 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0f853f59-ea9e-4dd0-84b5-fa2051f28396-ring-data-devices\") pod \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") "
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.986119 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nhcss\" (UniqueName: \"kubernetes.io/projected/0f853f59-ea9e-4dd0-84b5-fa2051f28396-kube-api-access-nhcss\") pod \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") "
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.986240 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0f853f59-ea9e-4dd0-84b5-fa2051f28396-etc-swift\") pod \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") "
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.986267 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0f853f59-ea9e-4dd0-84b5-fa2051f28396-swiftconf\") pod \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") "
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.986338 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0f853f59-ea9e-4dd0-84b5-fa2051f28396-dispersionconf\") pod \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") "
Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.986403 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0f853f59-ea9e-4dd0-84b5-fa2051f28396-scripts\") pod \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\" (UID: \"0f853f59-ea9e-4dd0-84b5-fa2051f28396\") "
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.986946 4773 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0f853f59-ea9e-4dd0-84b5-fa2051f28396-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.988638 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f853f59-ea9e-4dd0-84b5-fa2051f28396-scripts" (OuterVolumeSpecName: "scripts") pod "0f853f59-ea9e-4dd0-84b5-fa2051f28396" (UID: "0f853f59-ea9e-4dd0-84b5-fa2051f28396"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.988890 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f853f59-ea9e-4dd0-84b5-fa2051f28396-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "0f853f59-ea9e-4dd0-84b5-fa2051f28396" (UID: "0f853f59-ea9e-4dd0-84b5-fa2051f28396"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:29:57 crc kubenswrapper[4773]: I0122 13:29:57.995528 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f853f59-ea9e-4dd0-84b5-fa2051f28396-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0f853f59-ea9e-4dd0-84b5-fa2051f28396" (UID: "0f853f59-ea9e-4dd0-84b5-fa2051f28396"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.006692 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f853f59-ea9e-4dd0-84b5-fa2051f28396-kube-api-access-nhcss" (OuterVolumeSpecName: "kube-api-access-nhcss") pod "0f853f59-ea9e-4dd0-84b5-fa2051f28396" (UID: "0f853f59-ea9e-4dd0-84b5-fa2051f28396"). InnerVolumeSpecName "kube-api-access-nhcss". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.009668 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f853f59-ea9e-4dd0-84b5-fa2051f28396-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "0f853f59-ea9e-4dd0-84b5-fa2051f28396" (UID: "0f853f59-ea9e-4dd0-84b5-fa2051f28396"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.011444 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f853f59-ea9e-4dd0-84b5-fa2051f28396-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "0f853f59-ea9e-4dd0-84b5-fa2051f28396" (UID: "0f853f59-ea9e-4dd0-84b5-fa2051f28396"). InnerVolumeSpecName "dispersionconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.088253 4773 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0f853f59-ea9e-4dd0-84b5-fa2051f28396-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.088486 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0f853f59-ea9e-4dd0-84b5-fa2051f28396-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.088498 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f853f59-ea9e-4dd0-84b5-fa2051f28396-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.088509 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nhcss\" (UniqueName: \"kubernetes.io/projected/0f853f59-ea9e-4dd0-84b5-fa2051f28396-kube-api-access-nhcss\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.088518 4773 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0f853f59-ea9e-4dd0-84b5-fa2051f28396-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.088529 4773 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0f853f59-ea9e-4dd0-84b5-fa2051f28396-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.168543 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm" Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.498203 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-2k9nw"] Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.720182 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-846f6dc6f9-zqwqm"] Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.790939 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm" event={"ID":"3c871e72-423b-4390-930e-843a1f24c2ef","Type":"ContainerStarted","Data":"f3e4c48175e660a5e469841e1885a13d52cfc60e5a1373436e45d7dd7096efd1"} Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.796822 4773 util.go:30] "No sandbox for pod can be found. 
Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.168543 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm"
Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.498203 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-2k9nw"]
Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.720182 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-846f6dc6f9-zqwqm"]
Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.790939 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm" event={"ID":"3c871e72-423b-4390-930e-843a1f24c2ef","Type":"ContainerStarted","Data":"f3e4c48175e660a5e469841e1885a13d52cfc60e5a1373436e45d7dd7096efd1"}
Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.796822 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-jmvhm"
Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.799353 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-2k9nw" event={"ID":"2345af62-3a63-4f5d-8a09-e33ee507c372","Type":"ContainerStarted","Data":"0e6688d41b0f74375450698ed7b0b7e0d37b220312bf64d49ede1ef0827821d3"}
Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.799418 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-2k9nw" event={"ID":"2345af62-3a63-4f5d-8a09-e33ee507c372","Type":"ContainerStarted","Data":"199055a3f36a5a5e43a6eac71a9dfd590ee1fb8efbceca1887ccc6049be4bcba"}
Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.908695 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-2k9nw" podStartSLOduration=1.908674596 podStartE2EDuration="1.908674596s" podCreationTimestamp="2026-01-22 13:29:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:29:58.855875832 +0000 UTC m=+5706.433991677" watchObservedRunningTime="2026-01-22 13:29:58.908674596 +0000 UTC m=+5706.486790421"
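podStartSLOduration is the kubelet's startup SLI: time from podCreationTimestamp to the moment the running state was observed, minus any image-pull window. Here both pulling timestamps are the zero time because every image was already present, so the SLO and E2E durations coincide, and the reported 1.908674596s is exactly watchObservedRunningTime minus podCreationTimestamp. Redoing the arithmetic from the timestamps in the entry above:

```go
// Recompute the startup duration reported above from its own timestamps.
package main

import (
	"fmt"
	"time"
)

const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func main() {
	created, err := time.Parse(layout, "2026-01-22 13:29:57 +0000 UTC")
	if err != nil {
		panic(err)
	}
	observed, err := time.Parse(layout, "2026-01-22 13:29:58.908674596 +0000 UTC")
	if err != nil {
		panic(err)
	}
	// No pull window to subtract: firstStartedPulling/lastFinishedPulling
	// are the zero time, so the SLO duration equals the E2E duration.
	fmt.Println(observed.Sub(created)) // 1.908674596s
}
```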
Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.954525 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-jmvhm"]
Jan 22 13:29:58 crc kubenswrapper[4773]: I0122 13:29:58.965188 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-jmvhm"]
Jan 22 13:29:59 crc kubenswrapper[4773]: I0122 13:29:59.809723 4773 generic.go:334] "Generic (PLEG): container finished" podID="3c871e72-423b-4390-930e-843a1f24c2ef" containerID="4493a71c8473fcb4b98525c32baf48effc78d0d20fa9df9423f3a6011b050479" exitCode=0
Jan 22 13:29:59 crc kubenswrapper[4773]: I0122 13:29:59.809840 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm" event={"ID":"3c871e72-423b-4390-930e-843a1f24c2ef","Type":"ContainerDied","Data":"4493a71c8473fcb4b98525c32baf48effc78d0d20fa9df9423f3a6011b050479"}
Jan 22 13:30:00 crc kubenswrapper[4773]: I0122 13:30:00.143922 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq"]
Jan 22 13:30:00 crc kubenswrapper[4773]: I0122 13:30:00.147141 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq"
Jan 22 13:30:00 crc kubenswrapper[4773]: I0122 13:30:00.150368 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 22 13:30:00 crc kubenswrapper[4773]: I0122 13:30:00.152179 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 22 13:30:00 crc kubenswrapper[4773]: I0122 13:30:00.162361 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq"]
Jan 22 13:30:00 crc kubenswrapper[4773]: I0122 13:30:00.234953 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lwwc\" (UniqueName: \"kubernetes.io/projected/84cab1d5-65aa-4c69-854e-f1e1d7f87c08-kube-api-access-9lwwc\") pod \"collect-profiles-29484810-gvgcq\" (UID: \"84cab1d5-65aa-4c69-854e-f1e1d7f87c08\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq"
Jan 22 13:30:00 crc kubenswrapper[4773]: I0122 13:30:00.235387 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/84cab1d5-65aa-4c69-854e-f1e1d7f87c08-secret-volume\") pod \"collect-profiles-29484810-gvgcq\" (UID: \"84cab1d5-65aa-4c69-854e-f1e1d7f87c08\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq"
Jan 22 13:30:00 crc kubenswrapper[4773]: I0122 13:30:00.235518 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/84cab1d5-65aa-4c69-854e-f1e1d7f87c08-config-volume\") pod \"collect-profiles-29484810-gvgcq\" (UID: \"84cab1d5-65aa-4c69-854e-f1e1d7f87c08\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq"
Jan 22 13:30:00 crc kubenswrapper[4773]: I0122 13:30:00.337444 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lwwc\" (UniqueName: \"kubernetes.io/projected/84cab1d5-65aa-4c69-854e-f1e1d7f87c08-kube-api-access-9lwwc\") pod \"collect-profiles-29484810-gvgcq\" (UID: \"84cab1d5-65aa-4c69-854e-f1e1d7f87c08\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq"
Jan 22 13:30:00 crc kubenswrapper[4773]: I0122 13:30:00.337538 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/84cab1d5-65aa-4c69-854e-f1e1d7f87c08-secret-volume\") pod \"collect-profiles-29484810-gvgcq\" (UID: \"84cab1d5-65aa-4c69-854e-f1e1d7f87c08\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq"
Jan 22 13:30:00 crc kubenswrapper[4773]: I0122 13:30:00.337626 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/84cab1d5-65aa-4c69-854e-f1e1d7f87c08-config-volume\") pod \"collect-profiles-29484810-gvgcq\" (UID: \"84cab1d5-65aa-4c69-854e-f1e1d7f87c08\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq"
\"collect-profiles-29484810-gvgcq\" (UID: \"84cab1d5-65aa-4c69-854e-f1e1d7f87c08\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq" Jan 22 13:30:00 crc kubenswrapper[4773]: I0122 13:30:00.343923 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/84cab1d5-65aa-4c69-854e-f1e1d7f87c08-secret-volume\") pod \"collect-profiles-29484810-gvgcq\" (UID: \"84cab1d5-65aa-4c69-854e-f1e1d7f87c08\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq" Jan 22 13:30:00 crc kubenswrapper[4773]: I0122 13:30:00.355968 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lwwc\" (UniqueName: \"kubernetes.io/projected/84cab1d5-65aa-4c69-854e-f1e1d7f87c08-kube-api-access-9lwwc\") pod \"collect-profiles-29484810-gvgcq\" (UID: \"84cab1d5-65aa-4c69-854e-f1e1d7f87c08\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq" Jan 22 13:30:00 crc kubenswrapper[4773]: I0122 13:30:00.486760 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq" Jan 22 13:30:00 crc kubenswrapper[4773]: I0122 13:30:00.672115 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f853f59-ea9e-4dd0-84b5-fa2051f28396" path="/var/lib/kubelet/pods/0f853f59-ea9e-4dd0-84b5-fa2051f28396/volumes" Jan 22 13:30:00 crc kubenswrapper[4773]: I0122 13:30:00.928749 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm" event={"ID":"3c871e72-423b-4390-930e-843a1f24c2ef","Type":"ContainerStarted","Data":"97ea39e12b895cfbfb3057514322954bdb50697f9b2cf264a89a8fc97b197890"} Jan 22 13:30:00 crc kubenswrapper[4773]: I0122 13:30:00.929458 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm" Jan 22 13:30:00 crc kubenswrapper[4773]: I0122 13:30:00.985071 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm" podStartSLOduration=3.985044214 podStartE2EDuration="3.985044214s" podCreationTimestamp="2026-01-22 13:29:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:30:00.97385417 +0000 UTC m=+5708.551970005" watchObservedRunningTime="2026-01-22 13:30:00.985044214 +0000 UTC m=+5708.563160049" Jan 22 13:30:01 crc kubenswrapper[4773]: W0122 13:30:01.056869 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84cab1d5_65aa_4c69_854e_f1e1d7f87c08.slice/crio-f2b53aea856b02f043d2220445ef8d333cd27de05c28f359cdfe01cb4fe9f42a WatchSource:0}: Error finding container f2b53aea856b02f043d2220445ef8d333cd27de05c28f359cdfe01cb4fe9f42a: Status 404 returned error can't find the container with id f2b53aea856b02f043d2220445ef8d333cd27de05c28f359cdfe01cb4fe9f42a Jan 22 13:30:01 crc kubenswrapper[4773]: I0122 13:30:01.060983 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq"] Jan 22 13:30:01 crc kubenswrapper[4773]: I0122 13:30:01.954022 4773 generic.go:334] "Generic (PLEG): container finished" podID="84cab1d5-65aa-4c69-854e-f1e1d7f87c08" containerID="f932dd6b501dc64f80a735fa6c9653b9a45c3709693388758fda1100364049c2" exitCode=0 Jan 22 13:30:01 crc kubenswrapper[4773]: 
Jan 22 13:30:01 crc kubenswrapper[4773]: I0122 13:30:01.954095 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq" event={"ID":"84cab1d5-65aa-4c69-854e-f1e1d7f87c08","Type":"ContainerDied","Data":"f932dd6b501dc64f80a735fa6c9653b9a45c3709693388758fda1100364049c2"}
Jan 22 13:30:01 crc kubenswrapper[4773]: I0122 13:30:01.954166 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq" event={"ID":"84cab1d5-65aa-4c69-854e-f1e1d7f87c08","Type":"ContainerStarted","Data":"f2b53aea856b02f043d2220445ef8d333cd27de05c28f359cdfe01cb4fe9f42a"}
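The "Generic (PLEG)" and "SyncLoop (PLEG)" lines are the pod lifecycle event generator at work: it periodically relists containers from the runtime, diffs the snapshot against the previous one, and synthesizes ContainerStarted/ContainerDied events that wake the sync loop. Here the collect-profiles Job container exits 0, so the pod completes and its volumes are unmounted a couple of seconds later. The core is a snapshot diff; a toy version with illustrative state strings:

```go
// Toy PLEG: diff two containerID -> state snapshots and emit the events
// that would wake the pod sync loop. State names are illustrative.
package main

import "fmt"

func diff(prev, curr map[string]string) []string {
	var events []string
	for id, state := range curr {
		switch {
		case state == "running" && prev[id] != "running":
			events = append(events, "ContainerStarted "+id)
		case state == "exited" && prev[id] == "running":
			events = append(events, "ContainerDied "+id)
		}
	}
	return events
}

func main() {
	prev := map[string]string{"f932dd6b": "running"}
	curr := map[string]string{"f932dd6b": "exited", "f2b53aea": "running"}
	for _, e := range diff(prev, curr) {
		fmt.Println(e)
	}
}
```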
\"kubernetes.io/projected/38517212-420f-4320-ab7f-59390933f529-etc-swift\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: \"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.197812 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38517212-420f-4320-ab7f-59390933f529-log-httpd\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: \"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.197986 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38517212-420f-4320-ab7f-59390933f529-combined-ca-bundle\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: \"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.198036 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38517212-420f-4320-ab7f-59390933f529-run-httpd\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: \"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.299646 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/38517212-420f-4320-ab7f-59390933f529-internal-tls-certs\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: \"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.299968 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/38517212-420f-4320-ab7f-59390933f529-etc-swift\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: \"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.300017 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38517212-420f-4320-ab7f-59390933f529-log-httpd\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: \"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.300066 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38517212-420f-4320-ab7f-59390933f529-combined-ca-bundle\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: \"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.300091 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38517212-420f-4320-ab7f-59390933f529-run-httpd\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: \"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.300151 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8szw\" (UniqueName: 
\"kubernetes.io/projected/38517212-420f-4320-ab7f-59390933f529-kube-api-access-h8szw\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: \"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.300184 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/38517212-420f-4320-ab7f-59390933f529-public-tls-certs\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: \"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.300205 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38517212-420f-4320-ab7f-59390933f529-config-data\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: \"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.302539 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38517212-420f-4320-ab7f-59390933f529-run-httpd\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: \"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.302872 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38517212-420f-4320-ab7f-59390933f529-log-httpd\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: \"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.305955 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/38517212-420f-4320-ab7f-59390933f529-internal-tls-certs\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: \"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.306301 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38517212-420f-4320-ab7f-59390933f529-config-data\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: \"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.306834 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/38517212-420f-4320-ab7f-59390933f529-etc-swift\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: \"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.306912 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38517212-420f-4320-ab7f-59390933f529-combined-ca-bundle\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: \"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.308166 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/38517212-420f-4320-ab7f-59390933f529-public-tls-certs\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: 
\"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.321707 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8szw\" (UniqueName: \"kubernetes.io/projected/38517212-420f-4320-ab7f-59390933f529-kube-api-access-h8szw\") pod \"swift-proxy-6ddb845654-drmk5\" (UID: \"38517212-420f-4320-ab7f-59390933f529\") " pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.388388 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.461828 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.502662 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/84cab1d5-65aa-4c69-854e-f1e1d7f87c08-secret-volume\") pod \"84cab1d5-65aa-4c69-854e-f1e1d7f87c08\" (UID: \"84cab1d5-65aa-4c69-854e-f1e1d7f87c08\") " Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.504454 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/84cab1d5-65aa-4c69-854e-f1e1d7f87c08-config-volume\") pod \"84cab1d5-65aa-4c69-854e-f1e1d7f87c08\" (UID: \"84cab1d5-65aa-4c69-854e-f1e1d7f87c08\") " Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.504564 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9lwwc\" (UniqueName: \"kubernetes.io/projected/84cab1d5-65aa-4c69-854e-f1e1d7f87c08-kube-api-access-9lwwc\") pod \"84cab1d5-65aa-4c69-854e-f1e1d7f87c08\" (UID: \"84cab1d5-65aa-4c69-854e-f1e1d7f87c08\") " Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.505697 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84cab1d5-65aa-4c69-854e-f1e1d7f87c08-config-volume" (OuterVolumeSpecName: "config-volume") pod "84cab1d5-65aa-4c69-854e-f1e1d7f87c08" (UID: "84cab1d5-65aa-4c69-854e-f1e1d7f87c08"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.507254 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84cab1d5-65aa-4c69-854e-f1e1d7f87c08-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "84cab1d5-65aa-4c69-854e-f1e1d7f87c08" (UID: "84cab1d5-65aa-4c69-854e-f1e1d7f87c08"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.508601 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84cab1d5-65aa-4c69-854e-f1e1d7f87c08-kube-api-access-9lwwc" (OuterVolumeSpecName: "kube-api-access-9lwwc") pod "84cab1d5-65aa-4c69-854e-f1e1d7f87c08" (UID: "84cab1d5-65aa-4c69-854e-f1e1d7f87c08"). InnerVolumeSpecName "kube-api-access-9lwwc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.607509 4773 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/84cab1d5-65aa-4c69-854e-f1e1d7f87c08-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.607834 4773 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/84cab1d5-65aa-4c69-854e-f1e1d7f87c08-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.607852 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9lwwc\" (UniqueName: \"kubernetes.io/projected/84cab1d5-65aa-4c69-854e-f1e1d7f87c08-kube-api-access-9lwwc\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.974016 4773 generic.go:334] "Generic (PLEG): container finished" podID="2345af62-3a63-4f5d-8a09-e33ee507c372" containerID="0e6688d41b0f74375450698ed7b0b7e0d37b220312bf64d49ede1ef0827821d3" exitCode=0 Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.974086 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-2k9nw" event={"ID":"2345af62-3a63-4f5d-8a09-e33ee507c372","Type":"ContainerDied","Data":"0e6688d41b0f74375450698ed7b0b7e0d37b220312bf64d49ede1ef0827821d3"} Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.976943 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq" event={"ID":"84cab1d5-65aa-4c69-854e-f1e1d7f87c08","Type":"ContainerDied","Data":"f2b53aea856b02f043d2220445ef8d333cd27de05c28f359cdfe01cb4fe9f42a"} Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.976980 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2b53aea856b02f043d2220445ef8d333cd27de05c28f359cdfe01cb4fe9f42a" Jan 22 13:30:03 crc kubenswrapper[4773]: I0122 13:30:03.977020 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq"
Jan 22 13:30:04 crc kubenswrapper[4773]: I0122 13:30:04.073899 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 13:30:04 crc kubenswrapper[4773]: I0122 13:30:04.073974 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 13:30:04 crc kubenswrapper[4773]: I0122 13:30:04.074036 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5"
Jan 22 13:30:04 crc kubenswrapper[4773]: I0122 13:30:04.074953 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 22 13:30:04 crc kubenswrapper[4773]: I0122 13:30:04.075027 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" gracePeriod=600
Jan 22 13:30:04 crc kubenswrapper[4773]: I0122 13:30:04.130091 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6ddb845654-drmk5"]
Jan 22 13:30:04 crc kubenswrapper[4773]: E0122 13:30:04.205038 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:30:04 crc kubenswrapper[4773]: I0122 13:30:04.479790 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f"]
Jan 22 13:30:04 crc kubenswrapper[4773]: I0122 13:30:04.488404 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484765-bj72f"]
Jan 22 13:30:04 crc kubenswrapper[4773]: I0122 13:30:04.674390 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74ce6389-b348-4f77-adc5-832d46e5f3cf" path="/var/lib/kubelet/pods/74ce6389-b348-4f77-adc5-832d46e5f3cf/volumes"
Jan 22 13:30:04 crc kubenswrapper[4773]: I0122 13:30:04.752909 4773 scope.go:117] "RemoveContainer" containerID="db576f384296f5a43a90c92a5975a7411a7af9b5d4a993677c9a824545f8ac2a"
Jan 22 13:30:04 crc kubenswrapper[4773]: I0122 13:30:04.774449 4773 scope.go:117] "RemoveContainer" containerID="ab459075581e81285a4568c9e59b5306c474c0a753ed56a273e932a886a197a8"
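The run above is the kubelet's standard liveness-failure path: the prober records the refused connection, the sync loop marks the container unhealthy, the runtime manager kills it with the pod's grace period, and the restart is then throttled by CrashLoopBackOff (capped at 5m0s, per the error text). A sketch of a probe spec consistent with the logged GET http://127.0.0.1:8798/health; the period and threshold are assumptions, since the log does not record them:

package main

import (
	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

func main() {
	// Liveness probe consistent with the failure output logged above.
	probe := &corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{
				Host: "127.0.0.1",
				Path: "/health",
				Port: intstr.FromInt(8798),
			},
		},
		PeriodSeconds:    10, // assumed; not recorded in the log
		FailureThreshold: 3,  // assumed; not recorded in the log
	}
	_ = probe
}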
Jan 22 13:30:04 crc kubenswrapper[4773]: I0122 13:30:04.991034 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6ddb845654-drmk5" event={"ID":"38517212-420f-4320-ab7f-59390933f529","Type":"ContainerStarted","Data":"0cb835bf3365cc1dbfd4b5e6936354a8ad31d42ce081ea2624ccea8990f91561"}
Jan 22 13:30:04 crc kubenswrapper[4773]: I0122 13:30:04.991657 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6ddb845654-drmk5" event={"ID":"38517212-420f-4320-ab7f-59390933f529","Type":"ContainerStarted","Data":"781df55182356d792660064fe2e690b69d541dd59e94800b86e250e383edd68f"}
Jan 22 13:30:04 crc kubenswrapper[4773]: I0122 13:30:04.991712 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6ddb845654-drmk5"
Jan 22 13:30:04 crc kubenswrapper[4773]: I0122 13:30:04.991739 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6ddb845654-drmk5" event={"ID":"38517212-420f-4320-ab7f-59390933f529","Type":"ContainerStarted","Data":"42473e69c1543ac4716da547acc8f98261c39c661f6a0126e2faf907d92b0977"}
Jan 22 13:30:04 crc kubenswrapper[4773]: I0122 13:30:04.991772 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6ddb845654-drmk5"
Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.004728 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" exitCode=0
Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.004953 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506"}
Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.004987 4773 scope.go:117] "RemoveContainer" containerID="22f45b11bbee937fdf871ae5de53af8235a219a99f6dd311534489dc7fbcd516"
Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.005611 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506"
Jan 22 13:30:05 crc kubenswrapper[4773]: E0122 13:30:05.005837 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.038760 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-6ddb845654-drmk5" podStartSLOduration=2.038737166 podStartE2EDuration="2.038737166s" podCreationTimestamp="2026-01-22 13:30:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:30:05.032203153 +0000 UTC m=+5712.610318998" watchObservedRunningTime="2026-01-22 13:30:05.038737166 +0000 UTC m=+5712.616853001"
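The reported podStartSLOduration is the watch-observed running time minus podCreationTimestamp: 13:30:05.038737166 minus 13:30:03 gives 2.038737166s, with the pull timestamps left at their zero value because no image pull was needed. The arithmetic, reproduced from the logged values:

package main

import (
	"fmt"
	"time"
)

func main() {
	// podStartSLOduration = watchObservedRunningTime - podCreationTimestamp,
	// using the values from the log entry above.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, _ := time.Parse(layout, "2026-01-22 13:30:03 +0000 UTC")
	running, _ := time.Parse(layout, "2026-01-22 13:30:05.038737166 +0000 UTC")
	fmt.Println(running.Sub(created)) // 2.038737166s
}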
Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.327474 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-2k9nw"
Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.349458 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98msh\" (UniqueName: \"kubernetes.io/projected/2345af62-3a63-4f5d-8a09-e33ee507c372-kube-api-access-98msh\") pod \"2345af62-3a63-4f5d-8a09-e33ee507c372\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") "
Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.349635 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2345af62-3a63-4f5d-8a09-e33ee507c372-swiftconf\") pod \"2345af62-3a63-4f5d-8a09-e33ee507c372\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") "
Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.349718 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2345af62-3a63-4f5d-8a09-e33ee507c372-combined-ca-bundle\") pod \"2345af62-3a63-4f5d-8a09-e33ee507c372\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") "
Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.349773 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2345af62-3a63-4f5d-8a09-e33ee507c372-scripts\") pod \"2345af62-3a63-4f5d-8a09-e33ee507c372\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") "
Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.349834 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2345af62-3a63-4f5d-8a09-e33ee507c372-etc-swift\") pod \"2345af62-3a63-4f5d-8a09-e33ee507c372\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") "
Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.349898 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2345af62-3a63-4f5d-8a09-e33ee507c372-ring-data-devices\") pod \"2345af62-3a63-4f5d-8a09-e33ee507c372\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") "
Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.349935 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2345af62-3a63-4f5d-8a09-e33ee507c372-dispersionconf\") pod \"2345af62-3a63-4f5d-8a09-e33ee507c372\" (UID: \"2345af62-3a63-4f5d-8a09-e33ee507c372\") "
Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.350642 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2345af62-3a63-4f5d-8a09-e33ee507c372-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "2345af62-3a63-4f5d-8a09-e33ee507c372" (UID: "2345af62-3a63-4f5d-8a09-e33ee507c372"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.351488 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2345af62-3a63-4f5d-8a09-e33ee507c372-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "2345af62-3a63-4f5d-8a09-e33ee507c372" (UID: "2345af62-3a63-4f5d-8a09-e33ee507c372"). InnerVolumeSpecName "etc-swift".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.354969 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2345af62-3a63-4f5d-8a09-e33ee507c372-kube-api-access-98msh" (OuterVolumeSpecName: "kube-api-access-98msh") pod "2345af62-3a63-4f5d-8a09-e33ee507c372" (UID: "2345af62-3a63-4f5d-8a09-e33ee507c372"). InnerVolumeSpecName "kube-api-access-98msh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.360151 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2345af62-3a63-4f5d-8a09-e33ee507c372-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "2345af62-3a63-4f5d-8a09-e33ee507c372" (UID: "2345af62-3a63-4f5d-8a09-e33ee507c372"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.379039 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2345af62-3a63-4f5d-8a09-e33ee507c372-scripts" (OuterVolumeSpecName: "scripts") pod "2345af62-3a63-4f5d-8a09-e33ee507c372" (UID: "2345af62-3a63-4f5d-8a09-e33ee507c372"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.388454 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2345af62-3a63-4f5d-8a09-e33ee507c372-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "2345af62-3a63-4f5d-8a09-e33ee507c372" (UID: "2345af62-3a63-4f5d-8a09-e33ee507c372"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.391124 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2345af62-3a63-4f5d-8a09-e33ee507c372-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2345af62-3a63-4f5d-8a09-e33ee507c372" (UID: "2345af62-3a63-4f5d-8a09-e33ee507c372"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.453571 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2345af62-3a63-4f5d-8a09-e33ee507c372-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.453619 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2345af62-3a63-4f5d-8a09-e33ee507c372-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.453630 4773 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2345af62-3a63-4f5d-8a09-e33ee507c372-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.453641 4773 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2345af62-3a63-4f5d-8a09-e33ee507c372-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.453652 4773 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2345af62-3a63-4f5d-8a09-e33ee507c372-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.453666 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98msh\" (UniqueName: \"kubernetes.io/projected/2345af62-3a63-4f5d-8a09-e33ee507c372-kube-api-access-98msh\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:05 crc kubenswrapper[4773]: I0122 13:30:05.453678 4773 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2345af62-3a63-4f5d-8a09-e33ee507c372-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:06 crc kubenswrapper[4773]: I0122 13:30:06.015833 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-2k9nw" event={"ID":"2345af62-3a63-4f5d-8a09-e33ee507c372","Type":"ContainerDied","Data":"199055a3f36a5a5e43a6eac71a9dfd590ee1fb8efbceca1887ccc6049be4bcba"} Jan 22 13:30:06 crc kubenswrapper[4773]: I0122 13:30:06.016045 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="199055a3f36a5a5e43a6eac71a9dfd590ee1fb8efbceca1887ccc6049be4bcba" Jan 22 13:30:06 crc kubenswrapper[4773]: I0122 13:30:06.015937 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-2k9nw" Jan 22 13:30:08 crc kubenswrapper[4773]: I0122 13:30:08.170692 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm" Jan 22 13:30:08 crc kubenswrapper[4773]: I0122 13:30:08.252905 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6db946c6ff-qsl54"] Jan 22 13:30:08 crc kubenswrapper[4773]: I0122 13:30:08.253672 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" podUID="9adc3ff0-321e-4763-8e3d-b9767a3b976e" containerName="dnsmasq-dns" containerID="cri-o://f23456267e2e77f7676c01877d773043ccc47ddb72615026e0a7ad0a779581c1" gracePeriod=10 Jan 22 13:30:08 crc kubenswrapper[4773]: I0122 13:30:08.791595 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" Jan 22 13:30:08 crc kubenswrapper[4773]: I0122 13:30:08.826086 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-ovsdbserver-sb\") pod \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\" (UID: \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " Jan 22 13:30:08 crc kubenswrapper[4773]: I0122 13:30:08.826169 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-config\") pod \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\" (UID: \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " Jan 22 13:30:08 crc kubenswrapper[4773]: I0122 13:30:08.826239 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xqth\" (UniqueName: \"kubernetes.io/projected/9adc3ff0-321e-4763-8e3d-b9767a3b976e-kube-api-access-4xqth\") pod \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\" (UID: \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " Jan 22 13:30:08 crc kubenswrapper[4773]: I0122 13:30:08.826265 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-ovsdbserver-nb\") pod \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\" (UID: \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " Jan 22 13:30:08 crc kubenswrapper[4773]: I0122 13:30:08.826838 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-dns-svc\") pod \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\" (UID: \"9adc3ff0-321e-4763-8e3d-b9767a3b976e\") " Jan 22 13:30:08 crc kubenswrapper[4773]: I0122 13:30:08.846353 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9adc3ff0-321e-4763-8e3d-b9767a3b976e-kube-api-access-4xqth" (OuterVolumeSpecName: "kube-api-access-4xqth") pod "9adc3ff0-321e-4763-8e3d-b9767a3b976e" (UID: "9adc3ff0-321e-4763-8e3d-b9767a3b976e"). InnerVolumeSpecName "kube-api-access-4xqth". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:30:08 crc kubenswrapper[4773]: I0122 13:30:08.872209 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9adc3ff0-321e-4763-8e3d-b9767a3b976e" (UID: "9adc3ff0-321e-4763-8e3d-b9767a3b976e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:30:08 crc kubenswrapper[4773]: I0122 13:30:08.882157 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9adc3ff0-321e-4763-8e3d-b9767a3b976e" (UID: "9adc3ff0-321e-4763-8e3d-b9767a3b976e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:30:08 crc kubenswrapper[4773]: I0122 13:30:08.884267 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-config" (OuterVolumeSpecName: "config") pod "9adc3ff0-321e-4763-8e3d-b9767a3b976e" (UID: "9adc3ff0-321e-4763-8e3d-b9767a3b976e"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:30:08 crc kubenswrapper[4773]: I0122 13:30:08.886924 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9adc3ff0-321e-4763-8e3d-b9767a3b976e" (UID: "9adc3ff0-321e-4763-8e3d-b9767a3b976e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:30:08 crc kubenswrapper[4773]: I0122 13:30:08.929221 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xqth\" (UniqueName: \"kubernetes.io/projected/9adc3ff0-321e-4763-8e3d-b9767a3b976e-kube-api-access-4xqth\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:08 crc kubenswrapper[4773]: I0122 13:30:08.929256 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:08 crc kubenswrapper[4773]: I0122 13:30:08.929267 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:08 crc kubenswrapper[4773]: I0122 13:30:08.929276 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:08 crc kubenswrapper[4773]: I0122 13:30:08.929303 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9adc3ff0-321e-4763-8e3d-b9767a3b976e-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:09 crc kubenswrapper[4773]: I0122 13:30:09.061515 4773 generic.go:334] "Generic (PLEG): container finished" podID="9adc3ff0-321e-4763-8e3d-b9767a3b976e" containerID="f23456267e2e77f7676c01877d773043ccc47ddb72615026e0a7ad0a779581c1" exitCode=0 Jan 22 13:30:09 crc kubenswrapper[4773]: I0122 13:30:09.061594 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6db946c6ff-qsl54"
Jan 22 13:30:09 crc kubenswrapper[4773]: I0122 13:30:09.061610 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" event={"ID":"9adc3ff0-321e-4763-8e3d-b9767a3b976e","Type":"ContainerDied","Data":"f23456267e2e77f7676c01877d773043ccc47ddb72615026e0a7ad0a779581c1"}
Jan 22 13:30:09 crc kubenswrapper[4773]: I0122 13:30:09.061720 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6db946c6ff-qsl54" event={"ID":"9adc3ff0-321e-4763-8e3d-b9767a3b976e","Type":"ContainerDied","Data":"83c2421b983d2b95814531eed7250098b115318c825e97a5394ddef2be19aa46"}
Jan 22 13:30:09 crc kubenswrapper[4773]: I0122 13:30:09.061751 4773 scope.go:117] "RemoveContainer" containerID="f23456267e2e77f7676c01877d773043ccc47ddb72615026e0a7ad0a779581c1"
Jan 22 13:30:09 crc kubenswrapper[4773]: I0122 13:30:09.096698 4773 scope.go:117] "RemoveContainer" containerID="3bdc0a59682c6e90b4cdc0e7dee9d916e6fbb9d0e3dc2f46242db6c6ddf2cb02"
Jan 22 13:30:09 crc kubenswrapper[4773]: I0122 13:30:09.099109 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6db946c6ff-qsl54"]
Jan 22 13:30:09 crc kubenswrapper[4773]: I0122 13:30:09.108180 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6db946c6ff-qsl54"]
Jan 22 13:30:09 crc kubenswrapper[4773]: I0122 13:30:09.122010 4773 scope.go:117] "RemoveContainer" containerID="f23456267e2e77f7676c01877d773043ccc47ddb72615026e0a7ad0a779581c1"
Jan 22 13:30:09 crc kubenswrapper[4773]: E0122 13:30:09.122522 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f23456267e2e77f7676c01877d773043ccc47ddb72615026e0a7ad0a779581c1\": container with ID starting with f23456267e2e77f7676c01877d773043ccc47ddb72615026e0a7ad0a779581c1 not found: ID does not exist" containerID="f23456267e2e77f7676c01877d773043ccc47ddb72615026e0a7ad0a779581c1"
Jan 22 13:30:09 crc kubenswrapper[4773]: I0122 13:30:09.122581 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f23456267e2e77f7676c01877d773043ccc47ddb72615026e0a7ad0a779581c1"} err="failed to get container status \"f23456267e2e77f7676c01877d773043ccc47ddb72615026e0a7ad0a779581c1\": rpc error: code = NotFound desc = could not find container \"f23456267e2e77f7676c01877d773043ccc47ddb72615026e0a7ad0a779581c1\": container with ID starting with f23456267e2e77f7676c01877d773043ccc47ddb72615026e0a7ad0a779581c1 not found: ID does not exist"
Jan 22 13:30:09 crc kubenswrapper[4773]: I0122 13:30:09.122616 4773 scope.go:117] "RemoveContainer" containerID="3bdc0a59682c6e90b4cdc0e7dee9d916e6fbb9d0e3dc2f46242db6c6ddf2cb02"
Jan 22 13:30:09 crc kubenswrapper[4773]: E0122 13:30:09.122966 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bdc0a59682c6e90b4cdc0e7dee9d916e6fbb9d0e3dc2f46242db6c6ddf2cb02\": container with ID starting with 3bdc0a59682c6e90b4cdc0e7dee9d916e6fbb9d0e3dc2f46242db6c6ddf2cb02 not found: ID does not exist" containerID="3bdc0a59682c6e90b4cdc0e7dee9d916e6fbb9d0e3dc2f46242db6c6ddf2cb02"
Jan 22 13:30:09 crc kubenswrapper[4773]: I0122 13:30:09.123001 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bdc0a59682c6e90b4cdc0e7dee9d916e6fbb9d0e3dc2f46242db6c6ddf2cb02"} err="failed to get container status \"3bdc0a59682c6e90b4cdc0e7dee9d916e6fbb9d0e3dc2f46242db6c6ddf2cb02\": rpc error: code = NotFound desc = could not find container \"3bdc0a59682c6e90b4cdc0e7dee9d916e6fbb9d0e3dc2f46242db6c6ddf2cb02\": container with ID starting with 3bdc0a59682c6e90b4cdc0e7dee9d916e6fbb9d0e3dc2f46242db6c6ddf2cb02 not found: ID does not exist"
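The NotFound pair above is a benign race: the container was already removed, so the follow-up ContainerStatus call against the CRI runtime fails with gRPC code NotFound and DeleteContainer merely reports it. A sketch of how such an error can be classified, assuming the standard google.golang.org/grpc status package that the "rpc error: code = NotFound" text comes from:

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// alreadyGone reports whether an error is the benign "container not found"
// case seen above, by inspecting its gRPC status code.
func alreadyGone(err error) bool {
	s, ok := status.FromError(err)
	return ok && s.Code() == codes.NotFound
}

func main() {
	err := status.Error(codes.NotFound, "could not find container")
	fmt.Println(alreadyGone(err)) // true
}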
\"3bdc0a59682c6e90b4cdc0e7dee9d916e6fbb9d0e3dc2f46242db6c6ddf2cb02\": rpc error: code = NotFound desc = could not find container \"3bdc0a59682c6e90b4cdc0e7dee9d916e6fbb9d0e3dc2f46242db6c6ddf2cb02\": container with ID starting with 3bdc0a59682c6e90b4cdc0e7dee9d916e6fbb9d0e3dc2f46242db6c6ddf2cb02 not found: ID does not exist" Jan 22 13:30:10 crc kubenswrapper[4773]: I0122 13:30:10.674510 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9adc3ff0-321e-4763-8e3d-b9767a3b976e" path="/var/lib/kubelet/pods/9adc3ff0-321e-4763-8e3d-b9767a3b976e/volumes" Jan 22 13:30:13 crc kubenswrapper[4773]: I0122 13:30:13.467207 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:13 crc kubenswrapper[4773]: I0122 13:30:13.471201 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6ddb845654-drmk5" Jan 22 13:30:18 crc kubenswrapper[4773]: I0122 13:30:18.657717 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:30:18 crc kubenswrapper[4773]: E0122 13:30:18.658374 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.321340 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-nfjnh"] Jan 22 13:30:19 crc kubenswrapper[4773]: E0122 13:30:19.321715 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9adc3ff0-321e-4763-8e3d-b9767a3b976e" containerName="init" Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.321734 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="9adc3ff0-321e-4763-8e3d-b9767a3b976e" containerName="init" Jan 22 13:30:19 crc kubenswrapper[4773]: E0122 13:30:19.321754 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84cab1d5-65aa-4c69-854e-f1e1d7f87c08" containerName="collect-profiles" Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.321761 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="84cab1d5-65aa-4c69-854e-f1e1d7f87c08" containerName="collect-profiles" Jan 22 13:30:19 crc kubenswrapper[4773]: E0122 13:30:19.321769 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9adc3ff0-321e-4763-8e3d-b9767a3b976e" containerName="dnsmasq-dns" Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.321776 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="9adc3ff0-321e-4763-8e3d-b9767a3b976e" containerName="dnsmasq-dns" Jan 22 13:30:19 crc kubenswrapper[4773]: E0122 13:30:19.321794 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2345af62-3a63-4f5d-8a09-e33ee507c372" containerName="swift-ring-rebalance" Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.321799 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2345af62-3a63-4f5d-8a09-e33ee507c372" containerName="swift-ring-rebalance" Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.321946 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="84cab1d5-65aa-4c69-854e-f1e1d7f87c08" containerName="collect-profiles" Jan 22 
Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.321959 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="2345af62-3a63-4f5d-8a09-e33ee507c372" containerName="swift-ring-rebalance"
Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.321970 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="9adc3ff0-321e-4763-8e3d-b9767a3b976e" containerName="dnsmasq-dns"
Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.322580 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-nfjnh"
Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.332530 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-nfjnh"]
Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.431053 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-ddef-account-create-update-4qzfx"]
Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.432204 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-ddef-account-create-update-4qzfx"
Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.434436 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret"
Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.439311 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-ddef-account-create-update-4qzfx"]
Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.441231 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/baf43c35-2259-4842-ba3b-cd8ed4ffce8c-operator-scripts\") pod \"cinder-db-create-nfjnh\" (UID: \"baf43c35-2259-4842-ba3b-cd8ed4ffce8c\") " pod="openstack/cinder-db-create-nfjnh"
Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.441266 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9lvk\" (UniqueName: \"kubernetes.io/projected/baf43c35-2259-4842-ba3b-cd8ed4ffce8c-kube-api-access-q9lvk\") pod \"cinder-db-create-nfjnh\" (UID: \"baf43c35-2259-4842-ba3b-cd8ed4ffce8c\") " pod="openstack/cinder-db-create-nfjnh"
Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.545933 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/baf43c35-2259-4842-ba3b-cd8ed4ffce8c-operator-scripts\") pod \"cinder-db-create-nfjnh\" (UID: \"baf43c35-2259-4842-ba3b-cd8ed4ffce8c\") " pod="openstack/cinder-db-create-nfjnh"
Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.545983 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9lvk\" (UniqueName: \"kubernetes.io/projected/baf43c35-2259-4842-ba3b-cd8ed4ffce8c-kube-api-access-q9lvk\") pod \"cinder-db-create-nfjnh\" (UID: \"baf43c35-2259-4842-ba3b-cd8ed4ffce8c\") " pod="openstack/cinder-db-create-nfjnh"
Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.546008 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpqbw\" (UniqueName: \"kubernetes.io/projected/4a29aee8-80d7-4408-be21-ef961794ede3-kube-api-access-kpqbw\") pod \"cinder-ddef-account-create-update-4qzfx\" (UID: \"4a29aee8-80d7-4408-be21-ef961794ede3\") " pod="openstack/cinder-ddef-account-create-update-4qzfx"
Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.546078 4773
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a29aee8-80d7-4408-be21-ef961794ede3-operator-scripts\") pod \"cinder-ddef-account-create-update-4qzfx\" (UID: \"4a29aee8-80d7-4408-be21-ef961794ede3\") " pod="openstack/cinder-ddef-account-create-update-4qzfx" Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.547026 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/baf43c35-2259-4842-ba3b-cd8ed4ffce8c-operator-scripts\") pod \"cinder-db-create-nfjnh\" (UID: \"baf43c35-2259-4842-ba3b-cd8ed4ffce8c\") " pod="openstack/cinder-db-create-nfjnh" Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.564378 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9lvk\" (UniqueName: \"kubernetes.io/projected/baf43c35-2259-4842-ba3b-cd8ed4ffce8c-kube-api-access-q9lvk\") pod \"cinder-db-create-nfjnh\" (UID: \"baf43c35-2259-4842-ba3b-cd8ed4ffce8c\") " pod="openstack/cinder-db-create-nfjnh" Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.643369 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-nfjnh" Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.647258 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a29aee8-80d7-4408-be21-ef961794ede3-operator-scripts\") pod \"cinder-ddef-account-create-update-4qzfx\" (UID: \"4a29aee8-80d7-4408-be21-ef961794ede3\") " pod="openstack/cinder-ddef-account-create-update-4qzfx" Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.647427 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpqbw\" (UniqueName: \"kubernetes.io/projected/4a29aee8-80d7-4408-be21-ef961794ede3-kube-api-access-kpqbw\") pod \"cinder-ddef-account-create-update-4qzfx\" (UID: \"4a29aee8-80d7-4408-be21-ef961794ede3\") " pod="openstack/cinder-ddef-account-create-update-4qzfx" Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.648530 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a29aee8-80d7-4408-be21-ef961794ede3-operator-scripts\") pod \"cinder-ddef-account-create-update-4qzfx\" (UID: \"4a29aee8-80d7-4408-be21-ef961794ede3\") " pod="openstack/cinder-ddef-account-create-update-4qzfx" Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.669080 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpqbw\" (UniqueName: \"kubernetes.io/projected/4a29aee8-80d7-4408-be21-ef961794ede3-kube-api-access-kpqbw\") pod \"cinder-ddef-account-create-update-4qzfx\" (UID: \"4a29aee8-80d7-4408-be21-ef961794ede3\") " pod="openstack/cinder-ddef-account-create-update-4qzfx" Jan 22 13:30:19 crc kubenswrapper[4773]: I0122 13:30:19.747034 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-ddef-account-create-update-4qzfx" Jan 22 13:30:20 crc kubenswrapper[4773]: W0122 13:30:20.100379 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbaf43c35_2259_4842_ba3b_cd8ed4ffce8c.slice/crio-202daa78e006cb689d5126fe594c376bd7a8d547c874a77265f37b1ab946516d WatchSource:0}: Error finding container 202daa78e006cb689d5126fe594c376bd7a8d547c874a77265f37b1ab946516d: Status 404 returned error can't find the container with id 202daa78e006cb689d5126fe594c376bd7a8d547c874a77265f37b1ab946516d Jan 22 13:30:20 crc kubenswrapper[4773]: I0122 13:30:20.106296 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-nfjnh"] Jan 22 13:30:20 crc kubenswrapper[4773]: I0122 13:30:20.159719 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-nfjnh" event={"ID":"baf43c35-2259-4842-ba3b-cd8ed4ffce8c","Type":"ContainerStarted","Data":"202daa78e006cb689d5126fe594c376bd7a8d547c874a77265f37b1ab946516d"} Jan 22 13:30:20 crc kubenswrapper[4773]: I0122 13:30:20.293097 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-ddef-account-create-update-4qzfx"] Jan 22 13:30:20 crc kubenswrapper[4773]: W0122 13:30:20.295399 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a29aee8_80d7_4408_be21_ef961794ede3.slice/crio-3312c84b1eef580a8362866f93433309ac3da5af2f7e3184582972f69569a300 WatchSource:0}: Error finding container 3312c84b1eef580a8362866f93433309ac3da5af2f7e3184582972f69569a300: Status 404 returned error can't find the container with id 3312c84b1eef580a8362866f93433309ac3da5af2f7e3184582972f69569a300 Jan 22 13:30:21 crc kubenswrapper[4773]: I0122 13:30:21.174029 4773 generic.go:334] "Generic (PLEG): container finished" podID="baf43c35-2259-4842-ba3b-cd8ed4ffce8c" containerID="fbb7eda80213ed5aaeac14c41c132bf12b770d50fe547a045d0f3e3a5a44d041" exitCode=0 Jan 22 13:30:21 crc kubenswrapper[4773]: I0122 13:30:21.174137 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-nfjnh" event={"ID":"baf43c35-2259-4842-ba3b-cd8ed4ffce8c","Type":"ContainerDied","Data":"fbb7eda80213ed5aaeac14c41c132bf12b770d50fe547a045d0f3e3a5a44d041"} Jan 22 13:30:21 crc kubenswrapper[4773]: I0122 13:30:21.180552 4773 generic.go:334] "Generic (PLEG): container finished" podID="4a29aee8-80d7-4408-be21-ef961794ede3" containerID="d4fabfaf6ec353128c8116eaa179b85d3866aaacfcc11efd4dcacaa9f2c8e6b6" exitCode=0 Jan 22 13:30:21 crc kubenswrapper[4773]: I0122 13:30:21.180589 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-ddef-account-create-update-4qzfx" event={"ID":"4a29aee8-80d7-4408-be21-ef961794ede3","Type":"ContainerDied","Data":"d4fabfaf6ec353128c8116eaa179b85d3866aaacfcc11efd4dcacaa9f2c8e6b6"} Jan 22 13:30:21 crc kubenswrapper[4773]: I0122 13:30:21.180614 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-ddef-account-create-update-4qzfx" event={"ID":"4a29aee8-80d7-4408-be21-ef961794ede3","Type":"ContainerStarted","Data":"3312c84b1eef580a8362866f93433309ac3da5af2f7e3184582972f69569a300"} Jan 22 13:30:22 crc kubenswrapper[4773]: I0122 13:30:22.625548 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-ddef-account-create-update-4qzfx" Jan 22 13:30:22 crc kubenswrapper[4773]: I0122 13:30:22.633594 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-nfjnh" Jan 22 13:30:22 crc kubenswrapper[4773]: I0122 13:30:22.703709 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpqbw\" (UniqueName: \"kubernetes.io/projected/4a29aee8-80d7-4408-be21-ef961794ede3-kube-api-access-kpqbw\") pod \"4a29aee8-80d7-4408-be21-ef961794ede3\" (UID: \"4a29aee8-80d7-4408-be21-ef961794ede3\") " Jan 22 13:30:22 crc kubenswrapper[4773]: I0122 13:30:22.703806 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9lvk\" (UniqueName: \"kubernetes.io/projected/baf43c35-2259-4842-ba3b-cd8ed4ffce8c-kube-api-access-q9lvk\") pod \"baf43c35-2259-4842-ba3b-cd8ed4ffce8c\" (UID: \"baf43c35-2259-4842-ba3b-cd8ed4ffce8c\") " Jan 22 13:30:22 crc kubenswrapper[4773]: I0122 13:30:22.703938 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/baf43c35-2259-4842-ba3b-cd8ed4ffce8c-operator-scripts\") pod \"baf43c35-2259-4842-ba3b-cd8ed4ffce8c\" (UID: \"baf43c35-2259-4842-ba3b-cd8ed4ffce8c\") " Jan 22 13:30:22 crc kubenswrapper[4773]: I0122 13:30:22.703990 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a29aee8-80d7-4408-be21-ef961794ede3-operator-scripts\") pod \"4a29aee8-80d7-4408-be21-ef961794ede3\" (UID: \"4a29aee8-80d7-4408-be21-ef961794ede3\") " Jan 22 13:30:22 crc kubenswrapper[4773]: I0122 13:30:22.704972 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a29aee8-80d7-4408-be21-ef961794ede3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4a29aee8-80d7-4408-be21-ef961794ede3" (UID: "4a29aee8-80d7-4408-be21-ef961794ede3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:30:22 crc kubenswrapper[4773]: I0122 13:30:22.705047 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/baf43c35-2259-4842-ba3b-cd8ed4ffce8c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "baf43c35-2259-4842-ba3b-cd8ed4ffce8c" (UID: "baf43c35-2259-4842-ba3b-cd8ed4ffce8c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:30:22 crc kubenswrapper[4773]: I0122 13:30:22.709079 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a29aee8-80d7-4408-be21-ef961794ede3-kube-api-access-kpqbw" (OuterVolumeSpecName: "kube-api-access-kpqbw") pod "4a29aee8-80d7-4408-be21-ef961794ede3" (UID: "4a29aee8-80d7-4408-be21-ef961794ede3"). InnerVolumeSpecName "kube-api-access-kpqbw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:30:22 crc kubenswrapper[4773]: I0122 13:30:22.709120 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/baf43c35-2259-4842-ba3b-cd8ed4ffce8c-kube-api-access-q9lvk" (OuterVolumeSpecName: "kube-api-access-q9lvk") pod "baf43c35-2259-4842-ba3b-cd8ed4ffce8c" (UID: "baf43c35-2259-4842-ba3b-cd8ed4ffce8c"). InnerVolumeSpecName "kube-api-access-q9lvk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:30:22 crc kubenswrapper[4773]: I0122 13:30:22.805860 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/baf43c35-2259-4842-ba3b-cd8ed4ffce8c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:22 crc kubenswrapper[4773]: I0122 13:30:22.806556 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a29aee8-80d7-4408-be21-ef961794ede3-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:22 crc kubenswrapper[4773]: I0122 13:30:22.806575 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpqbw\" (UniqueName: \"kubernetes.io/projected/4a29aee8-80d7-4408-be21-ef961794ede3-kube-api-access-kpqbw\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:22 crc kubenswrapper[4773]: I0122 13:30:22.806589 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9lvk\" (UniqueName: \"kubernetes.io/projected/baf43c35-2259-4842-ba3b-cd8ed4ffce8c-kube-api-access-q9lvk\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:23 crc kubenswrapper[4773]: I0122 13:30:23.216106 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-nfjnh" event={"ID":"baf43c35-2259-4842-ba3b-cd8ed4ffce8c","Type":"ContainerDied","Data":"202daa78e006cb689d5126fe594c376bd7a8d547c874a77265f37b1ab946516d"} Jan 22 13:30:23 crc kubenswrapper[4773]: I0122 13:30:23.216167 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="202daa78e006cb689d5126fe594c376bd7a8d547c874a77265f37b1ab946516d" Jan 22 13:30:23 crc kubenswrapper[4773]: I0122 13:30:23.216166 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-nfjnh" Jan 22 13:30:23 crc kubenswrapper[4773]: I0122 13:30:23.218381 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-ddef-account-create-update-4qzfx" event={"ID":"4a29aee8-80d7-4408-be21-ef961794ede3","Type":"ContainerDied","Data":"3312c84b1eef580a8362866f93433309ac3da5af2f7e3184582972f69569a300"} Jan 22 13:30:23 crc kubenswrapper[4773]: I0122 13:30:23.218405 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3312c84b1eef580a8362866f93433309ac3da5af2f7e3184582972f69569a300" Jan 22 13:30:23 crc kubenswrapper[4773]: I0122 13:30:23.218457 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-ddef-account-create-update-4qzfx" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.643058 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-q87g6"] Jan 22 13:30:24 crc kubenswrapper[4773]: E0122 13:30:24.645070 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a29aee8-80d7-4408-be21-ef961794ede3" containerName="mariadb-account-create-update" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.645096 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a29aee8-80d7-4408-be21-ef961794ede3" containerName="mariadb-account-create-update" Jan 22 13:30:24 crc kubenswrapper[4773]: E0122 13:30:24.645120 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baf43c35-2259-4842-ba3b-cd8ed4ffce8c" containerName="mariadb-database-create" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.645129 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="baf43c35-2259-4842-ba3b-cd8ed4ffce8c" containerName="mariadb-database-create" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.645357 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="baf43c35-2259-4842-ba3b-cd8ed4ffce8c" containerName="mariadb-database-create" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.645389 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a29aee8-80d7-4408-be21-ef961794ede3" containerName="mariadb-account-create-update" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.646586 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.649915 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-g66gp" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.649965 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.650133 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.651656 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-q87g6"] Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.743823 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgc9k\" (UniqueName: \"kubernetes.io/projected/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-kube-api-access-kgc9k\") pod \"cinder-db-sync-q87g6\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.743907 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-scripts\") pod \"cinder-db-sync-q87g6\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.743942 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-combined-ca-bundle\") pod \"cinder-db-sync-q87g6\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 
13:30:24.744015 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-etc-machine-id\") pod \"cinder-db-sync-q87g6\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.744326 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-db-sync-config-data\") pod \"cinder-db-sync-q87g6\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.744365 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-config-data\") pod \"cinder-db-sync-q87g6\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.845974 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-db-sync-config-data\") pod \"cinder-db-sync-q87g6\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.846029 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-config-data\") pod \"cinder-db-sync-q87g6\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.846078 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgc9k\" (UniqueName: \"kubernetes.io/projected/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-kube-api-access-kgc9k\") pod \"cinder-db-sync-q87g6\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.846129 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-scripts\") pod \"cinder-db-sync-q87g6\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.846154 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-combined-ca-bundle\") pod \"cinder-db-sync-q87g6\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.846206 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-etc-machine-id\") pod \"cinder-db-sync-q87g6\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.846351 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-etc-machine-id\") pod \"cinder-db-sync-q87g6\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.850956 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-db-sync-config-data\") pod \"cinder-db-sync-q87g6\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.851168 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-scripts\") pod \"cinder-db-sync-q87g6\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.851448 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-combined-ca-bundle\") pod \"cinder-db-sync-q87g6\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.851649 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-config-data\") pod \"cinder-db-sync-q87g6\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.864838 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgc9k\" (UniqueName: \"kubernetes.io/projected/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-kube-api-access-kgc9k\") pod \"cinder-db-sync-q87g6\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:24 crc kubenswrapper[4773]: I0122 13:30:24.980771 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:25 crc kubenswrapper[4773]: I0122 13:30:25.401623 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-q87g6"] Jan 22 13:30:25 crc kubenswrapper[4773]: W0122 13:30:25.404599 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddede7e19_f7fd_4acb_a9b4_26b48bfdbc69.slice/crio-10d8872055e8caf0f3a1cb056847048279d15e5480a09d357fbe025d8c10fc29 WatchSource:0}: Error finding container 10d8872055e8caf0f3a1cb056847048279d15e5480a09d357fbe025d8c10fc29: Status 404 returned error can't find the container with id 10d8872055e8caf0f3a1cb056847048279d15e5480a09d357fbe025d8c10fc29 Jan 22 13:30:26 crc kubenswrapper[4773]: I0122 13:30:26.249879 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-q87g6" event={"ID":"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69","Type":"ContainerStarted","Data":"9c9b49b8c35a41fa36af5436215e2d042e45556e147c336bb248ca84126bd4ce"} Jan 22 13:30:26 crc kubenswrapper[4773]: I0122 13:30:26.250239 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-q87g6" event={"ID":"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69","Type":"ContainerStarted","Data":"10d8872055e8caf0f3a1cb056847048279d15e5480a09d357fbe025d8c10fc29"} Jan 22 13:30:26 crc kubenswrapper[4773]: I0122 13:30:26.277170 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-q87g6" podStartSLOduration=2.277140348 podStartE2EDuration="2.277140348s" podCreationTimestamp="2026-01-22 13:30:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:30:26.269831423 +0000 UTC m=+5733.847947318" watchObservedRunningTime="2026-01-22 13:30:26.277140348 +0000 UTC m=+5733.855256183" Jan 22 13:30:28 crc kubenswrapper[4773]: I0122 13:30:28.870890 4773 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod0f853f59-ea9e-4dd0-84b5-fa2051f28396"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod0f853f59-ea9e-4dd0-84b5-fa2051f28396] : Timed out while waiting for systemd to remove kubepods-besteffort-pod0f853f59_ea9e_4dd0_84b5_fa2051f28396.slice" Jan 22 13:30:29 crc kubenswrapper[4773]: I0122 13:30:29.278805 4773 generic.go:334] "Generic (PLEG): container finished" podID="dede7e19-f7fd-4acb-a9b4-26b48bfdbc69" containerID="9c9b49b8c35a41fa36af5436215e2d042e45556e147c336bb248ca84126bd4ce" exitCode=0 Jan 22 13:30:29 crc kubenswrapper[4773]: I0122 13:30:29.278856 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-q87g6" event={"ID":"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69","Type":"ContainerDied","Data":"9c9b49b8c35a41fa36af5436215e2d042e45556e147c336bb248ca84126bd4ce"} Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.645182 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.658493 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:30:30 crc kubenswrapper[4773]: E0122 13:30:30.658756 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.781445 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-db-sync-config-data\") pod \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.781553 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgc9k\" (UniqueName: \"kubernetes.io/projected/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-kube-api-access-kgc9k\") pod \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.781768 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-scripts\") pod \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.781820 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-config-data\") pod \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.781857 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-combined-ca-bundle\") pod \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.781962 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-etc-machine-id\") pod \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\" (UID: \"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69\") " Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.784037 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "dede7e19-f7fd-4acb-a9b4-26b48bfdbc69" (UID: "dede7e19-f7fd-4acb-a9b4-26b48bfdbc69"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.787787 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "dede7e19-f7fd-4acb-a9b4-26b48bfdbc69" (UID: "dede7e19-f7fd-4acb-a9b4-26b48bfdbc69"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.790522 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-kube-api-access-kgc9k" (OuterVolumeSpecName: "kube-api-access-kgc9k") pod "dede7e19-f7fd-4acb-a9b4-26b48bfdbc69" (UID: "dede7e19-f7fd-4acb-a9b4-26b48bfdbc69"). InnerVolumeSpecName "kube-api-access-kgc9k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.791422 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-scripts" (OuterVolumeSpecName: "scripts") pod "dede7e19-f7fd-4acb-a9b4-26b48bfdbc69" (UID: "dede7e19-f7fd-4acb-a9b4-26b48bfdbc69"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.810323 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dede7e19-f7fd-4acb-a9b4-26b48bfdbc69" (UID: "dede7e19-f7fd-4acb-a9b4-26b48bfdbc69"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.825622 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-config-data" (OuterVolumeSpecName: "config-data") pod "dede7e19-f7fd-4acb-a9b4-26b48bfdbc69" (UID: "dede7e19-f7fd-4acb-a9b4-26b48bfdbc69"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.884797 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.884889 4773 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.884917 4773 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.884944 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgc9k\" (UniqueName: \"kubernetes.io/projected/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-kube-api-access-kgc9k\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.884973 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:30 crc kubenswrapper[4773]: I0122 13:30:30.884990 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.314205 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-q87g6" event={"ID":"dede7e19-f7fd-4acb-a9b4-26b48bfdbc69","Type":"ContainerDied","Data":"10d8872055e8caf0f3a1cb056847048279d15e5480a09d357fbe025d8c10fc29"} Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.314266 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10d8872055e8caf0f3a1cb056847048279d15e5480a09d357fbe025d8c10fc29" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.314308 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-q87g6" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.674152 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86bfd78d95-7pw9h"] Jan 22 13:30:31 crc kubenswrapper[4773]: E0122 13:30:31.674652 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dede7e19-f7fd-4acb-a9b4-26b48bfdbc69" containerName="cinder-db-sync" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.674672 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="dede7e19-f7fd-4acb-a9b4-26b48bfdbc69" containerName="cinder-db-sync" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.674902 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="dede7e19-f7fd-4acb-a9b4-26b48bfdbc69" containerName="cinder-db-sync" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.676024 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.694459 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86bfd78d95-7pw9h"] Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.803737 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-ovsdbserver-sb\") pod \"dnsmasq-dns-86bfd78d95-7pw9h\" (UID: \"13856aea-6a69-4cd9-b4b8-276d639956e7\") " pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.804046 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-dns-svc\") pod \"dnsmasq-dns-86bfd78d95-7pw9h\" (UID: \"13856aea-6a69-4cd9-b4b8-276d639956e7\") " pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.804110 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-config\") pod \"dnsmasq-dns-86bfd78d95-7pw9h\" (UID: \"13856aea-6a69-4cd9-b4b8-276d639956e7\") " pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.804150 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-ovsdbserver-nb\") pod \"dnsmasq-dns-86bfd78d95-7pw9h\" (UID: \"13856aea-6a69-4cd9-b4b8-276d639956e7\") " pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.804322 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvbtd\" (UniqueName: \"kubernetes.io/projected/13856aea-6a69-4cd9-b4b8-276d639956e7-kube-api-access-zvbtd\") pod \"dnsmasq-dns-86bfd78d95-7pw9h\" (UID: \"13856aea-6a69-4cd9-b4b8-276d639956e7\") " pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.896128 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.897668 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.905902 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-dns-svc\") pod \"dnsmasq-dns-86bfd78d95-7pw9h\" (UID: \"13856aea-6a69-4cd9-b4b8-276d639956e7\") " pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.905951 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-config\") pod \"dnsmasq-dns-86bfd78d95-7pw9h\" (UID: \"13856aea-6a69-4cd9-b4b8-276d639956e7\") " pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.905976 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-ovsdbserver-nb\") pod \"dnsmasq-dns-86bfd78d95-7pw9h\" (UID: \"13856aea-6a69-4cd9-b4b8-276d639956e7\") " pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.906028 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvbtd\" (UniqueName: \"kubernetes.io/projected/13856aea-6a69-4cd9-b4b8-276d639956e7-kube-api-access-zvbtd\") pod \"dnsmasq-dns-86bfd78d95-7pw9h\" (UID: \"13856aea-6a69-4cd9-b4b8-276d639956e7\") " pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.906063 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-ovsdbserver-sb\") pod \"dnsmasq-dns-86bfd78d95-7pw9h\" (UID: \"13856aea-6a69-4cd9-b4b8-276d639956e7\") " pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.906126 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.906992 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-ovsdbserver-sb\") pod \"dnsmasq-dns-86bfd78d95-7pw9h\" (UID: \"13856aea-6a69-4cd9-b4b8-276d639956e7\") " pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.907075 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-dns-svc\") pod \"dnsmasq-dns-86bfd78d95-7pw9h\" (UID: \"13856aea-6a69-4cd9-b4b8-276d639956e7\") " pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.907119 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-config\") pod \"dnsmasq-dns-86bfd78d95-7pw9h\" (UID: \"13856aea-6a69-4cd9-b4b8-276d639956e7\") " pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.907145 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-ovsdbserver-nb\") pod \"dnsmasq-dns-86bfd78d95-7pw9h\" (UID: 
\"13856aea-6a69-4cd9-b4b8-276d639956e7\") " pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.911167 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-g66gp" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.912506 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.921183 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.921711 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.937837 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvbtd\" (UniqueName: \"kubernetes.io/projected/13856aea-6a69-4cd9-b4b8-276d639956e7-kube-api-access-zvbtd\") pod \"dnsmasq-dns-86bfd78d95-7pw9h\" (UID: \"13856aea-6a69-4cd9-b4b8-276d639956e7\") " pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:30:31 crc kubenswrapper[4773]: I0122 13:30:31.996496 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.007998 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/748bcad7-ac45-420a-93ef-258073d630f7-etc-machine-id\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.008047 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-scripts\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.008089 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/748bcad7-ac45-420a-93ef-258073d630f7-logs\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.008118 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-config-data-custom\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.008136 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlcgt\" (UniqueName: \"kubernetes.io/projected/748bcad7-ac45-420a-93ef-258073d630f7-kube-api-access-jlcgt\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.008170 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " 
pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.008238 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-config-data\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.109753 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/748bcad7-ac45-420a-93ef-258073d630f7-logs\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.109802 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-config-data-custom\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.109821 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlcgt\" (UniqueName: \"kubernetes.io/projected/748bcad7-ac45-420a-93ef-258073d630f7-kube-api-access-jlcgt\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.109861 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.109937 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-config-data\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.109979 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/748bcad7-ac45-420a-93ef-258073d630f7-etc-machine-id\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.109999 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-scripts\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.111757 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/748bcad7-ac45-420a-93ef-258073d630f7-etc-machine-id\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.112186 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/748bcad7-ac45-420a-93ef-258073d630f7-logs\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 
22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.116940 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.125041 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-config-data-custom\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.125959 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-config-data\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.130105 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-scripts\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.141270 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlcgt\" (UniqueName: \"kubernetes.io/projected/748bcad7-ac45-420a-93ef-258073d630f7-kube-api-access-jlcgt\") pod \"cinder-api-0\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.214634 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.472764 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86bfd78d95-7pw9h"] Jan 22 13:30:32 crc kubenswrapper[4773]: W0122 13:30:32.695744 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod748bcad7_ac45_420a_93ef_258073d630f7.slice/crio-6e1007c0f19ae7806158af0c1206e6786500181df7cfe500967314456e07a9df WatchSource:0}: Error finding container 6e1007c0f19ae7806158af0c1206e6786500181df7cfe500967314456e07a9df: Status 404 returned error can't find the container with id 6e1007c0f19ae7806158af0c1206e6786500181df7cfe500967314456e07a9df Jan 22 13:30:32 crc kubenswrapper[4773]: I0122 13:30:32.706594 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 22 13:30:33 crc kubenswrapper[4773]: I0122 13:30:33.337785 4773 generic.go:334] "Generic (PLEG): container finished" podID="13856aea-6a69-4cd9-b4b8-276d639956e7" containerID="b7cd7d8cef1d6acbb4108ee3c4bc3462ec68e2908d554f7ddfc9dcf2ae5e9196" exitCode=0 Jan 22 13:30:33 crc kubenswrapper[4773]: I0122 13:30:33.337898 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" event={"ID":"13856aea-6a69-4cd9-b4b8-276d639956e7","Type":"ContainerDied","Data":"b7cd7d8cef1d6acbb4108ee3c4bc3462ec68e2908d554f7ddfc9dcf2ae5e9196"} Jan 22 13:30:33 crc kubenswrapper[4773]: I0122 13:30:33.338166 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" event={"ID":"13856aea-6a69-4cd9-b4b8-276d639956e7","Type":"ContainerStarted","Data":"9e5f3d359182d37b12beeadb4a2d7049c7be6b8f3a2c3131bd1134b5f7ddd699"} Jan 22 13:30:33 crc kubenswrapper[4773]: I0122 13:30:33.341130 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"748bcad7-ac45-420a-93ef-258073d630f7","Type":"ContainerStarted","Data":"ac4b410b20af36a5c13085094961bea9df3acccbdc3a0873bcb1d82e204a72f2"} Jan 22 13:30:33 crc kubenswrapper[4773]: I0122 13:30:33.341173 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"748bcad7-ac45-420a-93ef-258073d630f7","Type":"ContainerStarted","Data":"6e1007c0f19ae7806158af0c1206e6786500181df7cfe500967314456e07a9df"} Jan 22 13:30:33 crc kubenswrapper[4773]: I0122 13:30:33.976995 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 22 13:30:34 crc kubenswrapper[4773]: I0122 13:30:34.356943 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" event={"ID":"13856aea-6a69-4cd9-b4b8-276d639956e7","Type":"ContainerStarted","Data":"9a7665f59fb03a07996049d567d9ec7e30102b98ed27d9f32c5f60d6f423493e"} Jan 22 13:30:34 crc kubenswrapper[4773]: I0122 13:30:34.357445 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:30:34 crc kubenswrapper[4773]: I0122 13:30:34.360598 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"748bcad7-ac45-420a-93ef-258073d630f7","Type":"ContainerStarted","Data":"e0381b3c86ed9e1d43a76196facc3ddec53abb962bc5bb939c2b04017872dc81"} Jan 22 13:30:34 crc kubenswrapper[4773]: I0122 13:30:34.361477 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jan 22 13:30:34 crc kubenswrapper[4773]: I0122 13:30:34.386608 
4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" podStartSLOduration=3.38657656 podStartE2EDuration="3.38657656s" podCreationTimestamp="2026-01-22 13:30:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:30:34.375129328 +0000 UTC m=+5741.953245213" watchObservedRunningTime="2026-01-22 13:30:34.38657656 +0000 UTC m=+5741.964692415" Jan 22 13:30:34 crc kubenswrapper[4773]: I0122 13:30:34.401580 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.4015489309999998 podStartE2EDuration="3.401548931s" podCreationTimestamp="2026-01-22 13:30:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:30:34.393927997 +0000 UTC m=+5741.972043862" watchObservedRunningTime="2026-01-22 13:30:34.401548931 +0000 UTC m=+5741.979664746" Jan 22 13:30:35 crc kubenswrapper[4773]: I0122 13:30:35.368077 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="748bcad7-ac45-420a-93ef-258073d630f7" containerName="cinder-api-log" containerID="cri-o://ac4b410b20af36a5c13085094961bea9df3acccbdc3a0873bcb1d82e204a72f2" gracePeriod=30 Jan 22 13:30:35 crc kubenswrapper[4773]: I0122 13:30:35.368124 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="748bcad7-ac45-420a-93ef-258073d630f7" containerName="cinder-api" containerID="cri-o://e0381b3c86ed9e1d43a76196facc3ddec53abb962bc5bb939c2b04017872dc81" gracePeriod=30 Jan 22 13:30:35 crc kubenswrapper[4773]: I0122 13:30:35.944801 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.080543 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jlcgt\" (UniqueName: \"kubernetes.io/projected/748bcad7-ac45-420a-93ef-258073d630f7-kube-api-access-jlcgt\") pod \"748bcad7-ac45-420a-93ef-258073d630f7\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.080593 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-combined-ca-bundle\") pod \"748bcad7-ac45-420a-93ef-258073d630f7\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.080656 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-config-data\") pod \"748bcad7-ac45-420a-93ef-258073d630f7\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.080708 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/748bcad7-ac45-420a-93ef-258073d630f7-etc-machine-id\") pod \"748bcad7-ac45-420a-93ef-258073d630f7\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.080773 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/748bcad7-ac45-420a-93ef-258073d630f7-logs\") pod \"748bcad7-ac45-420a-93ef-258073d630f7\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.080837 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/748bcad7-ac45-420a-93ef-258073d630f7-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "748bcad7-ac45-420a-93ef-258073d630f7" (UID: "748bcad7-ac45-420a-93ef-258073d630f7"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.080961 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-scripts\") pod \"748bcad7-ac45-420a-93ef-258073d630f7\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.081240 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/748bcad7-ac45-420a-93ef-258073d630f7-logs" (OuterVolumeSpecName: "logs") pod "748bcad7-ac45-420a-93ef-258073d630f7" (UID: "748bcad7-ac45-420a-93ef-258073d630f7"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.081413 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-config-data-custom\") pod \"748bcad7-ac45-420a-93ef-258073d630f7\" (UID: \"748bcad7-ac45-420a-93ef-258073d630f7\") " Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.081902 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/748bcad7-ac45-420a-93ef-258073d630f7-logs\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.081928 4773 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/748bcad7-ac45-420a-93ef-258073d630f7-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.086230 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "748bcad7-ac45-420a-93ef-258073d630f7" (UID: "748bcad7-ac45-420a-93ef-258073d630f7"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.087010 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-scripts" (OuterVolumeSpecName: "scripts") pod "748bcad7-ac45-420a-93ef-258073d630f7" (UID: "748bcad7-ac45-420a-93ef-258073d630f7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.087494 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/748bcad7-ac45-420a-93ef-258073d630f7-kube-api-access-jlcgt" (OuterVolumeSpecName: "kube-api-access-jlcgt") pod "748bcad7-ac45-420a-93ef-258073d630f7" (UID: "748bcad7-ac45-420a-93ef-258073d630f7"). InnerVolumeSpecName "kube-api-access-jlcgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.104509 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "748bcad7-ac45-420a-93ef-258073d630f7" (UID: "748bcad7-ac45-420a-93ef-258073d630f7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.125948 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-config-data" (OuterVolumeSpecName: "config-data") pod "748bcad7-ac45-420a-93ef-258073d630f7" (UID: "748bcad7-ac45-420a-93ef-258073d630f7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.183374 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.183404 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.183416 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jlcgt\" (UniqueName: \"kubernetes.io/projected/748bcad7-ac45-420a-93ef-258073d630f7-kube-api-access-jlcgt\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.183425 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.183434 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/748bcad7-ac45-420a-93ef-258073d630f7-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.381118 4773 generic.go:334] "Generic (PLEG): container finished" podID="748bcad7-ac45-420a-93ef-258073d630f7" containerID="e0381b3c86ed9e1d43a76196facc3ddec53abb962bc5bb939c2b04017872dc81" exitCode=0 Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.382063 4773 generic.go:334] "Generic (PLEG): container finished" podID="748bcad7-ac45-420a-93ef-258073d630f7" containerID="ac4b410b20af36a5c13085094961bea9df3acccbdc3a0873bcb1d82e204a72f2" exitCode=143 Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.381257 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"748bcad7-ac45-420a-93ef-258073d630f7","Type":"ContainerDied","Data":"e0381b3c86ed9e1d43a76196facc3ddec53abb962bc5bb939c2b04017872dc81"} Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.381199 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.382716 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"748bcad7-ac45-420a-93ef-258073d630f7","Type":"ContainerDied","Data":"ac4b410b20af36a5c13085094961bea9df3acccbdc3a0873bcb1d82e204a72f2"} Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.382924 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"748bcad7-ac45-420a-93ef-258073d630f7","Type":"ContainerDied","Data":"6e1007c0f19ae7806158af0c1206e6786500181df7cfe500967314456e07a9df"} Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.382859 4773 scope.go:117] "RemoveContainer" containerID="e0381b3c86ed9e1d43a76196facc3ddec53abb962bc5bb939c2b04017872dc81" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.405068 4773 scope.go:117] "RemoveContainer" containerID="ac4b410b20af36a5c13085094961bea9df3acccbdc3a0873bcb1d82e204a72f2" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.422397 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.443874 4773 scope.go:117] "RemoveContainer" containerID="e0381b3c86ed9e1d43a76196facc3ddec53abb962bc5bb939c2b04017872dc81" Jan 22 13:30:36 crc kubenswrapper[4773]: E0122 13:30:36.444421 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0381b3c86ed9e1d43a76196facc3ddec53abb962bc5bb939c2b04017872dc81\": container with ID starting with e0381b3c86ed9e1d43a76196facc3ddec53abb962bc5bb939c2b04017872dc81 not found: ID does not exist" containerID="e0381b3c86ed9e1d43a76196facc3ddec53abb962bc5bb939c2b04017872dc81" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.444463 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0381b3c86ed9e1d43a76196facc3ddec53abb962bc5bb939c2b04017872dc81"} err="failed to get container status \"e0381b3c86ed9e1d43a76196facc3ddec53abb962bc5bb939c2b04017872dc81\": rpc error: code = NotFound desc = could not find container \"e0381b3c86ed9e1d43a76196facc3ddec53abb962bc5bb939c2b04017872dc81\": container with ID starting with e0381b3c86ed9e1d43a76196facc3ddec53abb962bc5bb939c2b04017872dc81 not found: ID does not exist" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.444502 4773 scope.go:117] "RemoveContainer" containerID="ac4b410b20af36a5c13085094961bea9df3acccbdc3a0873bcb1d82e204a72f2" Jan 22 13:30:36 crc kubenswrapper[4773]: E0122 13:30:36.444745 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac4b410b20af36a5c13085094961bea9df3acccbdc3a0873bcb1d82e204a72f2\": container with ID starting with ac4b410b20af36a5c13085094961bea9df3acccbdc3a0873bcb1d82e204a72f2 not found: ID does not exist" containerID="ac4b410b20af36a5c13085094961bea9df3acccbdc3a0873bcb1d82e204a72f2" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.444761 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac4b410b20af36a5c13085094961bea9df3acccbdc3a0873bcb1d82e204a72f2"} err="failed to get container status \"ac4b410b20af36a5c13085094961bea9df3acccbdc3a0873bcb1d82e204a72f2\": rpc error: code = NotFound desc = could not find container \"ac4b410b20af36a5c13085094961bea9df3acccbdc3a0873bcb1d82e204a72f2\": container with ID starting with 
ac4b410b20af36a5c13085094961bea9df3acccbdc3a0873bcb1d82e204a72f2 not found: ID does not exist" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.444777 4773 scope.go:117] "RemoveContainer" containerID="e0381b3c86ed9e1d43a76196facc3ddec53abb962bc5bb939c2b04017872dc81" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.445586 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0381b3c86ed9e1d43a76196facc3ddec53abb962bc5bb939c2b04017872dc81"} err="failed to get container status \"e0381b3c86ed9e1d43a76196facc3ddec53abb962bc5bb939c2b04017872dc81\": rpc error: code = NotFound desc = could not find container \"e0381b3c86ed9e1d43a76196facc3ddec53abb962bc5bb939c2b04017872dc81\": container with ID starting with e0381b3c86ed9e1d43a76196facc3ddec53abb962bc5bb939c2b04017872dc81 not found: ID does not exist" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.445629 4773 scope.go:117] "RemoveContainer" containerID="ac4b410b20af36a5c13085094961bea9df3acccbdc3a0873bcb1d82e204a72f2" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.446492 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac4b410b20af36a5c13085094961bea9df3acccbdc3a0873bcb1d82e204a72f2"} err="failed to get container status \"ac4b410b20af36a5c13085094961bea9df3acccbdc3a0873bcb1d82e204a72f2\": rpc error: code = NotFound desc = could not find container \"ac4b410b20af36a5c13085094961bea9df3acccbdc3a0873bcb1d82e204a72f2\": container with ID starting with ac4b410b20af36a5c13085094961bea9df3acccbdc3a0873bcb1d82e204a72f2 not found: ID does not exist" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.449146 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.461052 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 22 13:30:36 crc kubenswrapper[4773]: E0122 13:30:36.461462 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="748bcad7-ac45-420a-93ef-258073d630f7" containerName="cinder-api-log" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.461486 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="748bcad7-ac45-420a-93ef-258073d630f7" containerName="cinder-api-log" Jan 22 13:30:36 crc kubenswrapper[4773]: E0122 13:30:36.462820 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="748bcad7-ac45-420a-93ef-258073d630f7" containerName="cinder-api" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.462835 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="748bcad7-ac45-420a-93ef-258073d630f7" containerName="cinder-api" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.463229 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="748bcad7-ac45-420a-93ef-258073d630f7" containerName="cinder-api-log" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.463275 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="748bcad7-ac45-420a-93ef-258073d630f7" containerName="cinder-api" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.464980 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.470784 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.471020 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.471169 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.471330 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.471479 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-g66gp" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.471597 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.472619 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.593912 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.593959 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d673a20a-c38d-4e53-9415-8cc26d618dc2-logs\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.594042 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-config-data\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.594074 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.594121 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d673a20a-c38d-4e53-9415-8cc26d618dc2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.594188 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.594213 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-config-data-custom\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.594233 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-scripts\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.594254 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rwzq\" (UniqueName: \"kubernetes.io/projected/d673a20a-c38d-4e53-9415-8cc26d618dc2-kube-api-access-6rwzq\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.667725 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="748bcad7-ac45-420a-93ef-258073d630f7" path="/var/lib/kubelet/pods/748bcad7-ac45-420a-93ef-258073d630f7/volumes" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.696235 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-scripts\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.696300 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rwzq\" (UniqueName: \"kubernetes.io/projected/d673a20a-c38d-4e53-9415-8cc26d618dc2-kube-api-access-6rwzq\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.696362 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.696379 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d673a20a-c38d-4e53-9415-8cc26d618dc2-logs\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.696435 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-config-data\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.696482 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.696522 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d673a20a-c38d-4e53-9415-8cc26d618dc2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.696558 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.696578 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-config-data-custom\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.696931 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d673a20a-c38d-4e53-9415-8cc26d618dc2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.697667 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d673a20a-c38d-4e53-9415-8cc26d618dc2-logs\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.701906 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-scripts\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.702180 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.702407 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-public-tls-certs\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.702517 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.702711 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-config-data\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.703093 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-config-data-custom\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.717864 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rwzq\" (UniqueName: \"kubernetes.io/projected/d673a20a-c38d-4e53-9415-8cc26d618dc2-kube-api-access-6rwzq\") pod \"cinder-api-0\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " pod="openstack/cinder-api-0" Jan 22 13:30:36 crc kubenswrapper[4773]: I0122 13:30:36.830459 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 22 13:30:37 crc kubenswrapper[4773]: I0122 13:30:37.285765 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 22 13:30:37 crc kubenswrapper[4773]: W0122 13:30:37.292767 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd673a20a_c38d_4e53_9415_8cc26d618dc2.slice/crio-e1a45ff70890a5edb1144d665dbf500a840ed1b97af372296a14995301cb776c WatchSource:0}: Error finding container e1a45ff70890a5edb1144d665dbf500a840ed1b97af372296a14995301cb776c: Status 404 returned error can't find the container with id e1a45ff70890a5edb1144d665dbf500a840ed1b97af372296a14995301cb776c Jan 22 13:30:37 crc kubenswrapper[4773]: I0122 13:30:37.395474 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d673a20a-c38d-4e53-9415-8cc26d618dc2","Type":"ContainerStarted","Data":"e1a45ff70890a5edb1144d665dbf500a840ed1b97af372296a14995301cb776c"} Jan 22 13:30:38 crc kubenswrapper[4773]: I0122 13:30:38.407616 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d673a20a-c38d-4e53-9415-8cc26d618dc2","Type":"ContainerStarted","Data":"a81a54139b814ab0165e426560e3d4ee8dcf72b481bdf30acca9e9dc7ba7d4f5"} Jan 22 13:30:39 crc kubenswrapper[4773]: I0122 13:30:39.421696 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d673a20a-c38d-4e53-9415-8cc26d618dc2","Type":"ContainerStarted","Data":"b337df5bd39c7ea9b3769bcf244281bab52372049812a5033ecd4f52246cf3cb"} Jan 22 13:30:39 crc kubenswrapper[4773]: I0122 13:30:39.422195 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jan 22 13:30:39 crc kubenswrapper[4773]: I0122 13:30:39.451356 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.451331639 podStartE2EDuration="3.451331639s" podCreationTimestamp="2026-01-22 13:30:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:30:39.451036391 +0000 UTC m=+5747.029152236" watchObservedRunningTime="2026-01-22 13:30:39.451331639 +0000 UTC m=+5747.029447484" Jan 22 13:30:41 crc kubenswrapper[4773]: I0122 13:30:41.998788 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.072161 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-846f6dc6f9-zqwqm"] Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.072494 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm" 
podUID="3c871e72-423b-4390-930e-843a1f24c2ef" containerName="dnsmasq-dns" containerID="cri-o://97ea39e12b895cfbfb3057514322954bdb50697f9b2cf264a89a8fc97b197890" gracePeriod=10 Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.450442 4773 generic.go:334] "Generic (PLEG): container finished" podID="3c871e72-423b-4390-930e-843a1f24c2ef" containerID="97ea39e12b895cfbfb3057514322954bdb50697f9b2cf264a89a8fc97b197890" exitCode=0 Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.450522 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm" event={"ID":"3c871e72-423b-4390-930e-843a1f24c2ef","Type":"ContainerDied","Data":"97ea39e12b895cfbfb3057514322954bdb50697f9b2cf264a89a8fc97b197890"} Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.451011 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm" event={"ID":"3c871e72-423b-4390-930e-843a1f24c2ef","Type":"ContainerDied","Data":"f3e4c48175e660a5e469841e1885a13d52cfc60e5a1373436e45d7dd7096efd1"} Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.451035 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3e4c48175e660a5e469841e1885a13d52cfc60e5a1373436e45d7dd7096efd1" Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.514431 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm" Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.612884 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-ovsdbserver-sb\") pod \"3c871e72-423b-4390-930e-843a1f24c2ef\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.613105 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2nrrn\" (UniqueName: \"kubernetes.io/projected/3c871e72-423b-4390-930e-843a1f24c2ef-kube-api-access-2nrrn\") pod \"3c871e72-423b-4390-930e-843a1f24c2ef\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.613178 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-config\") pod \"3c871e72-423b-4390-930e-843a1f24c2ef\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.613207 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-ovsdbserver-nb\") pod \"3c871e72-423b-4390-930e-843a1f24c2ef\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.614197 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-dns-svc\") pod \"3c871e72-423b-4390-930e-843a1f24c2ef\" (UID: \"3c871e72-423b-4390-930e-843a1f24c2ef\") " Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.631661 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c871e72-423b-4390-930e-843a1f24c2ef-kube-api-access-2nrrn" (OuterVolumeSpecName: "kube-api-access-2nrrn") pod "3c871e72-423b-4390-930e-843a1f24c2ef" (UID: 
"3c871e72-423b-4390-930e-843a1f24c2ef"). InnerVolumeSpecName "kube-api-access-2nrrn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.656868 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-config" (OuterVolumeSpecName: "config") pod "3c871e72-423b-4390-930e-843a1f24c2ef" (UID: "3c871e72-423b-4390-930e-843a1f24c2ef"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.661454 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3c871e72-423b-4390-930e-843a1f24c2ef" (UID: "3c871e72-423b-4390-930e-843a1f24c2ef"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.664748 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3c871e72-423b-4390-930e-843a1f24c2ef" (UID: "3c871e72-423b-4390-930e-843a1f24c2ef"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.665237 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:30:42 crc kubenswrapper[4773]: E0122 13:30:42.665630 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.669842 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3c871e72-423b-4390-930e-843a1f24c2ef" (UID: "3c871e72-423b-4390-930e-843a1f24c2ef"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.717958 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2nrrn\" (UniqueName: \"kubernetes.io/projected/3c871e72-423b-4390-930e-843a1f24c2ef-kube-api-access-2nrrn\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.718000 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.718015 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.718029 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:42 crc kubenswrapper[4773]: I0122 13:30:42.718041 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3c871e72-423b-4390-930e-843a1f24c2ef-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 13:30:43 crc kubenswrapper[4773]: I0122 13:30:43.463222 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-846f6dc6f9-zqwqm" Jan 22 13:30:43 crc kubenswrapper[4773]: I0122 13:30:43.511904 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-846f6dc6f9-zqwqm"] Jan 22 13:30:43 crc kubenswrapper[4773]: I0122 13:30:43.518496 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-846f6dc6f9-zqwqm"] Jan 22 13:30:44 crc kubenswrapper[4773]: I0122 13:30:44.676348 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c871e72-423b-4390-930e-843a1f24c2ef" path="/var/lib/kubelet/pods/3c871e72-423b-4390-930e-843a1f24c2ef/volumes" Jan 22 13:30:48 crc kubenswrapper[4773]: I0122 13:30:48.687423 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Jan 22 13:30:54 crc kubenswrapper[4773]: I0122 13:30:54.658374 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:30:54 crc kubenswrapper[4773]: E0122 13:30:54.658989 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:31:04 crc kubenswrapper[4773]: I0122 13:31:04.892192 4773 scope.go:117] "RemoveContainer" containerID="cec8374999343be725f204def7ea4320bd68fec995e7730c0c90b96bfeb317a1" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.528690 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 13:31:05 crc kubenswrapper[4773]: E0122 13:31:05.529400 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c871e72-423b-4390-930e-843a1f24c2ef" containerName="init" Jan 22 13:31:05 crc kubenswrapper[4773]: 
I0122 13:31:05.529422 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c871e72-423b-4390-930e-843a1f24c2ef" containerName="init" Jan 22 13:31:05 crc kubenswrapper[4773]: E0122 13:31:05.529435 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c871e72-423b-4390-930e-843a1f24c2ef" containerName="dnsmasq-dns" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.529442 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c871e72-423b-4390-930e-843a1f24c2ef" containerName="dnsmasq-dns" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.529588 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c871e72-423b-4390-930e-843a1f24c2ef" containerName="dnsmasq-dns" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.530594 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.535389 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.540588 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.549708 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.549757 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dec947dc-9f54-4d4b-8435-6f8a07704a17-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.553479 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x97cj\" (UniqueName: \"kubernetes.io/projected/dec947dc-9f54-4d4b-8435-6f8a07704a17-kube-api-access-x97cj\") pod \"cinder-scheduler-0\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.554353 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-scripts\") pod \"cinder-scheduler-0\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.554420 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-config-data\") pod \"cinder-scheduler-0\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.554698 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " pod="openstack/cinder-scheduler-0" 
Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.656259 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.656322 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dec947dc-9f54-4d4b-8435-6f8a07704a17-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.656348 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x97cj\" (UniqueName: \"kubernetes.io/projected/dec947dc-9f54-4d4b-8435-6f8a07704a17-kube-api-access-x97cj\") pod \"cinder-scheduler-0\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.656367 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-scripts\") pod \"cinder-scheduler-0\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.656393 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-config-data\") pod \"cinder-scheduler-0\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.656463 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.657482 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dec947dc-9f54-4d4b-8435-6f8a07704a17-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.663041 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.673867 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-scripts\") pod \"cinder-scheduler-0\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.674302 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-config-data-custom\") pod 
\"cinder-scheduler-0\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.677797 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-config-data\") pod \"cinder-scheduler-0\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.685598 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x97cj\" (UniqueName: \"kubernetes.io/projected/dec947dc-9f54-4d4b-8435-6f8a07704a17-kube-api-access-x97cj\") pod \"cinder-scheduler-0\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:05 crc kubenswrapper[4773]: I0122 13:31:05.865326 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 13:31:06 crc kubenswrapper[4773]: I0122 13:31:06.383765 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 13:31:06 crc kubenswrapper[4773]: I0122 13:31:06.698946 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dec947dc-9f54-4d4b-8435-6f8a07704a17","Type":"ContainerStarted","Data":"ac8ff322c8bdffc79138824934ae22b6b8ef218008ab4d475ba41f633b3d33a2"} Jan 22 13:31:06 crc kubenswrapper[4773]: I0122 13:31:06.838794 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 22 13:31:06 crc kubenswrapper[4773]: I0122 13:31:06.839062 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="d673a20a-c38d-4e53-9415-8cc26d618dc2" containerName="cinder-api-log" containerID="cri-o://a81a54139b814ab0165e426560e3d4ee8dcf72b481bdf30acca9e9dc7ba7d4f5" gracePeriod=30 Jan 22 13:31:06 crc kubenswrapper[4773]: I0122 13:31:06.839173 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="d673a20a-c38d-4e53-9415-8cc26d618dc2" containerName="cinder-api" containerID="cri-o://b337df5bd39c7ea9b3769bcf244281bab52372049812a5033ecd4f52246cf3cb" gracePeriod=30 Jan 22 13:31:06 crc kubenswrapper[4773]: I0122 13:31:06.851968 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="d673a20a-c38d-4e53-9415-8cc26d618dc2" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.57:8776/healthcheck\": EOF" Jan 22 13:31:06 crc kubenswrapper[4773]: I0122 13:31:06.852016 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-api-0" podUID="d673a20a-c38d-4e53-9415-8cc26d618dc2" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.57:8776/healthcheck\": EOF" Jan 22 13:31:07 crc kubenswrapper[4773]: I0122 13:31:07.713017 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dec947dc-9f54-4d4b-8435-6f8a07704a17","Type":"ContainerStarted","Data":"4bd6b8126405c408a809ae9030a57cf5ce58b52eee906aa0e5ce3753f016c3ad"} Jan 22 13:31:07 crc kubenswrapper[4773]: I0122 13:31:07.714071 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dec947dc-9f54-4d4b-8435-6f8a07704a17","Type":"ContainerStarted","Data":"cb483fed83766c10c13e8cde7e1803452efef5493205217ae150401389e415bf"} Jan 22 13:31:07 crc 
kubenswrapper[4773]: I0122 13:31:07.715689 4773 generic.go:334] "Generic (PLEG): container finished" podID="d673a20a-c38d-4e53-9415-8cc26d618dc2" containerID="a81a54139b814ab0165e426560e3d4ee8dcf72b481bdf30acca9e9dc7ba7d4f5" exitCode=143 Jan 22 13:31:07 crc kubenswrapper[4773]: I0122 13:31:07.715736 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d673a20a-c38d-4e53-9415-8cc26d618dc2","Type":"ContainerDied","Data":"a81a54139b814ab0165e426560e3d4ee8dcf72b481bdf30acca9e9dc7ba7d4f5"} Jan 22 13:31:07 crc kubenswrapper[4773]: I0122 13:31:07.745456 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.745431977 podStartE2EDuration="2.745431977s" podCreationTimestamp="2026-01-22 13:31:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:31:07.738199354 +0000 UTC m=+5775.316315189" watchObservedRunningTime="2026-01-22 13:31:07.745431977 +0000 UTC m=+5775.323547802" Jan 22 13:31:09 crc kubenswrapper[4773]: I0122 13:31:09.658078 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:31:09 crc kubenswrapper[4773]: E0122 13:31:09.658749 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.653573 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.746699 4773 generic.go:334] "Generic (PLEG): container finished" podID="d673a20a-c38d-4e53-9415-8cc26d618dc2" containerID="b337df5bd39c7ea9b3769bcf244281bab52372049812a5033ecd4f52246cf3cb" exitCode=0 Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.746748 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d673a20a-c38d-4e53-9415-8cc26d618dc2","Type":"ContainerDied","Data":"b337df5bd39c7ea9b3769bcf244281bab52372049812a5033ecd4f52246cf3cb"} Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.746779 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"d673a20a-c38d-4e53-9415-8cc26d618dc2","Type":"ContainerDied","Data":"e1a45ff70890a5edb1144d665dbf500a840ed1b97af372296a14995301cb776c"} Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.746799 4773 scope.go:117] "RemoveContainer" containerID="b337df5bd39c7ea9b3769bcf244281bab52372049812a5033ecd4f52246cf3cb" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.746979 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.765219 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-combined-ca-bundle\") pod \"d673a20a-c38d-4e53-9415-8cc26d618dc2\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.765376 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d673a20a-c38d-4e53-9415-8cc26d618dc2-etc-machine-id\") pod \"d673a20a-c38d-4e53-9415-8cc26d618dc2\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.765438 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-internal-tls-certs\") pod \"d673a20a-c38d-4e53-9415-8cc26d618dc2\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.765477 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-scripts\") pod \"d673a20a-c38d-4e53-9415-8cc26d618dc2\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.765498 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-config-data-custom\") pod \"d673a20a-c38d-4e53-9415-8cc26d618dc2\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.765560 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rwzq\" (UniqueName: \"kubernetes.io/projected/d673a20a-c38d-4e53-9415-8cc26d618dc2-kube-api-access-6rwzq\") pod \"d673a20a-c38d-4e53-9415-8cc26d618dc2\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.765604 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-public-tls-certs\") pod \"d673a20a-c38d-4e53-9415-8cc26d618dc2\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.765632 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-config-data\") pod \"d673a20a-c38d-4e53-9415-8cc26d618dc2\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.765749 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d673a20a-c38d-4e53-9415-8cc26d618dc2-logs\") pod \"d673a20a-c38d-4e53-9415-8cc26d618dc2\" (UID: \"d673a20a-c38d-4e53-9415-8cc26d618dc2\") " Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.768136 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d673a20a-c38d-4e53-9415-8cc26d618dc2-logs" (OuterVolumeSpecName: "logs") pod "d673a20a-c38d-4e53-9415-8cc26d618dc2" (UID: "d673a20a-c38d-4e53-9415-8cc26d618dc2"). 
InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.769541 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d673a20a-c38d-4e53-9415-8cc26d618dc2-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "d673a20a-c38d-4e53-9415-8cc26d618dc2" (UID: "d673a20a-c38d-4e53-9415-8cc26d618dc2"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.775791 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d673a20a-c38d-4e53-9415-8cc26d618dc2-kube-api-access-6rwzq" (OuterVolumeSpecName: "kube-api-access-6rwzq") pod "d673a20a-c38d-4e53-9415-8cc26d618dc2" (UID: "d673a20a-c38d-4e53-9415-8cc26d618dc2"). InnerVolumeSpecName "kube-api-access-6rwzq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.781104 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d673a20a-c38d-4e53-9415-8cc26d618dc2" (UID: "d673a20a-c38d-4e53-9415-8cc26d618dc2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.788981 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-scripts" (OuterVolumeSpecName: "scripts") pod "d673a20a-c38d-4e53-9415-8cc26d618dc2" (UID: "d673a20a-c38d-4e53-9415-8cc26d618dc2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.800637 4773 scope.go:117] "RemoveContainer" containerID="a81a54139b814ab0165e426560e3d4ee8dcf72b481bdf30acca9e9dc7ba7d4f5" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.846831 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d673a20a-c38d-4e53-9415-8cc26d618dc2" (UID: "d673a20a-c38d-4e53-9415-8cc26d618dc2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.857938 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "d673a20a-c38d-4e53-9415-8cc26d618dc2" (UID: "d673a20a-c38d-4e53-9415-8cc26d618dc2"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.868490 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.868516 4773 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.868543 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d673a20a-c38d-4e53-9415-8cc26d618dc2-logs\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.868556 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.868567 4773 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d673a20a-c38d-4e53-9415-8cc26d618dc2-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.868578 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.868588 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.868601 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rwzq\" (UniqueName: \"kubernetes.io/projected/d673a20a-c38d-4e53-9415-8cc26d618dc2-kube-api-access-6rwzq\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.895825 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d673a20a-c38d-4e53-9415-8cc26d618dc2" (UID: "d673a20a-c38d-4e53-9415-8cc26d618dc2"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.899234 4773 scope.go:117] "RemoveContainer" containerID="b337df5bd39c7ea9b3769bcf244281bab52372049812a5033ecd4f52246cf3cb" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.899583 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-config-data" (OuterVolumeSpecName: "config-data") pod "d673a20a-c38d-4e53-9415-8cc26d618dc2" (UID: "d673a20a-c38d-4e53-9415-8cc26d618dc2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:31:10 crc kubenswrapper[4773]: E0122 13:31:10.899860 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b337df5bd39c7ea9b3769bcf244281bab52372049812a5033ecd4f52246cf3cb\": container with ID starting with b337df5bd39c7ea9b3769bcf244281bab52372049812a5033ecd4f52246cf3cb not found: ID does not exist" containerID="b337df5bd39c7ea9b3769bcf244281bab52372049812a5033ecd4f52246cf3cb" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.899928 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b337df5bd39c7ea9b3769bcf244281bab52372049812a5033ecd4f52246cf3cb"} err="failed to get container status \"b337df5bd39c7ea9b3769bcf244281bab52372049812a5033ecd4f52246cf3cb\": rpc error: code = NotFound desc = could not find container \"b337df5bd39c7ea9b3769bcf244281bab52372049812a5033ecd4f52246cf3cb\": container with ID starting with b337df5bd39c7ea9b3769bcf244281bab52372049812a5033ecd4f52246cf3cb not found: ID does not exist" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.899975 4773 scope.go:117] "RemoveContainer" containerID="a81a54139b814ab0165e426560e3d4ee8dcf72b481bdf30acca9e9dc7ba7d4f5" Jan 22 13:31:10 crc kubenswrapper[4773]: E0122 13:31:10.900618 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a81a54139b814ab0165e426560e3d4ee8dcf72b481bdf30acca9e9dc7ba7d4f5\": container with ID starting with a81a54139b814ab0165e426560e3d4ee8dcf72b481bdf30acca9e9dc7ba7d4f5 not found: ID does not exist" containerID="a81a54139b814ab0165e426560e3d4ee8dcf72b481bdf30acca9e9dc7ba7d4f5" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.900672 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a81a54139b814ab0165e426560e3d4ee8dcf72b481bdf30acca9e9dc7ba7d4f5"} err="failed to get container status \"a81a54139b814ab0165e426560e3d4ee8dcf72b481bdf30acca9e9dc7ba7d4f5\": rpc error: code = NotFound desc = could not find container \"a81a54139b814ab0165e426560e3d4ee8dcf72b481bdf30acca9e9dc7ba7d4f5\": container with ID starting with a81a54139b814ab0165e426560e3d4ee8dcf72b481bdf30acca9e9dc7ba7d4f5 not found: ID does not exist" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.970895 4773 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:10 crc kubenswrapper[4773]: I0122 13:31:10.970996 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d673a20a-c38d-4e53-9415-8cc26d618dc2-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.114421 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.131120 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.140559 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 22 13:31:11 crc kubenswrapper[4773]: E0122 13:31:11.140984 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d673a20a-c38d-4e53-9415-8cc26d618dc2" containerName="cinder-api" Jan 22 13:31:11 crc 
kubenswrapper[4773]: I0122 13:31:11.141006 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d673a20a-c38d-4e53-9415-8cc26d618dc2" containerName="cinder-api" Jan 22 13:31:11 crc kubenswrapper[4773]: E0122 13:31:11.141039 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d673a20a-c38d-4e53-9415-8cc26d618dc2" containerName="cinder-api-log" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.141046 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d673a20a-c38d-4e53-9415-8cc26d618dc2" containerName="cinder-api-log" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.141217 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="d673a20a-c38d-4e53-9415-8cc26d618dc2" containerName="cinder-api" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.141235 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="d673a20a-c38d-4e53-9415-8cc26d618dc2" containerName="cinder-api-log" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.142120 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.147060 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.147469 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.147634 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.149023 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.174602 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjgk2\" (UniqueName: \"kubernetes.io/projected/a903c09b-df78-4752-8e8c-4b711299d7a5-kube-api-access-rjgk2\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.174663 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a903c09b-df78-4752-8e8c-4b711299d7a5-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.174699 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a903c09b-df78-4752-8e8c-4b711299d7a5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.174751 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a903c09b-df78-4752-8e8c-4b711299d7a5-scripts\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.174774 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a903c09b-df78-4752-8e8c-4b711299d7a5-config-data\") 
pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.174869 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a903c09b-df78-4752-8e8c-4b711299d7a5-config-data-custom\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.174908 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a903c09b-df78-4752-8e8c-4b711299d7a5-logs\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.174981 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a903c09b-df78-4752-8e8c-4b711299d7a5-public-tls-certs\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.175001 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a903c09b-df78-4752-8e8c-4b711299d7a5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.276096 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a903c09b-df78-4752-8e8c-4b711299d7a5-config-data-custom\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.276889 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a903c09b-df78-4752-8e8c-4b711299d7a5-logs\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.277098 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a903c09b-df78-4752-8e8c-4b711299d7a5-public-tls-certs\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.277126 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a903c09b-df78-4752-8e8c-4b711299d7a5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.277184 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjgk2\" (UniqueName: \"kubernetes.io/projected/a903c09b-df78-4752-8e8c-4b711299d7a5-kube-api-access-rjgk2\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.277226 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/a903c09b-df78-4752-8e8c-4b711299d7a5-logs\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.277232 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a903c09b-df78-4752-8e8c-4b711299d7a5-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.277300 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a903c09b-df78-4752-8e8c-4b711299d7a5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.277343 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a903c09b-df78-4752-8e8c-4b711299d7a5-scripts\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.277361 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a903c09b-df78-4752-8e8c-4b711299d7a5-config-data\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.277490 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a903c09b-df78-4752-8e8c-4b711299d7a5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.280464 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a903c09b-df78-4752-8e8c-4b711299d7a5-config-data-custom\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.280945 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a903c09b-df78-4752-8e8c-4b711299d7a5-scripts\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.281709 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a903c09b-df78-4752-8e8c-4b711299d7a5-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.282400 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a903c09b-df78-4752-8e8c-4b711299d7a5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.282518 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/a903c09b-df78-4752-8e8c-4b711299d7a5-config-data\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.283205 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a903c09b-df78-4752-8e8c-4b711299d7a5-public-tls-certs\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.295653 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjgk2\" (UniqueName: \"kubernetes.io/projected/a903c09b-df78-4752-8e8c-4b711299d7a5-kube-api-access-rjgk2\") pod \"cinder-api-0\" (UID: \"a903c09b-df78-4752-8e8c-4b711299d7a5\") " pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.459705 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 22 13:31:11 crc kubenswrapper[4773]: I0122 13:31:11.954303 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 22 13:31:11 crc kubenswrapper[4773]: W0122 13:31:11.961680 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda903c09b_df78_4752_8e8c_4b711299d7a5.slice/crio-b5211daa483d8c8b076e34b70e89336f27e5e0922ba909423344e85ae38ef783 WatchSource:0}: Error finding container b5211daa483d8c8b076e34b70e89336f27e5e0922ba909423344e85ae38ef783: Status 404 returned error can't find the container with id b5211daa483d8c8b076e34b70e89336f27e5e0922ba909423344e85ae38ef783 Jan 22 13:31:12 crc kubenswrapper[4773]: I0122 13:31:12.673216 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d673a20a-c38d-4e53-9415-8cc26d618dc2" path="/var/lib/kubelet/pods/d673a20a-c38d-4e53-9415-8cc26d618dc2/volumes" Jan 22 13:31:12 crc kubenswrapper[4773]: I0122 13:31:12.772415 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a903c09b-df78-4752-8e8c-4b711299d7a5","Type":"ContainerStarted","Data":"3ff24c23cb9abc9db26564f63b186cc7a1f1af5ccfc37f921743c96d1dd48395"} Jan 22 13:31:12 crc kubenswrapper[4773]: I0122 13:31:12.772791 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a903c09b-df78-4752-8e8c-4b711299d7a5","Type":"ContainerStarted","Data":"b5211daa483d8c8b076e34b70e89336f27e5e0922ba909423344e85ae38ef783"} Jan 22 13:31:13 crc kubenswrapper[4773]: I0122 13:31:13.797928 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a903c09b-df78-4752-8e8c-4b711299d7a5","Type":"ContainerStarted","Data":"bb5d21b7eb81ea451bf2f36a3634382d650c86a86158615e7bc35eb8e9bb2012"} Jan 22 13:31:13 crc kubenswrapper[4773]: I0122 13:31:13.798558 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jan 22 13:31:13 crc kubenswrapper[4773]: I0122 13:31:13.834759 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=2.834728622 podStartE2EDuration="2.834728622s" podCreationTimestamp="2026-01-22 13:31:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:31:13.833762455 +0000 UTC m=+5781.411878370" 
watchObservedRunningTime="2026-01-22 13:31:13.834728622 +0000 UTC m=+5781.412844477" Jan 22 13:31:16 crc kubenswrapper[4773]: I0122 13:31:16.075065 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 22 13:31:16 crc kubenswrapper[4773]: I0122 13:31:16.128010 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 13:31:16 crc kubenswrapper[4773]: I0122 13:31:16.823649 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="dec947dc-9f54-4d4b-8435-6f8a07704a17" containerName="cinder-scheduler" containerID="cri-o://cb483fed83766c10c13e8cde7e1803452efef5493205217ae150401389e415bf" gracePeriod=30 Jan 22 13:31:16 crc kubenswrapper[4773]: I0122 13:31:16.823736 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="dec947dc-9f54-4d4b-8435-6f8a07704a17" containerName="probe" containerID="cri-o://4bd6b8126405c408a809ae9030a57cf5ce58b52eee906aa0e5ce3753f016c3ad" gracePeriod=30 Jan 22 13:31:17 crc kubenswrapper[4773]: I0122 13:31:17.836334 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dec947dc-9f54-4d4b-8435-6f8a07704a17","Type":"ContainerDied","Data":"4bd6b8126405c408a809ae9030a57cf5ce58b52eee906aa0e5ce3753f016c3ad"} Jan 22 13:31:17 crc kubenswrapper[4773]: I0122 13:31:17.836266 4773 generic.go:334] "Generic (PLEG): container finished" podID="dec947dc-9f54-4d4b-8435-6f8a07704a17" containerID="4bd6b8126405c408a809ae9030a57cf5ce58b52eee906aa0e5ce3753f016c3ad" exitCode=0 Jan 22 13:31:19 crc kubenswrapper[4773]: I0122 13:31:19.858468 4773 generic.go:334] "Generic (PLEG): container finished" podID="dec947dc-9f54-4d4b-8435-6f8a07704a17" containerID="cb483fed83766c10c13e8cde7e1803452efef5493205217ae150401389e415bf" exitCode=0 Jan 22 13:31:19 crc kubenswrapper[4773]: I0122 13:31:19.858517 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dec947dc-9f54-4d4b-8435-6f8a07704a17","Type":"ContainerDied","Data":"cb483fed83766c10c13e8cde7e1803452efef5493205217ae150401389e415bf"} Jan 22 13:31:19 crc kubenswrapper[4773]: I0122 13:31:19.979179 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.152309 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dec947dc-9f54-4d4b-8435-6f8a07704a17-etc-machine-id\") pod \"dec947dc-9f54-4d4b-8435-6f8a07704a17\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.152467 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dec947dc-9f54-4d4b-8435-6f8a07704a17-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "dec947dc-9f54-4d4b-8435-6f8a07704a17" (UID: "dec947dc-9f54-4d4b-8435-6f8a07704a17"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.152707 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-scripts\") pod \"dec947dc-9f54-4d4b-8435-6f8a07704a17\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.152771 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-combined-ca-bundle\") pod \"dec947dc-9f54-4d4b-8435-6f8a07704a17\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.152899 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-config-data-custom\") pod \"dec947dc-9f54-4d4b-8435-6f8a07704a17\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.152933 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x97cj\" (UniqueName: \"kubernetes.io/projected/dec947dc-9f54-4d4b-8435-6f8a07704a17-kube-api-access-x97cj\") pod \"dec947dc-9f54-4d4b-8435-6f8a07704a17\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.152965 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-config-data\") pod \"dec947dc-9f54-4d4b-8435-6f8a07704a17\" (UID: \"dec947dc-9f54-4d4b-8435-6f8a07704a17\") " Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.153453 4773 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dec947dc-9f54-4d4b-8435-6f8a07704a17-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.169756 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "dec947dc-9f54-4d4b-8435-6f8a07704a17" (UID: "dec947dc-9f54-4d4b-8435-6f8a07704a17"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.169794 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dec947dc-9f54-4d4b-8435-6f8a07704a17-kube-api-access-x97cj" (OuterVolumeSpecName: "kube-api-access-x97cj") pod "dec947dc-9f54-4d4b-8435-6f8a07704a17" (UID: "dec947dc-9f54-4d4b-8435-6f8a07704a17"). InnerVolumeSpecName "kube-api-access-x97cj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.169808 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-scripts" (OuterVolumeSpecName: "scripts") pod "dec947dc-9f54-4d4b-8435-6f8a07704a17" (UID: "dec947dc-9f54-4d4b-8435-6f8a07704a17"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.219824 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dec947dc-9f54-4d4b-8435-6f8a07704a17" (UID: "dec947dc-9f54-4d4b-8435-6f8a07704a17"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.254781 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.254818 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x97cj\" (UniqueName: \"kubernetes.io/projected/dec947dc-9f54-4d4b-8435-6f8a07704a17-kube-api-access-x97cj\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.254832 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.254846 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.288894 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-config-data" (OuterVolumeSpecName: "config-data") pod "dec947dc-9f54-4d4b-8435-6f8a07704a17" (UID: "dec947dc-9f54-4d4b-8435-6f8a07704a17"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.356327 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dec947dc-9f54-4d4b-8435-6f8a07704a17-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.870750 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"dec947dc-9f54-4d4b-8435-6f8a07704a17","Type":"ContainerDied","Data":"ac8ff322c8bdffc79138824934ae22b6b8ef218008ab4d475ba41f633b3d33a2"} Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.870803 4773 scope.go:117] "RemoveContainer" containerID="4bd6b8126405c408a809ae9030a57cf5ce58b52eee906aa0e5ce3753f016c3ad" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.870916 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.897332 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.908445 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.912880 4773 scope.go:117] "RemoveContainer" containerID="cb483fed83766c10c13e8cde7e1803452efef5493205217ae150401389e415bf" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.931977 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 13:31:20 crc kubenswrapper[4773]: E0122 13:31:20.932535 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dec947dc-9f54-4d4b-8435-6f8a07704a17" containerName="probe" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.932553 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="dec947dc-9f54-4d4b-8435-6f8a07704a17" containerName="probe" Jan 22 13:31:20 crc kubenswrapper[4773]: E0122 13:31:20.932610 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dec947dc-9f54-4d4b-8435-6f8a07704a17" containerName="cinder-scheduler" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.932618 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="dec947dc-9f54-4d4b-8435-6f8a07704a17" containerName="cinder-scheduler" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.932872 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="dec947dc-9f54-4d4b-8435-6f8a07704a17" containerName="probe" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.932893 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="dec947dc-9f54-4d4b-8435-6f8a07704a17" containerName="cinder-scheduler" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.934693 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.938267 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jan 22 13:31:20 crc kubenswrapper[4773]: I0122 13:31:20.965699 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.069268 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea298d44-91e6-4904-82fd-31d7588d7980-scripts\") pod \"cinder-scheduler-0\" (UID: \"ea298d44-91e6-4904-82fd-31d7588d7980\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.069676 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ea298d44-91e6-4904-82fd-31d7588d7980-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ea298d44-91e6-4904-82fd-31d7588d7980\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.069748 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea298d44-91e6-4904-82fd-31d7588d7980-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ea298d44-91e6-4904-82fd-31d7588d7980\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.069775 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6zrn\" (UniqueName: \"kubernetes.io/projected/ea298d44-91e6-4904-82fd-31d7588d7980-kube-api-access-l6zrn\") pod \"cinder-scheduler-0\" (UID: \"ea298d44-91e6-4904-82fd-31d7588d7980\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.069947 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea298d44-91e6-4904-82fd-31d7588d7980-config-data\") pod \"cinder-scheduler-0\" (UID: \"ea298d44-91e6-4904-82fd-31d7588d7980\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.069976 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ea298d44-91e6-4904-82fd-31d7588d7980-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ea298d44-91e6-4904-82fd-31d7588d7980\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.172066 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea298d44-91e6-4904-82fd-31d7588d7980-config-data\") pod \"cinder-scheduler-0\" (UID: \"ea298d44-91e6-4904-82fd-31d7588d7980\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.172152 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ea298d44-91e6-4904-82fd-31d7588d7980-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ea298d44-91e6-4904-82fd-31d7588d7980\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.172183 4773 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea298d44-91e6-4904-82fd-31d7588d7980-scripts\") pod \"cinder-scheduler-0\" (UID: \"ea298d44-91e6-4904-82fd-31d7588d7980\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.172231 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ea298d44-91e6-4904-82fd-31d7588d7980-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ea298d44-91e6-4904-82fd-31d7588d7980\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.172258 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ea298d44-91e6-4904-82fd-31d7588d7980-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ea298d44-91e6-4904-82fd-31d7588d7980\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.173167 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea298d44-91e6-4904-82fd-31d7588d7980-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ea298d44-91e6-4904-82fd-31d7588d7980\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.173200 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6zrn\" (UniqueName: \"kubernetes.io/projected/ea298d44-91e6-4904-82fd-31d7588d7980-kube-api-access-l6zrn\") pod \"cinder-scheduler-0\" (UID: \"ea298d44-91e6-4904-82fd-31d7588d7980\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.180216 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea298d44-91e6-4904-82fd-31d7588d7980-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ea298d44-91e6-4904-82fd-31d7588d7980\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.180478 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ea298d44-91e6-4904-82fd-31d7588d7980-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ea298d44-91e6-4904-82fd-31d7588d7980\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.184226 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea298d44-91e6-4904-82fd-31d7588d7980-scripts\") pod \"cinder-scheduler-0\" (UID: \"ea298d44-91e6-4904-82fd-31d7588d7980\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.203456 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea298d44-91e6-4904-82fd-31d7588d7980-config-data\") pod \"cinder-scheduler-0\" (UID: \"ea298d44-91e6-4904-82fd-31d7588d7980\") " pod="openstack/cinder-scheduler-0" Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.217553 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6zrn\" (UniqueName: \"kubernetes.io/projected/ea298d44-91e6-4904-82fd-31d7588d7980-kube-api-access-l6zrn\") pod \"cinder-scheduler-0\" (UID: \"ea298d44-91e6-4904-82fd-31d7588d7980\") " pod="openstack/cinder-scheduler-0" 
Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.272831 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.727001 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 22 13:31:21 crc kubenswrapper[4773]: I0122 13:31:21.881528 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ea298d44-91e6-4904-82fd-31d7588d7980","Type":"ContainerStarted","Data":"f089aa261542ed77d1d4cac2cf33edf059b7087b707560a88d04a2c9bfe1aaf9"}
Jan 22 13:31:22 crc kubenswrapper[4773]: I0122 13:31:22.665054 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506"
Jan 22 13:31:22 crc kubenswrapper[4773]: E0122 13:31:22.665935 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:31:22 crc kubenswrapper[4773]: I0122 13:31:22.673281 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dec947dc-9f54-4d4b-8435-6f8a07704a17" path="/var/lib/kubelet/pods/dec947dc-9f54-4d4b-8435-6f8a07704a17/volumes"
Jan 22 13:31:22 crc kubenswrapper[4773]: I0122 13:31:22.894487 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ea298d44-91e6-4904-82fd-31d7588d7980","Type":"ContainerStarted","Data":"04016adaa768ea069a60ba654cf958fae2aab748b69e2f4ee600d732947e2a5a"}
Jan 22 13:31:23 crc kubenswrapper[4773]: I0122 13:31:23.336637 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0"
Jan 22 13:31:23 crc kubenswrapper[4773]: I0122 13:31:23.910234 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ea298d44-91e6-4904-82fd-31d7588d7980","Type":"ContainerStarted","Data":"5557ab8a604486238efbfbd684546ee188fb8c69d8b3359e628c155e5bb0dfc9"}
Jan 22 13:31:23 crc kubenswrapper[4773]: I0122 13:31:23.947312 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.9472687730000002 podStartE2EDuration="3.947268773s" podCreationTimestamp="2026-01-22 13:31:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:31:23.935491552 +0000 UTC m=+5791.513607377" watchObservedRunningTime="2026-01-22 13:31:23.947268773 +0000 UTC m=+5791.525384598"
Jan 22 13:31:26 crc kubenswrapper[4773]: I0122 13:31:26.273847 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Jan 22 13:31:31 crc kubenswrapper[4773]: I0122 13:31:31.501255 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.586060 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-qk9s8"]
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.587585 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-qk9s8"
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.601248 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-qk9s8"]
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.601512 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b97e980-5122-434b-8a97-699aa0fc5b28-operator-scripts\") pod \"glance-db-create-qk9s8\" (UID: \"9b97e980-5122-434b-8a97-699aa0fc5b28\") " pod="openstack/glance-db-create-qk9s8"
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.601641 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lt9k2\" (UniqueName: \"kubernetes.io/projected/9b97e980-5122-434b-8a97-699aa0fc5b28-kube-api-access-lt9k2\") pod \"glance-db-create-qk9s8\" (UID: \"9b97e980-5122-434b-8a97-699aa0fc5b28\") " pod="openstack/glance-db-create-qk9s8"
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.690102 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-cc02-account-create-update-5g6hq"]
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.691767 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-cc02-account-create-update-5g6hq"
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.694028 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.700616 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-cc02-account-create-update-5g6hq"]
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.703704 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lt9k2\" (UniqueName: \"kubernetes.io/projected/9b97e980-5122-434b-8a97-699aa0fc5b28-kube-api-access-lt9k2\") pod \"glance-db-create-qk9s8\" (UID: \"9b97e980-5122-434b-8a97-699aa0fc5b28\") " pod="openstack/glance-db-create-qk9s8"
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.703805 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b97e980-5122-434b-8a97-699aa0fc5b28-operator-scripts\") pod \"glance-db-create-qk9s8\" (UID: \"9b97e980-5122-434b-8a97-699aa0fc5b28\") " pod="openstack/glance-db-create-qk9s8"
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.704644 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b97e980-5122-434b-8a97-699aa0fc5b28-operator-scripts\") pod \"glance-db-create-qk9s8\" (UID: \"9b97e980-5122-434b-8a97-699aa0fc5b28\") " pod="openstack/glance-db-create-qk9s8"
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.731413 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lt9k2\" (UniqueName: \"kubernetes.io/projected/9b97e980-5122-434b-8a97-699aa0fc5b28-kube-api-access-lt9k2\") pod \"glance-db-create-qk9s8\" (UID: \"9b97e980-5122-434b-8a97-699aa0fc5b28\") " pod="openstack/glance-db-create-qk9s8"
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.806186 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jv7n5\" (UniqueName: \"kubernetes.io/projected/62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d-kube-api-access-jv7n5\") pod \"glance-cc02-account-create-update-5g6hq\" (UID: \"62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d\") " pod="openstack/glance-cc02-account-create-update-5g6hq"
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.806520 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d-operator-scripts\") pod \"glance-cc02-account-create-update-5g6hq\" (UID: \"62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d\") " pod="openstack/glance-cc02-account-create-update-5g6hq"
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.908689 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d-operator-scripts\") pod \"glance-cc02-account-create-update-5g6hq\" (UID: \"62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d\") " pod="openstack/glance-cc02-account-create-update-5g6hq"
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.909214 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jv7n5\" (UniqueName: \"kubernetes.io/projected/62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d-kube-api-access-jv7n5\") pod \"glance-cc02-account-create-update-5g6hq\" (UID: \"62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d\") " pod="openstack/glance-cc02-account-create-update-5g6hq"
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.909833 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-qk9s8"
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.909897 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d-operator-scripts\") pod \"glance-cc02-account-create-update-5g6hq\" (UID: \"62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d\") " pod="openstack/glance-cc02-account-create-update-5g6hq"
Jan 22 13:31:32 crc kubenswrapper[4773]: I0122 13:31:32.930573 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jv7n5\" (UniqueName: \"kubernetes.io/projected/62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d-kube-api-access-jv7n5\") pod \"glance-cc02-account-create-update-5g6hq\" (UID: \"62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d\") " pod="openstack/glance-cc02-account-create-update-5g6hq"
Jan 22 13:31:33 crc kubenswrapper[4773]: I0122 13:31:33.010095 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-cc02-account-create-update-5g6hq"
Jan 22 13:31:34 crc kubenswrapper[4773]: I0122 13:31:34.207415 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-cc02-account-create-update-5g6hq"]
Jan 22 13:31:34 crc kubenswrapper[4773]: I0122 13:31:34.221864 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-qk9s8"]
Jan 22 13:31:34 crc kubenswrapper[4773]: W0122 13:31:34.238161 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b97e980_5122_434b_8a97_699aa0fc5b28.slice/crio-f9c62145f9f662d7b19acd25e2b15d143f2e154412c9c7ca8927890b6bdbc06a WatchSource:0}: Error finding container f9c62145f9f662d7b19acd25e2b15d143f2e154412c9c7ca8927890b6bdbc06a: Status 404 returned error can't find the container with id f9c62145f9f662d7b19acd25e2b15d143f2e154412c9c7ca8927890b6bdbc06a
Jan 22 13:31:35 crc kubenswrapper[4773]: I0122 13:31:35.031044 4773 generic.go:334] "Generic (PLEG): container finished" podID="62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d" containerID="3ae0c7e257abcb0b792730b80e4a2f8be22c0c3bf49d85e5cb04050ecf9c9564" exitCode=0
Jan 22 13:31:35 crc kubenswrapper[4773]: I0122 13:31:35.031119 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-cc02-account-create-update-5g6hq" event={"ID":"62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d","Type":"ContainerDied","Data":"3ae0c7e257abcb0b792730b80e4a2f8be22c0c3bf49d85e5cb04050ecf9c9564"}
Jan 22 13:31:35 crc kubenswrapper[4773]: I0122 13:31:35.031532 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-cc02-account-create-update-5g6hq" event={"ID":"62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d","Type":"ContainerStarted","Data":"bc87ef65214e726406951248a615f2dbbeff2169066482d9b686bd602af303f3"}
Jan 22 13:31:35 crc kubenswrapper[4773]: I0122 13:31:35.033949 4773 generic.go:334] "Generic (PLEG): container finished" podID="9b97e980-5122-434b-8a97-699aa0fc5b28" containerID="9b145b11eaac8d65487065181d030a20f7dce060a5f5e3777bdf9dce74db5563" exitCode=0
Jan 22 13:31:35 crc kubenswrapper[4773]: I0122 13:31:35.034076 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-qk9s8" event={"ID":"9b97e980-5122-434b-8a97-699aa0fc5b28","Type":"ContainerDied","Data":"9b145b11eaac8d65487065181d030a20f7dce060a5f5e3777bdf9dce74db5563"}
Jan 22 13:31:35 crc kubenswrapper[4773]: I0122 13:31:35.034141 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-qk9s8" event={"ID":"9b97e980-5122-434b-8a97-699aa0fc5b28","Type":"ContainerStarted","Data":"f9c62145f9f662d7b19acd25e2b15d143f2e154412c9c7ca8927890b6bdbc06a"}
Jan 22 13:31:35 crc kubenswrapper[4773]: I0122 13:31:35.659362 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506"
Jan 22 13:31:35 crc kubenswrapper[4773]: E0122 13:31:35.659746 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:31:36 crc kubenswrapper[4773]: I0122 13:31:36.441682 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-cc02-account-create-update-5g6hq"
Jan 22 13:31:36 crc kubenswrapper[4773]: I0122 13:31:36.448666 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-qk9s8"
Jan 22 13:31:36 crc kubenswrapper[4773]: I0122 13:31:36.581063 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jv7n5\" (UniqueName: \"kubernetes.io/projected/62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d-kube-api-access-jv7n5\") pod \"62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d\" (UID: \"62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d\") "
Jan 22 13:31:36 crc kubenswrapper[4773]: I0122 13:31:36.581413 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lt9k2\" (UniqueName: \"kubernetes.io/projected/9b97e980-5122-434b-8a97-699aa0fc5b28-kube-api-access-lt9k2\") pod \"9b97e980-5122-434b-8a97-699aa0fc5b28\" (UID: \"9b97e980-5122-434b-8a97-699aa0fc5b28\") "
Jan 22 13:31:36 crc kubenswrapper[4773]: I0122 13:31:36.581482 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d-operator-scripts\") pod \"62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d\" (UID: \"62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d\") "
Jan 22 13:31:36 crc kubenswrapper[4773]: I0122 13:31:36.581506 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b97e980-5122-434b-8a97-699aa0fc5b28-operator-scripts\") pod \"9b97e980-5122-434b-8a97-699aa0fc5b28\" (UID: \"9b97e980-5122-434b-8a97-699aa0fc5b28\") "
Jan 22 13:31:36 crc kubenswrapper[4773]: I0122 13:31:36.582394 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d" (UID: "62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 13:31:36 crc kubenswrapper[4773]: I0122 13:31:36.582439 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b97e980-5122-434b-8a97-699aa0fc5b28-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9b97e980-5122-434b-8a97-699aa0fc5b28" (UID: "9b97e980-5122-434b-8a97-699aa0fc5b28"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 13:31:36 crc kubenswrapper[4773]: I0122 13:31:36.586146 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d-kube-api-access-jv7n5" (OuterVolumeSpecName: "kube-api-access-jv7n5") pod "62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d" (UID: "62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d"). InnerVolumeSpecName "kube-api-access-jv7n5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 13:31:36 crc kubenswrapper[4773]: I0122 13:31:36.586275 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b97e980-5122-434b-8a97-699aa0fc5b28-kube-api-access-lt9k2" (OuterVolumeSpecName: "kube-api-access-lt9k2") pod "9b97e980-5122-434b-8a97-699aa0fc5b28" (UID: "9b97e980-5122-434b-8a97-699aa0fc5b28"). InnerVolumeSpecName "kube-api-access-lt9k2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 13:31:36 crc kubenswrapper[4773]: I0122 13:31:36.683537 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jv7n5\" (UniqueName: \"kubernetes.io/projected/62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d-kube-api-access-jv7n5\") on node \"crc\" DevicePath \"\""
Jan 22 13:31:36 crc kubenswrapper[4773]: I0122 13:31:36.683631 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lt9k2\" (UniqueName: \"kubernetes.io/projected/9b97e980-5122-434b-8a97-699aa0fc5b28-kube-api-access-lt9k2\") on node \"crc\" DevicePath \"\""
Jan 22 13:31:36 crc kubenswrapper[4773]: I0122 13:31:36.683641 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 13:31:36 crc kubenswrapper[4773]: I0122 13:31:36.683649 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b97e980-5122-434b-8a97-699aa0fc5b28-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 13:31:37 crc kubenswrapper[4773]: I0122 13:31:37.060995 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-cc02-account-create-update-5g6hq" event={"ID":"62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d","Type":"ContainerDied","Data":"bc87ef65214e726406951248a615f2dbbeff2169066482d9b686bd602af303f3"}
Jan 22 13:31:37 crc kubenswrapper[4773]: I0122 13:31:37.061040 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc87ef65214e726406951248a615f2dbbeff2169066482d9b686bd602af303f3"
Jan 22 13:31:37 crc kubenswrapper[4773]: I0122 13:31:37.061395 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-cc02-account-create-update-5g6hq"
Jan 22 13:31:37 crc kubenswrapper[4773]: I0122 13:31:37.062545 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-qk9s8" event={"ID":"9b97e980-5122-434b-8a97-699aa0fc5b28","Type":"ContainerDied","Data":"f9c62145f9f662d7b19acd25e2b15d143f2e154412c9c7ca8927890b6bdbc06a"}
Jan 22 13:31:37 crc kubenswrapper[4773]: I0122 13:31:37.062591 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9c62145f9f662d7b19acd25e2b15d143f2e154412c9c7ca8927890b6bdbc06a"
Jan 22 13:31:37 crc kubenswrapper[4773]: I0122 13:31:37.062671 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-qk9s8"
Jan 22 13:31:37 crc kubenswrapper[4773]: I0122 13:31:37.884977 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-28mrp"]
Jan 22 13:31:37 crc kubenswrapper[4773]: E0122 13:31:37.885453 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d" containerName="mariadb-account-create-update"
Jan 22 13:31:37 crc kubenswrapper[4773]: I0122 13:31:37.885474 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d" containerName="mariadb-account-create-update"
Jan 22 13:31:37 crc kubenswrapper[4773]: E0122 13:31:37.885517 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b97e980-5122-434b-8a97-699aa0fc5b28" containerName="mariadb-database-create"
Jan 22 13:31:37 crc kubenswrapper[4773]: I0122 13:31:37.885525 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b97e980-5122-434b-8a97-699aa0fc5b28" containerName="mariadb-database-create"
Jan 22 13:31:37 crc kubenswrapper[4773]: I0122 13:31:37.885713 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d" containerName="mariadb-account-create-update"
Jan 22 13:31:37 crc kubenswrapper[4773]: I0122 13:31:37.885734 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b97e980-5122-434b-8a97-699aa0fc5b28" containerName="mariadb-database-create"
Jan 22 13:31:37 crc kubenswrapper[4773]: I0122 13:31:37.886405 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-28mrp"
Jan 22 13:31:37 crc kubenswrapper[4773]: I0122 13:31:37.896361 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-28mrp"]
Jan 22 13:31:37 crc kubenswrapper[4773]: I0122 13:31:37.900605 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data"
Jan 22 13:31:37 crc kubenswrapper[4773]: I0122 13:31:37.901058 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-26scd"
Jan 22 13:31:38 crc kubenswrapper[4773]: I0122 13:31:38.010353 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c9b2afff-3572-4d45-a41a-0fc77512da79-db-sync-config-data\") pod \"glance-db-sync-28mrp\" (UID: \"c9b2afff-3572-4d45-a41a-0fc77512da79\") " pod="openstack/glance-db-sync-28mrp"
Jan 22 13:31:38 crc kubenswrapper[4773]: I0122 13:31:38.010508 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqz6q\" (UniqueName: \"kubernetes.io/projected/c9b2afff-3572-4d45-a41a-0fc77512da79-kube-api-access-bqz6q\") pod \"glance-db-sync-28mrp\" (UID: \"c9b2afff-3572-4d45-a41a-0fc77512da79\") " pod="openstack/glance-db-sync-28mrp"
Jan 22 13:31:38 crc kubenswrapper[4773]: I0122 13:31:38.010575 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9b2afff-3572-4d45-a41a-0fc77512da79-combined-ca-bundle\") pod \"glance-db-sync-28mrp\" (UID: \"c9b2afff-3572-4d45-a41a-0fc77512da79\") " pod="openstack/glance-db-sync-28mrp"
Jan 22 13:31:38 crc kubenswrapper[4773]: I0122 13:31:38.010802 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9b2afff-3572-4d45-a41a-0fc77512da79-config-data\") pod \"glance-db-sync-28mrp\" (UID: \"c9b2afff-3572-4d45-a41a-0fc77512da79\") " pod="openstack/glance-db-sync-28mrp"
Jan 22 13:31:38 crc kubenswrapper[4773]: I0122 13:31:38.112923 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c9b2afff-3572-4d45-a41a-0fc77512da79-db-sync-config-data\") pod \"glance-db-sync-28mrp\" (UID: \"c9b2afff-3572-4d45-a41a-0fc77512da79\") " pod="openstack/glance-db-sync-28mrp"
Jan 22 13:31:38 crc kubenswrapper[4773]: I0122 13:31:38.112995 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqz6q\" (UniqueName: \"kubernetes.io/projected/c9b2afff-3572-4d45-a41a-0fc77512da79-kube-api-access-bqz6q\") pod \"glance-db-sync-28mrp\" (UID: \"c9b2afff-3572-4d45-a41a-0fc77512da79\") " pod="openstack/glance-db-sync-28mrp"
Jan 22 13:31:38 crc kubenswrapper[4773]: I0122 13:31:38.113041 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9b2afff-3572-4d45-a41a-0fc77512da79-combined-ca-bundle\") pod \"glance-db-sync-28mrp\" (UID: \"c9b2afff-3572-4d45-a41a-0fc77512da79\") " pod="openstack/glance-db-sync-28mrp"
Jan 22 13:31:38 crc kubenswrapper[4773]: I0122 13:31:38.113086 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9b2afff-3572-4d45-a41a-0fc77512da79-config-data\") pod \"glance-db-sync-28mrp\" (UID: \"c9b2afff-3572-4d45-a41a-0fc77512da79\") " pod="openstack/glance-db-sync-28mrp"
Jan 22 13:31:38 crc kubenswrapper[4773]: I0122 13:31:38.131600 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9b2afff-3572-4d45-a41a-0fc77512da79-combined-ca-bundle\") pod \"glance-db-sync-28mrp\" (UID: \"c9b2afff-3572-4d45-a41a-0fc77512da79\") " pod="openstack/glance-db-sync-28mrp"
Jan 22 13:31:38 crc kubenswrapper[4773]: I0122 13:31:38.131765 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c9b2afff-3572-4d45-a41a-0fc77512da79-db-sync-config-data\") pod \"glance-db-sync-28mrp\" (UID: \"c9b2afff-3572-4d45-a41a-0fc77512da79\") " pod="openstack/glance-db-sync-28mrp"
Jan 22 13:31:38 crc kubenswrapper[4773]: I0122 13:31:38.132154 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9b2afff-3572-4d45-a41a-0fc77512da79-config-data\") pod \"glance-db-sync-28mrp\" (UID: \"c9b2afff-3572-4d45-a41a-0fc77512da79\") " pod="openstack/glance-db-sync-28mrp"
Jan 22 13:31:38 crc kubenswrapper[4773]: I0122 13:31:38.134815 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqz6q\" (UniqueName: \"kubernetes.io/projected/c9b2afff-3572-4d45-a41a-0fc77512da79-kube-api-access-bqz6q\") pod \"glance-db-sync-28mrp\" (UID: \"c9b2afff-3572-4d45-a41a-0fc77512da79\") " pod="openstack/glance-db-sync-28mrp"
Jan 22 13:31:38 crc kubenswrapper[4773]: I0122 13:31:38.220553 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-28mrp"
Jan 22 13:31:38 crc kubenswrapper[4773]: I0122 13:31:38.738649 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-28mrp"]
Jan 22 13:31:38 crc kubenswrapper[4773]: W0122 13:31:38.745482 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc9b2afff_3572_4d45_a41a_0fc77512da79.slice/crio-da345a5b20f0b573ee93ba5984612f148bf86dabd1da5dc55185dc3706eb4936 WatchSource:0}: Error finding container da345a5b20f0b573ee93ba5984612f148bf86dabd1da5dc55185dc3706eb4936: Status 404 returned error can't find the container with id da345a5b20f0b573ee93ba5984612f148bf86dabd1da5dc55185dc3706eb4936
Jan 22 13:31:39 crc kubenswrapper[4773]: I0122 13:31:39.081068 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-28mrp" event={"ID":"c9b2afff-3572-4d45-a41a-0fc77512da79","Type":"ContainerStarted","Data":"da345a5b20f0b573ee93ba5984612f148bf86dabd1da5dc55185dc3706eb4936"}
Jan 22 13:31:40 crc kubenswrapper[4773]: I0122 13:31:40.093554 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-28mrp" event={"ID":"c9b2afff-3572-4d45-a41a-0fc77512da79","Type":"ContainerStarted","Data":"a7c0e14c9ba87fda15ef654a70a77c273bc712bb65e7901fbff0eac3d92a7fcb"}
Jan 22 13:31:40 crc kubenswrapper[4773]: I0122 13:31:40.114153 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-28mrp" podStartSLOduration=3.114130834 podStartE2EDuration="3.114130834s" podCreationTimestamp="2026-01-22 13:31:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:31:40.108006542 +0000 UTC m=+5807.686122367" watchObservedRunningTime="2026-01-22 13:31:40.114130834 +0000 UTC m=+5807.692246669"
Jan 22 13:31:43 crc kubenswrapper[4773]: I0122 13:31:43.147707 4773 generic.go:334] "Generic (PLEG): container finished" podID="c9b2afff-3572-4d45-a41a-0fc77512da79" containerID="a7c0e14c9ba87fda15ef654a70a77c273bc712bb65e7901fbff0eac3d92a7fcb" exitCode=0
Jan 22 13:31:43 crc kubenswrapper[4773]: I0122 13:31:43.147788 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-28mrp" event={"ID":"c9b2afff-3572-4d45-a41a-0fc77512da79","Type":"ContainerDied","Data":"a7c0e14c9ba87fda15ef654a70a77c273bc712bb65e7901fbff0eac3d92a7fcb"}
Jan 22 13:31:44 crc kubenswrapper[4773]: I0122 13:31:44.572599 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-28mrp"
Jan 22 13:31:44 crc kubenswrapper[4773]: I0122 13:31:44.761176 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9b2afff-3572-4d45-a41a-0fc77512da79-config-data\") pod \"c9b2afff-3572-4d45-a41a-0fc77512da79\" (UID: \"c9b2afff-3572-4d45-a41a-0fc77512da79\") "
Jan 22 13:31:44 crc kubenswrapper[4773]: I0122 13:31:44.761357 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqz6q\" (UniqueName: \"kubernetes.io/projected/c9b2afff-3572-4d45-a41a-0fc77512da79-kube-api-access-bqz6q\") pod \"c9b2afff-3572-4d45-a41a-0fc77512da79\" (UID: \"c9b2afff-3572-4d45-a41a-0fc77512da79\") "
Jan 22 13:31:44 crc kubenswrapper[4773]: I0122 13:31:44.761411 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c9b2afff-3572-4d45-a41a-0fc77512da79-db-sync-config-data\") pod \"c9b2afff-3572-4d45-a41a-0fc77512da79\" (UID: \"c9b2afff-3572-4d45-a41a-0fc77512da79\") "
Jan 22 13:31:44 crc kubenswrapper[4773]: I0122 13:31:44.761471 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9b2afff-3572-4d45-a41a-0fc77512da79-combined-ca-bundle\") pod \"c9b2afff-3572-4d45-a41a-0fc77512da79\" (UID: \"c9b2afff-3572-4d45-a41a-0fc77512da79\") "
Jan 22 13:31:44 crc kubenswrapper[4773]: I0122 13:31:44.770460 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9b2afff-3572-4d45-a41a-0fc77512da79-kube-api-access-bqz6q" (OuterVolumeSpecName: "kube-api-access-bqz6q") pod "c9b2afff-3572-4d45-a41a-0fc77512da79" (UID: "c9b2afff-3572-4d45-a41a-0fc77512da79"). InnerVolumeSpecName "kube-api-access-bqz6q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 13:31:44 crc kubenswrapper[4773]: I0122 13:31:44.770762 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9b2afff-3572-4d45-a41a-0fc77512da79-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "c9b2afff-3572-4d45-a41a-0fc77512da79" (UID: "c9b2afff-3572-4d45-a41a-0fc77512da79"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 13:31:44 crc kubenswrapper[4773]: I0122 13:31:44.807987 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9b2afff-3572-4d45-a41a-0fc77512da79-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c9b2afff-3572-4d45-a41a-0fc77512da79" (UID: "c9b2afff-3572-4d45-a41a-0fc77512da79"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 13:31:44 crc kubenswrapper[4773]: I0122 13:31:44.821711 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9b2afff-3572-4d45-a41a-0fc77512da79-config-data" (OuterVolumeSpecName: "config-data") pod "c9b2afff-3572-4d45-a41a-0fc77512da79" (UID: "c9b2afff-3572-4d45-a41a-0fc77512da79"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 13:31:44 crc kubenswrapper[4773]: I0122 13:31:44.864139 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqz6q\" (UniqueName: \"kubernetes.io/projected/c9b2afff-3572-4d45-a41a-0fc77512da79-kube-api-access-bqz6q\") on node \"crc\" DevicePath \"\""
Jan 22 13:31:44 crc kubenswrapper[4773]: I0122 13:31:44.864177 4773 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c9b2afff-3572-4d45-a41a-0fc77512da79-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 13:31:44 crc kubenswrapper[4773]: I0122 13:31:44.864188 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9b2afff-3572-4d45-a41a-0fc77512da79-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 13:31:44 crc kubenswrapper[4773]: I0122 13:31:44.864198 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9b2afff-3572-4d45-a41a-0fc77512da79-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.172957 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-28mrp" event={"ID":"c9b2afff-3572-4d45-a41a-0fc77512da79","Type":"ContainerDied","Data":"da345a5b20f0b573ee93ba5984612f148bf86dabd1da5dc55185dc3706eb4936"}
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.174017 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da345a5b20f0b573ee93ba5984612f148bf86dabd1da5dc55185dc3706eb4936"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.173051 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-28mrp"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.478500 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 22 13:31:45 crc kubenswrapper[4773]: E0122 13:31:45.478951 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9b2afff-3572-4d45-a41a-0fc77512da79" containerName="glance-db-sync"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.478967 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9b2afff-3572-4d45-a41a-0fc77512da79" containerName="glance-db-sync"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.479185 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9b2afff-3572-4d45-a41a-0fc77512da79" containerName="glance-db-sync"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.480356 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.485341 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.485704 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.486024 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-26scd"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.495794 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.580417 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62f6f30f-3724-4f26-921f-beabd40704a7-logs\") pod \"glance-default-external-api-0\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.580482 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pd8vs\" (UniqueName: \"kubernetes.io/projected/62f6f30f-3724-4f26-921f-beabd40704a7-kube-api-access-pd8vs\") pod \"glance-default-external-api-0\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.580538 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/62f6f30f-3724-4f26-921f-beabd40704a7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.580568 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62f6f30f-3724-4f26-921f-beabd40704a7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.580635 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62f6f30f-3724-4f26-921f-beabd40704a7-config-data\") pod \"glance-default-external-api-0\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.580678 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62f6f30f-3724-4f26-921f-beabd40704a7-scripts\") pod \"glance-default-external-api-0\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.624547 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f564d54dc-92l6s"]
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.626126 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f564d54dc-92l6s"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.638773 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f564d54dc-92l6s"]
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.681300 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/62f6f30f-3724-4f26-921f-beabd40704a7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.681350 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62f6f30f-3724-4f26-921f-beabd40704a7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.681377 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwkf2\" (UniqueName: \"kubernetes.io/projected/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-kube-api-access-vwkf2\") pod \"dnsmasq-dns-f564d54dc-92l6s\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " pod="openstack/dnsmasq-dns-f564d54dc-92l6s"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.681402 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-dns-svc\") pod \"dnsmasq-dns-f564d54dc-92l6s\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " pod="openstack/dnsmasq-dns-f564d54dc-92l6s"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.681442 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62f6f30f-3724-4f26-921f-beabd40704a7-config-data\") pod \"glance-default-external-api-0\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.681479 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62f6f30f-3724-4f26-921f-beabd40704a7-scripts\") pod \"glance-default-external-api-0\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.681499 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-config\") pod \"dnsmasq-dns-f564d54dc-92l6s\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " pod="openstack/dnsmasq-dns-f564d54dc-92l6s"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.681552 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-ovsdbserver-nb\") pod \"dnsmasq-dns-f564d54dc-92l6s\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " pod="openstack/dnsmasq-dns-f564d54dc-92l6s"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.681574 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-ovsdbserver-sb\") pod \"dnsmasq-dns-f564d54dc-92l6s\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " pod="openstack/dnsmasq-dns-f564d54dc-92l6s"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.681599 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62f6f30f-3724-4f26-921f-beabd40704a7-logs\") pod \"glance-default-external-api-0\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.681619 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pd8vs\" (UniqueName: \"kubernetes.io/projected/62f6f30f-3724-4f26-921f-beabd40704a7-kube-api-access-pd8vs\") pod \"glance-default-external-api-0\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.682809 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/62f6f30f-3724-4f26-921f-beabd40704a7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.683598 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62f6f30f-3724-4f26-921f-beabd40704a7-logs\") pod \"glance-default-external-api-0\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.691608 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62f6f30f-3724-4f26-921f-beabd40704a7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.691866 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62f6f30f-3724-4f26-921f-beabd40704a7-scripts\") pod \"glance-default-external-api-0\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.697049 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62f6f30f-3724-4f26-921f-beabd40704a7-config-data\") pod \"glance-default-external-api-0\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.699534 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pd8vs\" (UniqueName: \"kubernetes.io/projected/62f6f30f-3724-4f26-921f-beabd40704a7-kube-api-access-pd8vs\") pod \"glance-default-external-api-0\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.762864 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.764425 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.766237 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.775153 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.787348 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwkf2\" (UniqueName: \"kubernetes.io/projected/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-kube-api-access-vwkf2\") pod \"dnsmasq-dns-f564d54dc-92l6s\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " pod="openstack/dnsmasq-dns-f564d54dc-92l6s"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.787418 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-dns-svc\") pod \"dnsmasq-dns-f564d54dc-92l6s\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " pod="openstack/dnsmasq-dns-f564d54dc-92l6s"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.787492 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-config\") pod \"dnsmasq-dns-f564d54dc-92l6s\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " pod="openstack/dnsmasq-dns-f564d54dc-92l6s"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.787538 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-ovsdbserver-nb\") pod \"dnsmasq-dns-f564d54dc-92l6s\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " pod="openstack/dnsmasq-dns-f564d54dc-92l6s"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.787562 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-ovsdbserver-sb\") pod \"dnsmasq-dns-f564d54dc-92l6s\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " pod="openstack/dnsmasq-dns-f564d54dc-92l6s"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.788544 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-ovsdbserver-sb\") pod \"dnsmasq-dns-f564d54dc-92l6s\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " pod="openstack/dnsmasq-dns-f564d54dc-92l6s"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.789203 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-config\") pod \"dnsmasq-dns-f564d54dc-92l6s\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " pod="openstack/dnsmasq-dns-f564d54dc-92l6s"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.789387 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-dns-svc\") pod \"dnsmasq-dns-f564d54dc-92l6s\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " pod="openstack/dnsmasq-dns-f564d54dc-92l6s"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.789646 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-ovsdbserver-nb\") pod \"dnsmasq-dns-f564d54dc-92l6s\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " pod="openstack/dnsmasq-dns-f564d54dc-92l6s"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.803941 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.825884 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwkf2\" (UniqueName: \"kubernetes.io/projected/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-kube-api-access-vwkf2\") pod \"dnsmasq-dns-f564d54dc-92l6s\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " pod="openstack/dnsmasq-dns-f564d54dc-92l6s"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.890067 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " pod="openstack/glance-default-internal-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.890166 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " pod="openstack/glance-default-internal-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.890260 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " pod="openstack/glance-default-internal-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.890417 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnzl9\" (UniqueName: \"kubernetes.io/projected/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-kube-api-access-rnzl9\") pod \"glance-default-internal-api-0\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " pod="openstack/glance-default-internal-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.890537 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-logs\") pod \"glance-default-internal-api-0\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " pod="openstack/glance-default-internal-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.890601 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " pod="openstack/glance-default-internal-api-0"
Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.943228 4773 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-f564d54dc-92l6s" Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.992736 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.993111 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnzl9\" (UniqueName: \"kubernetes.io/projected/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-kube-api-access-rnzl9\") pod \"glance-default-internal-api-0\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.993198 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-logs\") pod \"glance-default-internal-api-0\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.993231 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.993251 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.993349 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.993414 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:45 crc kubenswrapper[4773]: I0122 13:31:45.993645 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-logs\") pod \"glance-default-internal-api-0\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:46 crc kubenswrapper[4773]: I0122 13:31:45.999064 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:46 crc kubenswrapper[4773]: I0122 13:31:46.001137 4773 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:46 crc kubenswrapper[4773]: I0122 13:31:46.002681 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:46 crc kubenswrapper[4773]: I0122 13:31:46.022683 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnzl9\" (UniqueName: \"kubernetes.io/projected/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-kube-api-access-rnzl9\") pod \"glance-default-internal-api-0\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:46 crc kubenswrapper[4773]: I0122 13:31:46.125090 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 13:31:46 crc kubenswrapper[4773]: I0122 13:31:46.402177 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 13:31:46 crc kubenswrapper[4773]: I0122 13:31:46.582007 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f564d54dc-92l6s"] Jan 22 13:31:46 crc kubenswrapper[4773]: I0122 13:31:46.772885 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 13:31:46 crc kubenswrapper[4773]: I0122 13:31:46.950833 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.132882 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6mkgj"] Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.134588 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6mkgj" Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.146579 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6mkgj"] Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.204533 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91","Type":"ContainerStarted","Data":"a90796c0f2477bd276e3f839024b167cd8c1a9baef2e235cf54281dd81db1dda"} Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.212659 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"62f6f30f-3724-4f26-921f-beabd40704a7","Type":"ContainerStarted","Data":"d53a78399016ecaebc2b7a2f47dae94969027a853500bc79ca8c4dc7ca90cb2b"} Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.215511 4773 generic.go:334] "Generic (PLEG): container finished" podID="c2a3be15-a6cc-4af3-b9c2-b902529c61ff" containerID="870c34f13e58cc0070107751fdb3c4e6e88adc01a42e6f7ba53bb5edea998008" exitCode=0 Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.215571 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f564d54dc-92l6s" event={"ID":"c2a3be15-a6cc-4af3-b9c2-b902529c61ff","Type":"ContainerDied","Data":"870c34f13e58cc0070107751fdb3c4e6e88adc01a42e6f7ba53bb5edea998008"} Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.215599 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f564d54dc-92l6s" event={"ID":"c2a3be15-a6cc-4af3-b9c2-b902529c61ff","Type":"ContainerStarted","Data":"fc96032704fd49543e188ed4e6bf8139fb433b2fc1881b9d3a68200e0627c293"} Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.231182 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92fb6822-1c99-476d-a223-dd1cc9f92ab8-catalog-content\") pod \"redhat-operators-6mkgj\" (UID: \"92fb6822-1c99-476d-a223-dd1cc9f92ab8\") " pod="openshift-marketplace/redhat-operators-6mkgj" Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.231556 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92fb6822-1c99-476d-a223-dd1cc9f92ab8-utilities\") pod \"redhat-operators-6mkgj\" (UID: \"92fb6822-1c99-476d-a223-dd1cc9f92ab8\") " pod="openshift-marketplace/redhat-operators-6mkgj" Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.231648 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzw8m\" (UniqueName: \"kubernetes.io/projected/92fb6822-1c99-476d-a223-dd1cc9f92ab8-kube-api-access-zzw8m\") pod \"redhat-operators-6mkgj\" (UID: \"92fb6822-1c99-476d-a223-dd1cc9f92ab8\") " pod="openshift-marketplace/redhat-operators-6mkgj" Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.333106 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92fb6822-1c99-476d-a223-dd1cc9f92ab8-utilities\") pod \"redhat-operators-6mkgj\" (UID: \"92fb6822-1c99-476d-a223-dd1cc9f92ab8\") " pod="openshift-marketplace/redhat-operators-6mkgj" Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.333251 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzw8m\" (UniqueName: 
\"kubernetes.io/projected/92fb6822-1c99-476d-a223-dd1cc9f92ab8-kube-api-access-zzw8m\") pod \"redhat-operators-6mkgj\" (UID: \"92fb6822-1c99-476d-a223-dd1cc9f92ab8\") " pod="openshift-marketplace/redhat-operators-6mkgj" Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.333313 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92fb6822-1c99-476d-a223-dd1cc9f92ab8-catalog-content\") pod \"redhat-operators-6mkgj\" (UID: \"92fb6822-1c99-476d-a223-dd1cc9f92ab8\") " pod="openshift-marketplace/redhat-operators-6mkgj" Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.334034 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92fb6822-1c99-476d-a223-dd1cc9f92ab8-catalog-content\") pod \"redhat-operators-6mkgj\" (UID: \"92fb6822-1c99-476d-a223-dd1cc9f92ab8\") " pod="openshift-marketplace/redhat-operators-6mkgj" Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.334881 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92fb6822-1c99-476d-a223-dd1cc9f92ab8-utilities\") pod \"redhat-operators-6mkgj\" (UID: \"92fb6822-1c99-476d-a223-dd1cc9f92ab8\") " pod="openshift-marketplace/redhat-operators-6mkgj" Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.351518 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzw8m\" (UniqueName: \"kubernetes.io/projected/92fb6822-1c99-476d-a223-dd1cc9f92ab8-kube-api-access-zzw8m\") pod \"redhat-operators-6mkgj\" (UID: \"92fb6822-1c99-476d-a223-dd1cc9f92ab8\") " pod="openshift-marketplace/redhat-operators-6mkgj" Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.454877 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6mkgj" Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.886710 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 13:31:47 crc kubenswrapper[4773]: I0122 13:31:47.930294 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6mkgj"] Jan 22 13:31:47 crc kubenswrapper[4773]: W0122 13:31:47.953963 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod92fb6822_1c99_476d_a223_dd1cc9f92ab8.slice/crio-28a1d481e20db684e8519845c499c6ebbe31f9cc964ceffc14c9eb465de7a5c5 WatchSource:0}: Error finding container 28a1d481e20db684e8519845c499c6ebbe31f9cc964ceffc14c9eb465de7a5c5: Status 404 returned error can't find the container with id 28a1d481e20db684e8519845c499c6ebbe31f9cc964ceffc14c9eb465de7a5c5 Jan 22 13:31:48 crc kubenswrapper[4773]: I0122 13:31:48.230209 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f564d54dc-92l6s" event={"ID":"c2a3be15-a6cc-4af3-b9c2-b902529c61ff","Type":"ContainerStarted","Data":"659a54590daa2c2223e8d67dede427df68af499875efa923f015e05294de6cae"} Jan 22 13:31:48 crc kubenswrapper[4773]: I0122 13:31:48.230493 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-f564d54dc-92l6s" Jan 22 13:31:48 crc kubenswrapper[4773]: I0122 13:31:48.232820 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91","Type":"ContainerStarted","Data":"77f6d321a7bb4e3b49b0cb946a8b433cbd9b882d71d343f8b3645a96806d994d"} Jan 22 13:31:48 crc kubenswrapper[4773]: I0122 13:31:48.234676 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6mkgj" event={"ID":"92fb6822-1c99-476d-a223-dd1cc9f92ab8","Type":"ContainerStarted","Data":"28a1d481e20db684e8519845c499c6ebbe31f9cc964ceffc14c9eb465de7a5c5"} Jan 22 13:31:48 crc kubenswrapper[4773]: I0122 13:31:48.236844 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"62f6f30f-3724-4f26-921f-beabd40704a7","Type":"ContainerStarted","Data":"b0bf9b24987bcbbe24746219a8211767bb2de96578f40362c67ae2dbd9a16c56"} Jan 22 13:31:48 crc kubenswrapper[4773]: I0122 13:31:48.253210 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-f564d54dc-92l6s" podStartSLOduration=3.253192238 podStartE2EDuration="3.253192238s" podCreationTimestamp="2026-01-22 13:31:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:31:48.249459673 +0000 UTC m=+5815.827575498" watchObservedRunningTime="2026-01-22 13:31:48.253192238 +0000 UTC m=+5815.831308063" Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.246362 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91","Type":"ContainerStarted","Data":"0152d68f3258c33a4b5c51aaf0cca580496e2113534e05a5ad6311e623f7bea0"} Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.246453 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8faa7f35-a2aa-4147-8e4f-7bd7f0883f91" containerName="glance-log" 
containerID="cri-o://77f6d321a7bb4e3b49b0cb946a8b433cbd9b882d71d343f8b3645a96806d994d" gracePeriod=30 Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.246795 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8faa7f35-a2aa-4147-8e4f-7bd7f0883f91" containerName="glance-httpd" containerID="cri-o://0152d68f3258c33a4b5c51aaf0cca580496e2113534e05a5ad6311e623f7bea0" gracePeriod=30 Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.248392 4773 generic.go:334] "Generic (PLEG): container finished" podID="92fb6822-1c99-476d-a223-dd1cc9f92ab8" containerID="dab51e20820a8a73093c9cf9dd283f0214b5850d5450dcea22ca3af55e6194f4" exitCode=0 Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.248475 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6mkgj" event={"ID":"92fb6822-1c99-476d-a223-dd1cc9f92ab8","Type":"ContainerDied","Data":"dab51e20820a8a73093c9cf9dd283f0214b5850d5450dcea22ca3af55e6194f4"} Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.250633 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.252069 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"62f6f30f-3724-4f26-921f-beabd40704a7","Type":"ContainerStarted","Data":"84f23f7ef901a1c256302b77a60f6f82063e013cc35d465ee6d63ca69c4702cf"} Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.252173 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="62f6f30f-3724-4f26-921f-beabd40704a7" containerName="glance-log" containerID="cri-o://b0bf9b24987bcbbe24746219a8211767bb2de96578f40362c67ae2dbd9a16c56" gracePeriod=30 Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.252309 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="62f6f30f-3724-4f26-921f-beabd40704a7" containerName="glance-httpd" containerID="cri-o://84f23f7ef901a1c256302b77a60f6f82063e013cc35d465ee6d63ca69c4702cf" gracePeriod=30 Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.287589 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.28756167 podStartE2EDuration="4.28756167s" podCreationTimestamp="2026-01-22 13:31:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:31:49.276264423 +0000 UTC m=+5816.854380248" watchObservedRunningTime="2026-01-22 13:31:49.28756167 +0000 UTC m=+5816.865677495" Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.330519 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.330492427 podStartE2EDuration="4.330492427s" podCreationTimestamp="2026-01-22 13:31:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:31:49.321330709 +0000 UTC m=+5816.899446554" watchObservedRunningTime="2026-01-22 13:31:49.330492427 +0000 UTC m=+5816.908608262" Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.908139 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.993725 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-config-data\") pod \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.994093 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-httpd-run\") pod \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.994121 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-logs\") pod \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.994219 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-combined-ca-bundle\") pod \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.994391 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnzl9\" (UniqueName: \"kubernetes.io/projected/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-kube-api-access-rnzl9\") pod \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.994510 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-scripts\") pod \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\" (UID: \"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91\") " Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.994626 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8faa7f35-a2aa-4147-8e4f-7bd7f0883f91" (UID: "8faa7f35-a2aa-4147-8e4f-7bd7f0883f91"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.994683 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-logs" (OuterVolumeSpecName: "logs") pod "8faa7f35-a2aa-4147-8e4f-7bd7f0883f91" (UID: "8faa7f35-a2aa-4147-8e4f-7bd7f0883f91"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.995535 4773 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.995566 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-logs\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:49 crc kubenswrapper[4773]: I0122 13:31:49.999945 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-kube-api-access-rnzl9" (OuterVolumeSpecName: "kube-api-access-rnzl9") pod "8faa7f35-a2aa-4147-8e4f-7bd7f0883f91" (UID: "8faa7f35-a2aa-4147-8e4f-7bd7f0883f91"). InnerVolumeSpecName "kube-api-access-rnzl9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.000686 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-scripts" (OuterVolumeSpecName: "scripts") pod "8faa7f35-a2aa-4147-8e4f-7bd7f0883f91" (UID: "8faa7f35-a2aa-4147-8e4f-7bd7f0883f91"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.019665 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8faa7f35-a2aa-4147-8e4f-7bd7f0883f91" (UID: "8faa7f35-a2aa-4147-8e4f-7bd7f0883f91"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.041958 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-config-data" (OuterVolumeSpecName: "config-data") pod "8faa7f35-a2aa-4147-8e4f-7bd7f0883f91" (UID: "8faa7f35-a2aa-4147-8e4f-7bd7f0883f91"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.096798 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.096831 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.096843 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.096854 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnzl9\" (UniqueName: \"kubernetes.io/projected/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91-kube-api-access-rnzl9\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.265026 4773 generic.go:334] "Generic (PLEG): container finished" podID="8faa7f35-a2aa-4147-8e4f-7bd7f0883f91" containerID="0152d68f3258c33a4b5c51aaf0cca580496e2113534e05a5ad6311e623f7bea0" exitCode=0 Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.265064 4773 generic.go:334] "Generic (PLEG): container finished" podID="8faa7f35-a2aa-4147-8e4f-7bd7f0883f91" containerID="77f6d321a7bb4e3b49b0cb946a8b433cbd9b882d71d343f8b3645a96806d994d" exitCode=143 Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.265071 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.265121 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91","Type":"ContainerDied","Data":"0152d68f3258c33a4b5c51aaf0cca580496e2113534e05a5ad6311e623f7bea0"} Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.265174 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91","Type":"ContainerDied","Data":"77f6d321a7bb4e3b49b0cb946a8b433cbd9b882d71d343f8b3645a96806d994d"} Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.265187 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8faa7f35-a2aa-4147-8e4f-7bd7f0883f91","Type":"ContainerDied","Data":"a90796c0f2477bd276e3f839024b167cd8c1a9baef2e235cf54281dd81db1dda"} Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.265206 4773 scope.go:117] "RemoveContainer" containerID="0152d68f3258c33a4b5c51aaf0cca580496e2113534e05a5ad6311e623f7bea0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.272797 4773 generic.go:334] "Generic (PLEG): container finished" podID="62f6f30f-3724-4f26-921f-beabd40704a7" containerID="84f23f7ef901a1c256302b77a60f6f82063e013cc35d465ee6d63ca69c4702cf" exitCode=0 Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.272833 4773 generic.go:334] "Generic (PLEG): container finished" podID="62f6f30f-3724-4f26-921f-beabd40704a7" containerID="b0bf9b24987bcbbe24746219a8211767bb2de96578f40362c67ae2dbd9a16c56" exitCode=143 Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.272878 4773 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"62f6f30f-3724-4f26-921f-beabd40704a7","Type":"ContainerDied","Data":"84f23f7ef901a1c256302b77a60f6f82063e013cc35d465ee6d63ca69c4702cf"} Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.272940 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"62f6f30f-3724-4f26-921f-beabd40704a7","Type":"ContainerDied","Data":"b0bf9b24987bcbbe24746219a8211767bb2de96578f40362c67ae2dbd9a16c56"} Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.316084 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.324714 4773 scope.go:117] "RemoveContainer" containerID="77f6d321a7bb4e3b49b0cb946a8b433cbd9b882d71d343f8b3645a96806d994d" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.346488 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.357695 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 13:31:50 crc kubenswrapper[4773]: E0122 13:31:50.358410 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8faa7f35-a2aa-4147-8e4f-7bd7f0883f91" containerName="glance-httpd" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.358430 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="8faa7f35-a2aa-4147-8e4f-7bd7f0883f91" containerName="glance-httpd" Jan 22 13:31:50 crc kubenswrapper[4773]: E0122 13:31:50.358446 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8faa7f35-a2aa-4147-8e4f-7bd7f0883f91" containerName="glance-log" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.358454 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="8faa7f35-a2aa-4147-8e4f-7bd7f0883f91" containerName="glance-log" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.358677 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="8faa7f35-a2aa-4147-8e4f-7bd7f0883f91" containerName="glance-log" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.358707 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="8faa7f35-a2aa-4147-8e4f-7bd7f0883f91" containerName="glance-httpd" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.360100 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.364167 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.364616 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.370277 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.399695 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.400043 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.400069 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0440d00c-cf93-434f-b150-00fba0e9b5c3-logs\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.400132 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.400153 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwggh\" (UniqueName: \"kubernetes.io/projected/0440d00c-cf93-434f-b150-00fba0e9b5c3-kube-api-access-dwggh\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.400171 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0440d00c-cf93-434f-b150-00fba0e9b5c3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.400194 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.458853 4773 scope.go:117] "RemoveContainer" 
containerID="0152d68f3258c33a4b5c51aaf0cca580496e2113534e05a5ad6311e623f7bea0" Jan 22 13:31:50 crc kubenswrapper[4773]: E0122 13:31:50.464419 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0152d68f3258c33a4b5c51aaf0cca580496e2113534e05a5ad6311e623f7bea0\": container with ID starting with 0152d68f3258c33a4b5c51aaf0cca580496e2113534e05a5ad6311e623f7bea0 not found: ID does not exist" containerID="0152d68f3258c33a4b5c51aaf0cca580496e2113534e05a5ad6311e623f7bea0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.464472 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0152d68f3258c33a4b5c51aaf0cca580496e2113534e05a5ad6311e623f7bea0"} err="failed to get container status \"0152d68f3258c33a4b5c51aaf0cca580496e2113534e05a5ad6311e623f7bea0\": rpc error: code = NotFound desc = could not find container \"0152d68f3258c33a4b5c51aaf0cca580496e2113534e05a5ad6311e623f7bea0\": container with ID starting with 0152d68f3258c33a4b5c51aaf0cca580496e2113534e05a5ad6311e623f7bea0 not found: ID does not exist" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.464499 4773 scope.go:117] "RemoveContainer" containerID="77f6d321a7bb4e3b49b0cb946a8b433cbd9b882d71d343f8b3645a96806d994d" Jan 22 13:31:50 crc kubenswrapper[4773]: E0122 13:31:50.465060 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77f6d321a7bb4e3b49b0cb946a8b433cbd9b882d71d343f8b3645a96806d994d\": container with ID starting with 77f6d321a7bb4e3b49b0cb946a8b433cbd9b882d71d343f8b3645a96806d994d not found: ID does not exist" containerID="77f6d321a7bb4e3b49b0cb946a8b433cbd9b882d71d343f8b3645a96806d994d" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.465117 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77f6d321a7bb4e3b49b0cb946a8b433cbd9b882d71d343f8b3645a96806d994d"} err="failed to get container status \"77f6d321a7bb4e3b49b0cb946a8b433cbd9b882d71d343f8b3645a96806d994d\": rpc error: code = NotFound desc = could not find container \"77f6d321a7bb4e3b49b0cb946a8b433cbd9b882d71d343f8b3645a96806d994d\": container with ID starting with 77f6d321a7bb4e3b49b0cb946a8b433cbd9b882d71d343f8b3645a96806d994d not found: ID does not exist" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.465154 4773 scope.go:117] "RemoveContainer" containerID="0152d68f3258c33a4b5c51aaf0cca580496e2113534e05a5ad6311e623f7bea0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.467975 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0152d68f3258c33a4b5c51aaf0cca580496e2113534e05a5ad6311e623f7bea0"} err="failed to get container status \"0152d68f3258c33a4b5c51aaf0cca580496e2113534e05a5ad6311e623f7bea0\": rpc error: code = NotFound desc = could not find container \"0152d68f3258c33a4b5c51aaf0cca580496e2113534e05a5ad6311e623f7bea0\": container with ID starting with 0152d68f3258c33a4b5c51aaf0cca580496e2113534e05a5ad6311e623f7bea0 not found: ID does not exist" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.468016 4773 scope.go:117] "RemoveContainer" containerID="77f6d321a7bb4e3b49b0cb946a8b433cbd9b882d71d343f8b3645a96806d994d" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.468508 4773 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"77f6d321a7bb4e3b49b0cb946a8b433cbd9b882d71d343f8b3645a96806d994d"} err="failed to get container status \"77f6d321a7bb4e3b49b0cb946a8b433cbd9b882d71d343f8b3645a96806d994d\": rpc error: code = NotFound desc = could not find container \"77f6d321a7bb4e3b49b0cb946a8b433cbd9b882d71d343f8b3645a96806d994d\": container with ID starting with 77f6d321a7bb4e3b49b0cb946a8b433cbd9b882d71d343f8b3645a96806d994d not found: ID does not exist" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.502145 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0440d00c-cf93-434f-b150-00fba0e9b5c3-logs\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.502292 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.502330 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwggh\" (UniqueName: \"kubernetes.io/projected/0440d00c-cf93-434f-b150-00fba0e9b5c3-kube-api-access-dwggh\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.502352 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0440d00c-cf93-434f-b150-00fba0e9b5c3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.502388 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.502472 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.502515 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.502780 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0440d00c-cf93-434f-b150-00fba0e9b5c3-logs\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc 
kubenswrapper[4773]: I0122 13:31:50.503107 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0440d00c-cf93-434f-b150-00fba0e9b5c3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.506694 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.506770 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.511399 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.525627 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.533554 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwggh\" (UniqueName: \"kubernetes.io/projected/0440d00c-cf93-434f-b150-00fba0e9b5c3-kube-api-access-dwggh\") pod \"glance-default-internal-api-0\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.604637 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.659404 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:31:50 crc kubenswrapper[4773]: E0122 13:31:50.659687 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.668985 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8faa7f35-a2aa-4147-8e4f-7bd7f0883f91" path="/var/lib/kubelet/pods/8faa7f35-a2aa-4147-8e4f-7bd7f0883f91/volumes" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.754748 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.805688 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62f6f30f-3724-4f26-921f-beabd40704a7-scripts\") pod \"62f6f30f-3724-4f26-921f-beabd40704a7\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.805741 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/62f6f30f-3724-4f26-921f-beabd40704a7-httpd-run\") pod \"62f6f30f-3724-4f26-921f-beabd40704a7\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.805858 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62f6f30f-3724-4f26-921f-beabd40704a7-combined-ca-bundle\") pod \"62f6f30f-3724-4f26-921f-beabd40704a7\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.805888 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62f6f30f-3724-4f26-921f-beabd40704a7-logs\") pod \"62f6f30f-3724-4f26-921f-beabd40704a7\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.805972 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pd8vs\" (UniqueName: \"kubernetes.io/projected/62f6f30f-3724-4f26-921f-beabd40704a7-kube-api-access-pd8vs\") pod \"62f6f30f-3724-4f26-921f-beabd40704a7\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.805998 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62f6f30f-3724-4f26-921f-beabd40704a7-config-data\") pod \"62f6f30f-3724-4f26-921f-beabd40704a7\" (UID: \"62f6f30f-3724-4f26-921f-beabd40704a7\") " Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.806965 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62f6f30f-3724-4f26-921f-beabd40704a7-logs" (OuterVolumeSpecName: "logs") pod "62f6f30f-3724-4f26-921f-beabd40704a7" (UID: "62f6f30f-3724-4f26-921f-beabd40704a7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.807247 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62f6f30f-3724-4f26-921f-beabd40704a7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "62f6f30f-3724-4f26-921f-beabd40704a7" (UID: "62f6f30f-3724-4f26-921f-beabd40704a7"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.808514 4773 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/62f6f30f-3724-4f26-921f-beabd40704a7-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.808543 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62f6f30f-3724-4f26-921f-beabd40704a7-logs\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.822580 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62f6f30f-3724-4f26-921f-beabd40704a7-kube-api-access-pd8vs" (OuterVolumeSpecName: "kube-api-access-pd8vs") pod "62f6f30f-3724-4f26-921f-beabd40704a7" (UID: "62f6f30f-3724-4f26-921f-beabd40704a7"). InnerVolumeSpecName "kube-api-access-pd8vs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.822931 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62f6f30f-3724-4f26-921f-beabd40704a7-scripts" (OuterVolumeSpecName: "scripts") pod "62f6f30f-3724-4f26-921f-beabd40704a7" (UID: "62f6f30f-3724-4f26-921f-beabd40704a7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.857681 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62f6f30f-3724-4f26-921f-beabd40704a7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "62f6f30f-3724-4f26-921f-beabd40704a7" (UID: "62f6f30f-3724-4f26-921f-beabd40704a7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.893974 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62f6f30f-3724-4f26-921f-beabd40704a7-config-data" (OuterVolumeSpecName: "config-data") pod "62f6f30f-3724-4f26-921f-beabd40704a7" (UID: "62f6f30f-3724-4f26-921f-beabd40704a7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.911606 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62f6f30f-3724-4f26-921f-beabd40704a7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.911640 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pd8vs\" (UniqueName: \"kubernetes.io/projected/62f6f30f-3724-4f26-921f-beabd40704a7-kube-api-access-pd8vs\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.911651 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62f6f30f-3724-4f26-921f-beabd40704a7-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:50 crc kubenswrapper[4773]: I0122 13:31:50.911662 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62f6f30f-3724-4f26-921f-beabd40704a7-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.286593 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6mkgj" event={"ID":"92fb6822-1c99-476d-a223-dd1cc9f92ab8","Type":"ContainerStarted","Data":"ed3b486c5b463f38000b200d19aed40f8c53a70c685c2e2261d89b3d9f315ce1"} Jan 22 13:31:51 crc kubenswrapper[4773]: W0122 13:31:51.287458 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0440d00c_cf93_434f_b150_00fba0e9b5c3.slice/crio-2090e992faa73dee2aa6f9faf6299600b13f3c2b692dad81be681cbaa337d185 WatchSource:0}: Error finding container 2090e992faa73dee2aa6f9faf6299600b13f3c2b692dad81be681cbaa337d185: Status 404 returned error can't find the container with id 2090e992faa73dee2aa6f9faf6299600b13f3c2b692dad81be681cbaa337d185 Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.289869 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"62f6f30f-3724-4f26-921f-beabd40704a7","Type":"ContainerDied","Data":"d53a78399016ecaebc2b7a2f47dae94969027a853500bc79ca8c4dc7ca90cb2b"} Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.289901 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.289936 4773 scope.go:117] "RemoveContainer" containerID="84f23f7ef901a1c256302b77a60f6f82063e013cc35d465ee6d63ca69c4702cf" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.290008 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.334096 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.347731 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.350130 4773 scope.go:117] "RemoveContainer" containerID="b0bf9b24987bcbbe24746219a8211767bb2de96578f40362c67ae2dbd9a16c56" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.365848 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 13:31:51 crc kubenswrapper[4773]: E0122 13:31:51.366369 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62f6f30f-3724-4f26-921f-beabd40704a7" containerName="glance-log" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.366395 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="62f6f30f-3724-4f26-921f-beabd40704a7" containerName="glance-log" Jan 22 13:31:51 crc kubenswrapper[4773]: E0122 13:31:51.366419 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62f6f30f-3724-4f26-921f-beabd40704a7" containerName="glance-httpd" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.366428 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="62f6f30f-3724-4f26-921f-beabd40704a7" containerName="glance-httpd" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.366651 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="62f6f30f-3724-4f26-921f-beabd40704a7" containerName="glance-log" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.366676 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="62f6f30f-3724-4f26-921f-beabd40704a7" containerName="glance-httpd" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.374536 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.376140 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.377446 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.377632 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.522622 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-scripts\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.522716 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svhx7\" (UniqueName: \"kubernetes.io/projected/fa5d8b60-5b94-446a-83df-e780e1d74a73-kube-api-access-svhx7\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.522778 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-config-data\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.522827 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa5d8b60-5b94-446a-83df-e780e1d74a73-logs\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.522882 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.522907 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fa5d8b60-5b94-446a-83df-e780e1d74a73-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.522979 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.624457 4773 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa5d8b60-5b94-446a-83df-e780e1d74a73-logs\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.624538 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.624561 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fa5d8b60-5b94-446a-83df-e780e1d74a73-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.624618 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.624658 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-scripts\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.624704 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svhx7\" (UniqueName: \"kubernetes.io/projected/fa5d8b60-5b94-446a-83df-e780e1d74a73-kube-api-access-svhx7\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.624765 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-config-data\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.624949 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa5d8b60-5b94-446a-83df-e780e1d74a73-logs\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.625241 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fa5d8b60-5b94-446a-83df-e780e1d74a73-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.630577 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.631714 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-config-data\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.631814 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.633242 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-scripts\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.643108 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svhx7\" (UniqueName: \"kubernetes.io/projected/fa5d8b60-5b94-446a-83df-e780e1d74a73-kube-api-access-svhx7\") pod \"glance-default-external-api-0\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " pod="openstack/glance-default-external-api-0" Jan 22 13:31:51 crc kubenswrapper[4773]: I0122 13:31:51.778464 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 13:31:52 crc kubenswrapper[4773]: I0122 13:31:52.319311 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 13:31:52 crc kubenswrapper[4773]: I0122 13:31:52.321879 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0440d00c-cf93-434f-b150-00fba0e9b5c3","Type":"ContainerStarted","Data":"817efdefeb30cbe4de69662d0cee72869a434168ee1e7164da620008c9ce5008"} Jan 22 13:31:52 crc kubenswrapper[4773]: I0122 13:31:52.321987 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0440d00c-cf93-434f-b150-00fba0e9b5c3","Type":"ContainerStarted","Data":"2090e992faa73dee2aa6f9faf6299600b13f3c2b692dad81be681cbaa337d185"} Jan 22 13:31:52 crc kubenswrapper[4773]: W0122 13:31:52.320723 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa5d8b60_5b94_446a_83df_e780e1d74a73.slice/crio-ff3ef2a5c1d079662ff035b0246630e2eb48ecd347ea885f43cf9492a32c5936 WatchSource:0}: Error finding container ff3ef2a5c1d079662ff035b0246630e2eb48ecd347ea885f43cf9492a32c5936: Status 404 returned error can't find the container with id ff3ef2a5c1d079662ff035b0246630e2eb48ecd347ea885f43cf9492a32c5936 Jan 22 13:31:52 crc kubenswrapper[4773]: I0122 13:31:52.325603 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6mkgj" event={"ID":"92fb6822-1c99-476d-a223-dd1cc9f92ab8","Type":"ContainerDied","Data":"ed3b486c5b463f38000b200d19aed40f8c53a70c685c2e2261d89b3d9f315ce1"} Jan 22 13:31:52 crc kubenswrapper[4773]: I0122 13:31:52.325632 4773 generic.go:334] "Generic (PLEG): container finished" podID="92fb6822-1c99-476d-a223-dd1cc9f92ab8" containerID="ed3b486c5b463f38000b200d19aed40f8c53a70c685c2e2261d89b3d9f315ce1" exitCode=0 Jan 22 13:31:52 crc kubenswrapper[4773]: I0122 13:31:52.672418 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62f6f30f-3724-4f26-921f-beabd40704a7" path="/var/lib/kubelet/pods/62f6f30f-3724-4f26-921f-beabd40704a7/volumes" Jan 22 13:31:53 crc kubenswrapper[4773]: I0122 13:31:53.339423 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0440d00c-cf93-434f-b150-00fba0e9b5c3","Type":"ContainerStarted","Data":"5754b752babac5a412b9225d49fdd69116f56d67d9b2cb14e51416eeba7ad27d"} Jan 22 13:31:53 crc kubenswrapper[4773]: I0122 13:31:53.343551 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fa5d8b60-5b94-446a-83df-e780e1d74a73","Type":"ContainerStarted","Data":"c970860f98f655be88245c885168058e0391fa019530eda57b32e8f4dc167366"} Jan 22 13:31:53 crc kubenswrapper[4773]: I0122 13:31:53.343599 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fa5d8b60-5b94-446a-83df-e780e1d74a73","Type":"ContainerStarted","Data":"ff3ef2a5c1d079662ff035b0246630e2eb48ecd347ea885f43cf9492a32c5936"} Jan 22 13:31:53 crc kubenswrapper[4773]: I0122 13:31:53.349878 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6mkgj" event={"ID":"92fb6822-1c99-476d-a223-dd1cc9f92ab8","Type":"ContainerStarted","Data":"b3896968efaa794272fed8409869bd526e814a256ac638cad7692a9a2f5e5030"} Jan 22 13:31:53 crc kubenswrapper[4773]: I0122 
13:31:53.374254 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.374201629 podStartE2EDuration="3.374201629s" podCreationTimestamp="2026-01-22 13:31:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:31:53.360957276 +0000 UTC m=+5820.939073101" watchObservedRunningTime="2026-01-22 13:31:53.374201629 +0000 UTC m=+5820.952317474" Jan 22 13:31:53 crc kubenswrapper[4773]: I0122 13:31:53.391537 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6mkgj" podStartSLOduration=2.8846402209999997 podStartE2EDuration="6.391511565s" podCreationTimestamp="2026-01-22 13:31:47 +0000 UTC" firstStartedPulling="2026-01-22 13:31:49.250349584 +0000 UTC m=+5816.828465409" lastFinishedPulling="2026-01-22 13:31:52.757220928 +0000 UTC m=+5820.335336753" observedRunningTime="2026-01-22 13:31:53.38814336 +0000 UTC m=+5820.966259185" watchObservedRunningTime="2026-01-22 13:31:53.391511565 +0000 UTC m=+5820.969627390" Jan 22 13:31:54 crc kubenswrapper[4773]: I0122 13:31:54.366553 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fa5d8b60-5b94-446a-83df-e780e1d74a73","Type":"ContainerStarted","Data":"8eda4a136c8d5dd95e331ab944bd359cdd4640180be8b037255ea110865bd120"} Jan 22 13:31:54 crc kubenswrapper[4773]: I0122 13:31:54.386989 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.386967893 podStartE2EDuration="3.386967893s" podCreationTimestamp="2026-01-22 13:31:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:31:54.382772305 +0000 UTC m=+5821.960888140" watchObservedRunningTime="2026-01-22 13:31:54.386967893 +0000 UTC m=+5821.965083708" Jan 22 13:31:55 crc kubenswrapper[4773]: I0122 13:31:55.945477 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-f564d54dc-92l6s" Jan 22 13:31:56 crc kubenswrapper[4773]: I0122 13:31:56.002217 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86bfd78d95-7pw9h"] Jan 22 13:31:56 crc kubenswrapper[4773]: I0122 13:31:56.002549 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" podUID="13856aea-6a69-4cd9-b4b8-276d639956e7" containerName="dnsmasq-dns" containerID="cri-o://9a7665f59fb03a07996049d567d9ec7e30102b98ed27d9f32c5f60d6f423493e" gracePeriod=10 Jan 22 13:31:56 crc kubenswrapper[4773]: I0122 13:31:56.997936 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" podUID="13856aea-6a69-4cd9-b4b8-276d639956e7" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.55:5353: connect: connection refused" Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.455673 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6mkgj" Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.456010 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6mkgj" Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.495637 4773 generic.go:334] "Generic (PLEG): 
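
The pod_startup_latency_tracker records above report podStartE2EDuration as the gap between podCreationTimestamp and the observed running time. The glance-default-internal-api-0 numbers check out in this minimal Go sketch; the timestamp layout is assumed from the log text (it matches Go's default time formatting).

package main

import (
	"fmt"
	"time"
)

const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func main() {
	// Values copied from the record above.
	created, err := time.Parse(layout, "2026-01-22 13:31:50 +0000 UTC")
	if err != nil {
		panic(err)
	}
	observed, err := time.Parse(layout, "2026-01-22 13:31:53.374201629 +0000 UTC")
	if err != nil {
		panic(err)
	}
	// Prints 3.374201629s, matching podStartE2EDuration="3.374201629s".
	fmt.Println(observed.Sub(created))
}
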
container finished" podID="13856aea-6a69-4cd9-b4b8-276d639956e7" containerID="9a7665f59fb03a07996049d567d9ec7e30102b98ed27d9f32c5f60d6f423493e" exitCode=0 Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.495691 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" event={"ID":"13856aea-6a69-4cd9-b4b8-276d639956e7","Type":"ContainerDied","Data":"9a7665f59fb03a07996049d567d9ec7e30102b98ed27d9f32c5f60d6f423493e"} Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.495748 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" event={"ID":"13856aea-6a69-4cd9-b4b8-276d639956e7","Type":"ContainerDied","Data":"9e5f3d359182d37b12beeadb4a2d7049c7be6b8f3a2c3131bd1134b5f7ddd699"} Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.495759 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e5f3d359182d37b12beeadb4a2d7049c7be6b8f3a2c3131bd1134b5f7ddd699" Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.496063 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.648905 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-ovsdbserver-sb\") pod \"13856aea-6a69-4cd9-b4b8-276d639956e7\" (UID: \"13856aea-6a69-4cd9-b4b8-276d639956e7\") " Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.649032 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-config\") pod \"13856aea-6a69-4cd9-b4b8-276d639956e7\" (UID: \"13856aea-6a69-4cd9-b4b8-276d639956e7\") " Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.649059 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-ovsdbserver-nb\") pod \"13856aea-6a69-4cd9-b4b8-276d639956e7\" (UID: \"13856aea-6a69-4cd9-b4b8-276d639956e7\") " Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.649167 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-dns-svc\") pod \"13856aea-6a69-4cd9-b4b8-276d639956e7\" (UID: \"13856aea-6a69-4cd9-b4b8-276d639956e7\") " Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.649222 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvbtd\" (UniqueName: \"kubernetes.io/projected/13856aea-6a69-4cd9-b4b8-276d639956e7-kube-api-access-zvbtd\") pod \"13856aea-6a69-4cd9-b4b8-276d639956e7\" (UID: \"13856aea-6a69-4cd9-b4b8-276d639956e7\") " Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.658428 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13856aea-6a69-4cd9-b4b8-276d639956e7-kube-api-access-zvbtd" (OuterVolumeSpecName: "kube-api-access-zvbtd") pod "13856aea-6a69-4cd9-b4b8-276d639956e7" (UID: "13856aea-6a69-4cd9-b4b8-276d639956e7"). InnerVolumeSpecName "kube-api-access-zvbtd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.697835 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "13856aea-6a69-4cd9-b4b8-276d639956e7" (UID: "13856aea-6a69-4cd9-b4b8-276d639956e7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.704135 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-config" (OuterVolumeSpecName: "config") pod "13856aea-6a69-4cd9-b4b8-276d639956e7" (UID: "13856aea-6a69-4cd9-b4b8-276d639956e7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.704635 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "13856aea-6a69-4cd9-b4b8-276d639956e7" (UID: "13856aea-6a69-4cd9-b4b8-276d639956e7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.706305 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "13856aea-6a69-4cd9-b4b8-276d639956e7" (UID: "13856aea-6a69-4cd9-b4b8-276d639956e7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.751708 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.751745 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.751755 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.751765 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvbtd\" (UniqueName: \"kubernetes.io/projected/13856aea-6a69-4cd9-b4b8-276d639956e7-kube-api-access-zvbtd\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:57 crc kubenswrapper[4773]: I0122 13:31:57.751774 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/13856aea-6a69-4cd9-b4b8-276d639956e7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 13:31:58 crc kubenswrapper[4773]: I0122 13:31:58.504099 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86bfd78d95-7pw9h" Jan 22 13:31:58 crc kubenswrapper[4773]: I0122 13:31:58.513990 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6mkgj" podUID="92fb6822-1c99-476d-a223-dd1cc9f92ab8" containerName="registry-server" probeResult="failure" output=< Jan 22 13:31:58 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 13:31:58 crc kubenswrapper[4773]: > Jan 22 13:31:58 crc kubenswrapper[4773]: I0122 13:31:58.548182 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86bfd78d95-7pw9h"] Jan 22 13:31:58 crc kubenswrapper[4773]: I0122 13:31:58.558984 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86bfd78d95-7pw9h"] Jan 22 13:31:58 crc kubenswrapper[4773]: I0122 13:31:58.673657 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13856aea-6a69-4cd9-b4b8-276d639956e7" path="/var/lib/kubelet/pods/13856aea-6a69-4cd9-b4b8-276d639956e7/volumes" Jan 22 13:32:00 crc kubenswrapper[4773]: I0122 13:32:00.755787 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 22 13:32:00 crc kubenswrapper[4773]: I0122 13:32:00.756109 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 22 13:32:00 crc kubenswrapper[4773]: I0122 13:32:00.783081 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 22 13:32:00 crc kubenswrapper[4773]: I0122 13:32:00.806026 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 22 13:32:01 crc kubenswrapper[4773]: I0122 13:32:01.536091 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 22 13:32:01 crc kubenswrapper[4773]: I0122 13:32:01.536160 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 22 13:32:01 crc kubenswrapper[4773]: I0122 13:32:01.778677 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 22 13:32:01 crc kubenswrapper[4773]: I0122 13:32:01.778733 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 22 13:32:01 crc kubenswrapper[4773]: I0122 13:32:01.805971 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 22 13:32:01 crc kubenswrapper[4773]: I0122 13:32:01.821142 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 22 13:32:02 crc kubenswrapper[4773]: I0122 13:32:02.543412 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 22 13:32:02 crc kubenswrapper[4773]: I0122 13:32:02.543804 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 22 13:32:03 crc kubenswrapper[4773]: I0122 13:32:03.553380 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 22 13:32:03 crc kubenswrapper[4773]: I0122 13:32:03.553519 4773 prober_manager.go:312] "Failed to trigger a manual run" 
probe="Readiness" Jan 22 13:32:03 crc kubenswrapper[4773]: I0122 13:32:03.604996 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 22 13:32:03 crc kubenswrapper[4773]: I0122 13:32:03.658908 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:32:03 crc kubenswrapper[4773]: E0122 13:32:03.659194 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:32:04 crc kubenswrapper[4773]: I0122 13:32:04.585165 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 22 13:32:04 crc kubenswrapper[4773]: I0122 13:32:04.585730 4773 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 13:32:04 crc kubenswrapper[4773]: I0122 13:32:04.774494 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 22 13:32:04 crc kubenswrapper[4773]: I0122 13:32:04.976126 4773 scope.go:117] "RemoveContainer" containerID="1718aa9395072deb53bd3750ede88c266ed64896ea7f0c9d9b0614dcb7dce59f" Jan 22 13:32:05 crc kubenswrapper[4773]: I0122 13:32:05.015623 4773 scope.go:117] "RemoveContainer" containerID="d4fcb7f54800a45cfef6b302cda49d727782b0b3411ad6b7b244961d4ec521bb" Jan 22 13:32:05 crc kubenswrapper[4773]: I0122 13:32:05.073226 4773 scope.go:117] "RemoveContainer" containerID="eecca40acdc37965ab1832b81d359abdee0bd58e08f07e13a06f245b5c8b37b9" Jan 22 13:32:07 crc kubenswrapper[4773]: I0122 13:32:07.500667 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6mkgj" Jan 22 13:32:07 crc kubenswrapper[4773]: I0122 13:32:07.553179 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6mkgj" Jan 22 13:32:07 crc kubenswrapper[4773]: I0122 13:32:07.737986 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6mkgj"] Jan 22 13:32:08 crc kubenswrapper[4773]: I0122 13:32:08.596508 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6mkgj" podUID="92fb6822-1c99-476d-a223-dd1cc9f92ab8" containerName="registry-server" containerID="cri-o://b3896968efaa794272fed8409869bd526e814a256ac638cad7692a9a2f5e5030" gracePeriod=2 Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.031028 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6mkgj" Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.171504 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzw8m\" (UniqueName: \"kubernetes.io/projected/92fb6822-1c99-476d-a223-dd1cc9f92ab8-kube-api-access-zzw8m\") pod \"92fb6822-1c99-476d-a223-dd1cc9f92ab8\" (UID: \"92fb6822-1c99-476d-a223-dd1cc9f92ab8\") " Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.171952 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92fb6822-1c99-476d-a223-dd1cc9f92ab8-utilities\") pod \"92fb6822-1c99-476d-a223-dd1cc9f92ab8\" (UID: \"92fb6822-1c99-476d-a223-dd1cc9f92ab8\") " Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.172108 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92fb6822-1c99-476d-a223-dd1cc9f92ab8-catalog-content\") pod \"92fb6822-1c99-476d-a223-dd1cc9f92ab8\" (UID: \"92fb6822-1c99-476d-a223-dd1cc9f92ab8\") " Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.173065 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92fb6822-1c99-476d-a223-dd1cc9f92ab8-utilities" (OuterVolumeSpecName: "utilities") pod "92fb6822-1c99-476d-a223-dd1cc9f92ab8" (UID: "92fb6822-1c99-476d-a223-dd1cc9f92ab8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.177195 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92fb6822-1c99-476d-a223-dd1cc9f92ab8-kube-api-access-zzw8m" (OuterVolumeSpecName: "kube-api-access-zzw8m") pod "92fb6822-1c99-476d-a223-dd1cc9f92ab8" (UID: "92fb6822-1c99-476d-a223-dd1cc9f92ab8"). InnerVolumeSpecName "kube-api-access-zzw8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.277349 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzw8m\" (UniqueName: \"kubernetes.io/projected/92fb6822-1c99-476d-a223-dd1cc9f92ab8-kube-api-access-zzw8m\") on node \"crc\" DevicePath \"\"" Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.277645 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92fb6822-1c99-476d-a223-dd1cc9f92ab8-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.317006 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92fb6822-1c99-476d-a223-dd1cc9f92ab8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "92fb6822-1c99-476d-a223-dd1cc9f92ab8" (UID: "92fb6822-1c99-476d-a223-dd1cc9f92ab8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.379490 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92fb6822-1c99-476d-a223-dd1cc9f92ab8-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.607740 4773 generic.go:334] "Generic (PLEG): container finished" podID="92fb6822-1c99-476d-a223-dd1cc9f92ab8" containerID="b3896968efaa794272fed8409869bd526e814a256ac638cad7692a9a2f5e5030" exitCode=0 Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.607782 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6mkgj" event={"ID":"92fb6822-1c99-476d-a223-dd1cc9f92ab8","Type":"ContainerDied","Data":"b3896968efaa794272fed8409869bd526e814a256ac638cad7692a9a2f5e5030"} Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.607809 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6mkgj" event={"ID":"92fb6822-1c99-476d-a223-dd1cc9f92ab8","Type":"ContainerDied","Data":"28a1d481e20db684e8519845c499c6ebbe31f9cc964ceffc14c9eb465de7a5c5"} Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.607831 4773 scope.go:117] "RemoveContainer" containerID="b3896968efaa794272fed8409869bd526e814a256ac638cad7692a9a2f5e5030" Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.607993 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6mkgj" Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.641369 4773 scope.go:117] "RemoveContainer" containerID="ed3b486c5b463f38000b200d19aed40f8c53a70c685c2e2261d89b3d9f315ce1" Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.642962 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6mkgj"] Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.661776 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6mkgj"] Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.674527 4773 scope.go:117] "RemoveContainer" containerID="dab51e20820a8a73093c9cf9dd283f0214b5850d5450dcea22ca3af55e6194f4" Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.701193 4773 scope.go:117] "RemoveContainer" containerID="b3896968efaa794272fed8409869bd526e814a256ac638cad7692a9a2f5e5030" Jan 22 13:32:09 crc kubenswrapper[4773]: E0122 13:32:09.701668 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3896968efaa794272fed8409869bd526e814a256ac638cad7692a9a2f5e5030\": container with ID starting with b3896968efaa794272fed8409869bd526e814a256ac638cad7692a9a2f5e5030 not found: ID does not exist" containerID="b3896968efaa794272fed8409869bd526e814a256ac638cad7692a9a2f5e5030" Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.701712 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3896968efaa794272fed8409869bd526e814a256ac638cad7692a9a2f5e5030"} err="failed to get container status \"b3896968efaa794272fed8409869bd526e814a256ac638cad7692a9a2f5e5030\": rpc error: code = NotFound desc = could not find container \"b3896968efaa794272fed8409869bd526e814a256ac638cad7692a9a2f5e5030\": container with ID starting with b3896968efaa794272fed8409869bd526e814a256ac638cad7692a9a2f5e5030 not found: ID does not exist" Jan 22 13:32:09 crc 
kubenswrapper[4773]: I0122 13:32:09.701739 4773 scope.go:117] "RemoveContainer" containerID="ed3b486c5b463f38000b200d19aed40f8c53a70c685c2e2261d89b3d9f315ce1" Jan 22 13:32:09 crc kubenswrapper[4773]: E0122 13:32:09.702233 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed3b486c5b463f38000b200d19aed40f8c53a70c685c2e2261d89b3d9f315ce1\": container with ID starting with ed3b486c5b463f38000b200d19aed40f8c53a70c685c2e2261d89b3d9f315ce1 not found: ID does not exist" containerID="ed3b486c5b463f38000b200d19aed40f8c53a70c685c2e2261d89b3d9f315ce1" Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.702260 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed3b486c5b463f38000b200d19aed40f8c53a70c685c2e2261d89b3d9f315ce1"} err="failed to get container status \"ed3b486c5b463f38000b200d19aed40f8c53a70c685c2e2261d89b3d9f315ce1\": rpc error: code = NotFound desc = could not find container \"ed3b486c5b463f38000b200d19aed40f8c53a70c685c2e2261d89b3d9f315ce1\": container with ID starting with ed3b486c5b463f38000b200d19aed40f8c53a70c685c2e2261d89b3d9f315ce1 not found: ID does not exist" Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.702278 4773 scope.go:117] "RemoveContainer" containerID="dab51e20820a8a73093c9cf9dd283f0214b5850d5450dcea22ca3af55e6194f4" Jan 22 13:32:09 crc kubenswrapper[4773]: E0122 13:32:09.702574 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dab51e20820a8a73093c9cf9dd283f0214b5850d5450dcea22ca3af55e6194f4\": container with ID starting with dab51e20820a8a73093c9cf9dd283f0214b5850d5450dcea22ca3af55e6194f4 not found: ID does not exist" containerID="dab51e20820a8a73093c9cf9dd283f0214b5850d5450dcea22ca3af55e6194f4" Jan 22 13:32:09 crc kubenswrapper[4773]: I0122 13:32:09.702603 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dab51e20820a8a73093c9cf9dd283f0214b5850d5450dcea22ca3af55e6194f4"} err="failed to get container status \"dab51e20820a8a73093c9cf9dd283f0214b5850d5450dcea22ca3af55e6194f4\": rpc error: code = NotFound desc = could not find container \"dab51e20820a8a73093c9cf9dd283f0214b5850d5450dcea22ca3af55e6194f4\": container with ID starting with dab51e20820a8a73093c9cf9dd283f0214b5850d5450dcea22ca3af55e6194f4 not found: ID does not exist" Jan 22 13:32:10 crc kubenswrapper[4773]: I0122 13:32:10.674265 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92fb6822-1c99-476d-a223-dd1cc9f92ab8" path="/var/lib/kubelet/pods/92fb6822-1c99-476d-a223-dd1cc9f92ab8/volumes" Jan 22 13:32:12 crc kubenswrapper[4773]: I0122 13:32:12.829901 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-tlcxh"] Jan 22 13:32:12 crc kubenswrapper[4773]: E0122 13:32:12.830589 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92fb6822-1c99-476d-a223-dd1cc9f92ab8" containerName="registry-server" Jan 22 13:32:12 crc kubenswrapper[4773]: I0122 13:32:12.830604 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="92fb6822-1c99-476d-a223-dd1cc9f92ab8" containerName="registry-server" Jan 22 13:32:12 crc kubenswrapper[4773]: E0122 13:32:12.830616 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92fb6822-1c99-476d-a223-dd1cc9f92ab8" containerName="extract-content" Jan 22 13:32:12 crc kubenswrapper[4773]: I0122 13:32:12.830622 4773 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="92fb6822-1c99-476d-a223-dd1cc9f92ab8" containerName="extract-content" Jan 22 13:32:12 crc kubenswrapper[4773]: E0122 13:32:12.830634 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92fb6822-1c99-476d-a223-dd1cc9f92ab8" containerName="extract-utilities" Jan 22 13:32:12 crc kubenswrapper[4773]: I0122 13:32:12.830641 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="92fb6822-1c99-476d-a223-dd1cc9f92ab8" containerName="extract-utilities" Jan 22 13:32:12 crc kubenswrapper[4773]: E0122 13:32:12.830670 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13856aea-6a69-4cd9-b4b8-276d639956e7" containerName="init" Jan 22 13:32:12 crc kubenswrapper[4773]: I0122 13:32:12.830675 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="13856aea-6a69-4cd9-b4b8-276d639956e7" containerName="init" Jan 22 13:32:12 crc kubenswrapper[4773]: E0122 13:32:12.830687 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13856aea-6a69-4cd9-b4b8-276d639956e7" containerName="dnsmasq-dns" Jan 22 13:32:12 crc kubenswrapper[4773]: I0122 13:32:12.830693 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="13856aea-6a69-4cd9-b4b8-276d639956e7" containerName="dnsmasq-dns" Jan 22 13:32:12 crc kubenswrapper[4773]: I0122 13:32:12.830908 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="13856aea-6a69-4cd9-b4b8-276d639956e7" containerName="dnsmasq-dns" Jan 22 13:32:12 crc kubenswrapper[4773]: I0122 13:32:12.830936 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="92fb6822-1c99-476d-a223-dd1cc9f92ab8" containerName="registry-server" Jan 22 13:32:12 crc kubenswrapper[4773]: I0122 13:32:12.831647 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tlcxh" Jan 22 13:32:12 crc kubenswrapper[4773]: I0122 13:32:12.845804 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-tlcxh"] Jan 22 13:32:12 crc kubenswrapper[4773]: I0122 13:32:12.929674 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-70b4-account-create-update-vwxwn"] Jan 22 13:32:12 crc kubenswrapper[4773]: I0122 13:32:12.931506 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-70b4-account-create-update-vwxwn" Jan 22 13:32:12 crc kubenswrapper[4773]: I0122 13:32:12.934321 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Jan 22 13:32:12 crc kubenswrapper[4773]: I0122 13:32:12.939217 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-70b4-account-create-update-vwxwn"] Jan 22 13:32:12 crc kubenswrapper[4773]: I0122 13:32:12.962423 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrl5k\" (UniqueName: \"kubernetes.io/projected/98ea57e8-c88e-49c3-b559-60c51c38e4cc-kube-api-access-xrl5k\") pod \"placement-db-create-tlcxh\" (UID: \"98ea57e8-c88e-49c3-b559-60c51c38e4cc\") " pod="openstack/placement-db-create-tlcxh" Jan 22 13:32:12 crc kubenswrapper[4773]: I0122 13:32:12.962773 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/98ea57e8-c88e-49c3-b559-60c51c38e4cc-operator-scripts\") pod \"placement-db-create-tlcxh\" (UID: \"98ea57e8-c88e-49c3-b559-60c51c38e4cc\") " pod="openstack/placement-db-create-tlcxh" Jan 22 13:32:13 crc kubenswrapper[4773]: I0122 13:32:13.065299 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrqpw\" (UniqueName: \"kubernetes.io/projected/55fea942-2605-41d2-bb57-d93c99d5c4bb-kube-api-access-jrqpw\") pod \"placement-70b4-account-create-update-vwxwn\" (UID: \"55fea942-2605-41d2-bb57-d93c99d5c4bb\") " pod="openstack/placement-70b4-account-create-update-vwxwn" Jan 22 13:32:13 crc kubenswrapper[4773]: I0122 13:32:13.065390 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55fea942-2605-41d2-bb57-d93c99d5c4bb-operator-scripts\") pod \"placement-70b4-account-create-update-vwxwn\" (UID: \"55fea942-2605-41d2-bb57-d93c99d5c4bb\") " pod="openstack/placement-70b4-account-create-update-vwxwn" Jan 22 13:32:13 crc kubenswrapper[4773]: I0122 13:32:13.065446 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrl5k\" (UniqueName: \"kubernetes.io/projected/98ea57e8-c88e-49c3-b559-60c51c38e4cc-kube-api-access-xrl5k\") pod \"placement-db-create-tlcxh\" (UID: \"98ea57e8-c88e-49c3-b559-60c51c38e4cc\") " pod="openstack/placement-db-create-tlcxh" Jan 22 13:32:13 crc kubenswrapper[4773]: I0122 13:32:13.065542 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/98ea57e8-c88e-49c3-b559-60c51c38e4cc-operator-scripts\") pod \"placement-db-create-tlcxh\" (UID: \"98ea57e8-c88e-49c3-b559-60c51c38e4cc\") " pod="openstack/placement-db-create-tlcxh" Jan 22 13:32:13 crc kubenswrapper[4773]: I0122 13:32:13.066413 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/98ea57e8-c88e-49c3-b559-60c51c38e4cc-operator-scripts\") pod \"placement-db-create-tlcxh\" (UID: \"98ea57e8-c88e-49c3-b559-60c51c38e4cc\") " pod="openstack/placement-db-create-tlcxh" Jan 22 13:32:13 crc kubenswrapper[4773]: I0122 13:32:13.084876 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrl5k\" (UniqueName: 
\"kubernetes.io/projected/98ea57e8-c88e-49c3-b559-60c51c38e4cc-kube-api-access-xrl5k\") pod \"placement-db-create-tlcxh\" (UID: \"98ea57e8-c88e-49c3-b559-60c51c38e4cc\") " pod="openstack/placement-db-create-tlcxh" Jan 22 13:32:13 crc kubenswrapper[4773]: I0122 13:32:13.159188 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tlcxh" Jan 22 13:32:13 crc kubenswrapper[4773]: I0122 13:32:13.167400 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrqpw\" (UniqueName: \"kubernetes.io/projected/55fea942-2605-41d2-bb57-d93c99d5c4bb-kube-api-access-jrqpw\") pod \"placement-70b4-account-create-update-vwxwn\" (UID: \"55fea942-2605-41d2-bb57-d93c99d5c4bb\") " pod="openstack/placement-70b4-account-create-update-vwxwn" Jan 22 13:32:13 crc kubenswrapper[4773]: I0122 13:32:13.167452 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55fea942-2605-41d2-bb57-d93c99d5c4bb-operator-scripts\") pod \"placement-70b4-account-create-update-vwxwn\" (UID: \"55fea942-2605-41d2-bb57-d93c99d5c4bb\") " pod="openstack/placement-70b4-account-create-update-vwxwn" Jan 22 13:32:13 crc kubenswrapper[4773]: I0122 13:32:13.168227 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55fea942-2605-41d2-bb57-d93c99d5c4bb-operator-scripts\") pod \"placement-70b4-account-create-update-vwxwn\" (UID: \"55fea942-2605-41d2-bb57-d93c99d5c4bb\") " pod="openstack/placement-70b4-account-create-update-vwxwn" Jan 22 13:32:13 crc kubenswrapper[4773]: I0122 13:32:13.188347 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrqpw\" (UniqueName: \"kubernetes.io/projected/55fea942-2605-41d2-bb57-d93c99d5c4bb-kube-api-access-jrqpw\") pod \"placement-70b4-account-create-update-vwxwn\" (UID: \"55fea942-2605-41d2-bb57-d93c99d5c4bb\") " pod="openstack/placement-70b4-account-create-update-vwxwn" Jan 22 13:32:13 crc kubenswrapper[4773]: I0122 13:32:13.251055 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-70b4-account-create-update-vwxwn" Jan 22 13:32:13 crc kubenswrapper[4773]: I0122 13:32:13.612725 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-tlcxh"] Jan 22 13:32:13 crc kubenswrapper[4773]: W0122 13:32:13.618095 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod98ea57e8_c88e_49c3_b559_60c51c38e4cc.slice/crio-b3b546586fcc481e01f4b49a17c79ad9f2b2be54eacea363485e0713848729d6 WatchSource:0}: Error finding container b3b546586fcc481e01f4b49a17c79ad9f2b2be54eacea363485e0713848729d6: Status 404 returned error can't find the container with id b3b546586fcc481e01f4b49a17c79ad9f2b2be54eacea363485e0713848729d6 Jan 22 13:32:13 crc kubenswrapper[4773]: I0122 13:32:13.656629 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tlcxh" event={"ID":"98ea57e8-c88e-49c3-b559-60c51c38e4cc","Type":"ContainerStarted","Data":"b3b546586fcc481e01f4b49a17c79ad9f2b2be54eacea363485e0713848729d6"} Jan 22 13:32:13 crc kubenswrapper[4773]: I0122 13:32:13.712805 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-70b4-account-create-update-vwxwn"] Jan 22 13:32:13 crc kubenswrapper[4773]: W0122 13:32:13.718650 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55fea942_2605_41d2_bb57_d93c99d5c4bb.slice/crio-aa64085b512b23099e0f22f304c63279844c911e06ea3b3b6d74a0b2a13a6267 WatchSource:0}: Error finding container aa64085b512b23099e0f22f304c63279844c911e06ea3b3b6d74a0b2a13a6267: Status 404 returned error can't find the container with id aa64085b512b23099e0f22f304c63279844c911e06ea3b3b6d74a0b2a13a6267 Jan 22 13:32:14 crc kubenswrapper[4773]: I0122 13:32:14.669039 4773 generic.go:334] "Generic (PLEG): container finished" podID="55fea942-2605-41d2-bb57-d93c99d5c4bb" containerID="f023786d20596fa62037b57318b35bac9a95bfb1c95b50e36480b32abb111f9b" exitCode=0 Jan 22 13:32:14 crc kubenswrapper[4773]: I0122 13:32:14.670099 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-70b4-account-create-update-vwxwn" event={"ID":"55fea942-2605-41d2-bb57-d93c99d5c4bb","Type":"ContainerDied","Data":"f023786d20596fa62037b57318b35bac9a95bfb1c95b50e36480b32abb111f9b"} Jan 22 13:32:14 crc kubenswrapper[4773]: I0122 13:32:14.670125 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-70b4-account-create-update-vwxwn" event={"ID":"55fea942-2605-41d2-bb57-d93c99d5c4bb","Type":"ContainerStarted","Data":"aa64085b512b23099e0f22f304c63279844c911e06ea3b3b6d74a0b2a13a6267"} Jan 22 13:32:14 crc kubenswrapper[4773]: I0122 13:32:14.673083 4773 generic.go:334] "Generic (PLEG): container finished" podID="98ea57e8-c88e-49c3-b559-60c51c38e4cc" containerID="36faddcba4325b02c2eb374cc80a53b512107b7bd78e35cce56b7b647853cd9b" exitCode=0 Jan 22 13:32:14 crc kubenswrapper[4773]: I0122 13:32:14.673135 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tlcxh" event={"ID":"98ea57e8-c88e-49c3-b559-60c51c38e4cc","Type":"ContainerDied","Data":"36faddcba4325b02c2eb374cc80a53b512107b7bd78e35cce56b7b647853cd9b"} Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.111008 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-70b4-account-create-update-vwxwn" Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.122920 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tlcxh" Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.221557 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrqpw\" (UniqueName: \"kubernetes.io/projected/55fea942-2605-41d2-bb57-d93c99d5c4bb-kube-api-access-jrqpw\") pod \"55fea942-2605-41d2-bb57-d93c99d5c4bb\" (UID: \"55fea942-2605-41d2-bb57-d93c99d5c4bb\") " Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.221757 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/98ea57e8-c88e-49c3-b559-60c51c38e4cc-operator-scripts\") pod \"98ea57e8-c88e-49c3-b559-60c51c38e4cc\" (UID: \"98ea57e8-c88e-49c3-b559-60c51c38e4cc\") " Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.221894 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xrl5k\" (UniqueName: \"kubernetes.io/projected/98ea57e8-c88e-49c3-b559-60c51c38e4cc-kube-api-access-xrl5k\") pod \"98ea57e8-c88e-49c3-b559-60c51c38e4cc\" (UID: \"98ea57e8-c88e-49c3-b559-60c51c38e4cc\") " Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.222041 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55fea942-2605-41d2-bb57-d93c99d5c4bb-operator-scripts\") pod \"55fea942-2605-41d2-bb57-d93c99d5c4bb\" (UID: \"55fea942-2605-41d2-bb57-d93c99d5c4bb\") " Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.222737 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98ea57e8-c88e-49c3-b559-60c51c38e4cc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "98ea57e8-c88e-49c3-b559-60c51c38e4cc" (UID: "98ea57e8-c88e-49c3-b559-60c51c38e4cc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.222739 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55fea942-2605-41d2-bb57-d93c99d5c4bb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "55fea942-2605-41d2-bb57-d93c99d5c4bb" (UID: "55fea942-2605-41d2-bb57-d93c99d5c4bb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.226949 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55fea942-2605-41d2-bb57-d93c99d5c4bb-kube-api-access-jrqpw" (OuterVolumeSpecName: "kube-api-access-jrqpw") pod "55fea942-2605-41d2-bb57-d93c99d5c4bb" (UID: "55fea942-2605-41d2-bb57-d93c99d5c4bb"). InnerVolumeSpecName "kube-api-access-jrqpw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.227825 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98ea57e8-c88e-49c3-b559-60c51c38e4cc-kube-api-access-xrl5k" (OuterVolumeSpecName: "kube-api-access-xrl5k") pod "98ea57e8-c88e-49c3-b559-60c51c38e4cc" (UID: "98ea57e8-c88e-49c3-b559-60c51c38e4cc"). InnerVolumeSpecName "kube-api-access-xrl5k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.324325 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xrl5k\" (UniqueName: \"kubernetes.io/projected/98ea57e8-c88e-49c3-b559-60c51c38e4cc-kube-api-access-xrl5k\") on node \"crc\" DevicePath \"\"" Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.324368 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55fea942-2605-41d2-bb57-d93c99d5c4bb-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.324384 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrqpw\" (UniqueName: \"kubernetes.io/projected/55fea942-2605-41d2-bb57-d93c99d5c4bb-kube-api-access-jrqpw\") on node \"crc\" DevicePath \"\"" Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.324399 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/98ea57e8-c88e-49c3-b559-60c51c38e4cc-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.695929 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-tlcxh" Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.695974 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-tlcxh" event={"ID":"98ea57e8-c88e-49c3-b559-60c51c38e4cc","Type":"ContainerDied","Data":"b3b546586fcc481e01f4b49a17c79ad9f2b2be54eacea363485e0713848729d6"} Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.696047 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3b546586fcc481e01f4b49a17c79ad9f2b2be54eacea363485e0713848729d6" Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.697816 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-70b4-account-create-update-vwxwn" event={"ID":"55fea942-2605-41d2-bb57-d93c99d5c4bb","Type":"ContainerDied","Data":"aa64085b512b23099e0f22f304c63279844c911e06ea3b3b6d74a0b2a13a6267"} Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.697848 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aa64085b512b23099e0f22f304c63279844c911e06ea3b3b6d74a0b2a13a6267" Jan 22 13:32:16 crc kubenswrapper[4773]: I0122 13:32:16.698069 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-70b4-account-create-update-vwxwn" Jan 22 13:32:17 crc kubenswrapper[4773]: I0122 13:32:17.658131 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:32:17 crc kubenswrapper[4773]: E0122 13:32:17.658740 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.244384 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fdd6f7b5-8qdg8"] Jan 22 13:32:18 crc kubenswrapper[4773]: E0122 13:32:18.245255 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55fea942-2605-41d2-bb57-d93c99d5c4bb" containerName="mariadb-account-create-update" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.245278 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="55fea942-2605-41d2-bb57-d93c99d5c4bb" containerName="mariadb-account-create-update" Jan 22 13:32:18 crc kubenswrapper[4773]: E0122 13:32:18.245339 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98ea57e8-c88e-49c3-b559-60c51c38e4cc" containerName="mariadb-database-create" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.245350 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="98ea57e8-c88e-49c3-b559-60c51c38e4cc" containerName="mariadb-database-create" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.245563 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="55fea942-2605-41d2-bb57-d93c99d5c4bb" containerName="mariadb-account-create-update" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.245584 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="98ea57e8-c88e-49c3-b559-60c51c38e4cc" containerName="mariadb-database-create" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.249196 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.265803 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fdd6f7b5-8qdg8"] Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.302996 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-tmksn"] Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.307545 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-tmksn" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.310162 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.310576 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-ph45r" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.314300 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.321025 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-tmksn"] Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.364809 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jr7gh\" (UniqueName: \"kubernetes.io/projected/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-kube-api-access-jr7gh\") pod \"dnsmasq-dns-7fdd6f7b5-8qdg8\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.364887 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-ovsdbserver-sb\") pod \"dnsmasq-dns-7fdd6f7b5-8qdg8\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.364991 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-config\") pod \"dnsmasq-dns-7fdd6f7b5-8qdg8\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.365024 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-dns-svc\") pod \"dnsmasq-dns-7fdd6f7b5-8qdg8\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.365042 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-ovsdbserver-nb\") pod \"dnsmasq-dns-7fdd6f7b5-8qdg8\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.466602 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-config\") pod \"dnsmasq-dns-7fdd6f7b5-8qdg8\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.466671 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-dns-svc\") pod \"dnsmasq-dns-7fdd6f7b5-8qdg8\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.466696 
4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-ovsdbserver-nb\") pod \"dnsmasq-dns-7fdd6f7b5-8qdg8\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.466745 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jr7gh\" (UniqueName: \"kubernetes.io/projected/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-kube-api-access-jr7gh\") pod \"dnsmasq-dns-7fdd6f7b5-8qdg8\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.466786 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-config-data\") pod \"placement-db-sync-tmksn\" (UID: \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " pod="openstack/placement-db-sync-tmksn" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.466823 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-logs\") pod \"placement-db-sync-tmksn\" (UID: \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " pod="openstack/placement-db-sync-tmksn" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.466850 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-ovsdbserver-sb\") pod \"dnsmasq-dns-7fdd6f7b5-8qdg8\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.466890 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-scripts\") pod \"placement-db-sync-tmksn\" (UID: \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " pod="openstack/placement-db-sync-tmksn" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.466926 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8v6bs\" (UniqueName: \"kubernetes.io/projected/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-kube-api-access-8v6bs\") pod \"placement-db-sync-tmksn\" (UID: \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " pod="openstack/placement-db-sync-tmksn" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.467022 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-combined-ca-bundle\") pod \"placement-db-sync-tmksn\" (UID: \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " pod="openstack/placement-db-sync-tmksn" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.467698 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-config\") pod \"dnsmasq-dns-7fdd6f7b5-8qdg8\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.468445 4773 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-ovsdbserver-nb\") pod \"dnsmasq-dns-7fdd6f7b5-8qdg8\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.468526 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-ovsdbserver-sb\") pod \"dnsmasq-dns-7fdd6f7b5-8qdg8\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.468833 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-dns-svc\") pod \"dnsmasq-dns-7fdd6f7b5-8qdg8\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.494407 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jr7gh\" (UniqueName: \"kubernetes.io/projected/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-kube-api-access-jr7gh\") pod \"dnsmasq-dns-7fdd6f7b5-8qdg8\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.568647 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-combined-ca-bundle\") pod \"placement-db-sync-tmksn\" (UID: \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " pod="openstack/placement-db-sync-tmksn" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.568746 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-config-data\") pod \"placement-db-sync-tmksn\" (UID: \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " pod="openstack/placement-db-sync-tmksn" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.568775 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-logs\") pod \"placement-db-sync-tmksn\" (UID: \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " pod="openstack/placement-db-sync-tmksn" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.568810 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-scripts\") pod \"placement-db-sync-tmksn\" (UID: \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " pod="openstack/placement-db-sync-tmksn" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.568842 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8v6bs\" (UniqueName: \"kubernetes.io/projected/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-kube-api-access-8v6bs\") pod \"placement-db-sync-tmksn\" (UID: \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " pod="openstack/placement-db-sync-tmksn" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.570121 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-logs\") pod \"placement-db-sync-tmksn\" (UID: 
\"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " pod="openstack/placement-db-sync-tmksn" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.574767 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-scripts\") pod \"placement-db-sync-tmksn\" (UID: \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " pod="openstack/placement-db-sync-tmksn" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.575338 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-config-data\") pod \"placement-db-sync-tmksn\" (UID: \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " pod="openstack/placement-db-sync-tmksn" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.576805 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-combined-ca-bundle\") pod \"placement-db-sync-tmksn\" (UID: \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " pod="openstack/placement-db-sync-tmksn" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.577558 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.592832 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8v6bs\" (UniqueName: \"kubernetes.io/projected/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-kube-api-access-8v6bs\") pod \"placement-db-sync-tmksn\" (UID: \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " pod="openstack/placement-db-sync-tmksn" Jan 22 13:32:18 crc kubenswrapper[4773]: I0122 13:32:18.638017 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-tmksn" Jan 22 13:32:19 crc kubenswrapper[4773]: I0122 13:32:19.064778 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fdd6f7b5-8qdg8"] Jan 22 13:32:19 crc kubenswrapper[4773]: W0122 13:32:19.221582 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57ca37d9_91fa_4049_b3e0_d90b1b12ec11.slice/crio-b6835817b8f0ec3d20dad95e0e5dc5212d3a9faebe622ed4eeb2c687e5ddcc7e WatchSource:0}: Error finding container b6835817b8f0ec3d20dad95e0e5dc5212d3a9faebe622ed4eeb2c687e5ddcc7e: Status 404 returned error can't find the container with id b6835817b8f0ec3d20dad95e0e5dc5212d3a9faebe622ed4eeb2c687e5ddcc7e Jan 22 13:32:19 crc kubenswrapper[4773]: I0122 13:32:19.222908 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-tmksn"] Jan 22 13:32:19 crc kubenswrapper[4773]: I0122 13:32:19.756996 4773 generic.go:334] "Generic (PLEG): container finished" podID="d35443f6-ccfd-4bb1-b1e2-f1899be1b916" containerID="d2405f45091a425cc22317c171ff888915a10b6e93fcf3773e0cb11de4d5ab0a" exitCode=0 Jan 22 13:32:19 crc kubenswrapper[4773]: I0122 13:32:19.757188 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" event={"ID":"d35443f6-ccfd-4bb1-b1e2-f1899be1b916","Type":"ContainerDied","Data":"d2405f45091a425cc22317c171ff888915a10b6e93fcf3773e0cb11de4d5ab0a"} Jan 22 13:32:19 crc kubenswrapper[4773]: I0122 13:32:19.757338 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" event={"ID":"d35443f6-ccfd-4bb1-b1e2-f1899be1b916","Type":"ContainerStarted","Data":"771b2c1472693cb3075f79b09d6f34f1aad469cf4c94e943322fc213d1490fd2"} Jan 22 13:32:19 crc kubenswrapper[4773]: I0122 13:32:19.759926 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-tmksn" event={"ID":"57ca37d9-91fa-4049-b3e0-d90b1b12ec11","Type":"ContainerStarted","Data":"329c449ce303338e0507c2f42208035d62e3137c28fbea301712edfb46222a26"} Jan 22 13:32:19 crc kubenswrapper[4773]: I0122 13:32:19.759969 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-tmksn" event={"ID":"57ca37d9-91fa-4049-b3e0-d90b1b12ec11","Type":"ContainerStarted","Data":"b6835817b8f0ec3d20dad95e0e5dc5212d3a9faebe622ed4eeb2c687e5ddcc7e"} Jan 22 13:32:19 crc kubenswrapper[4773]: I0122 13:32:19.810696 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-tmksn" podStartSLOduration=1.810671106 podStartE2EDuration="1.810671106s" podCreationTimestamp="2026-01-22 13:32:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:32:19.798859304 +0000 UTC m=+5847.376975129" watchObservedRunningTime="2026-01-22 13:32:19.810671106 +0000 UTC m=+5847.388786931" Jan 22 13:32:20 crc kubenswrapper[4773]: I0122 13:32:20.771811 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" event={"ID":"d35443f6-ccfd-4bb1-b1e2-f1899be1b916","Type":"ContainerStarted","Data":"a2a90ccf4bd5fcbd2dacea3a1d716a5077c295770778c356968639f6cab9774f"} Jan 22 13:32:20 crc kubenswrapper[4773]: I0122 13:32:20.797368 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" podStartSLOduration=2.797344127 podStartE2EDuration="2.797344127s" 
podCreationTimestamp="2026-01-22 13:32:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:32:20.79391003 +0000 UTC m=+5848.372025865" watchObservedRunningTime="2026-01-22 13:32:20.797344127 +0000 UTC m=+5848.375459972" Jan 22 13:32:21 crc kubenswrapper[4773]: I0122 13:32:21.784679 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:32:22 crc kubenswrapper[4773]: I0122 13:32:22.798403 4773 generic.go:334] "Generic (PLEG): container finished" podID="57ca37d9-91fa-4049-b3e0-d90b1b12ec11" containerID="329c449ce303338e0507c2f42208035d62e3137c28fbea301712edfb46222a26" exitCode=0 Jan 22 13:32:22 crc kubenswrapper[4773]: I0122 13:32:22.798527 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-tmksn" event={"ID":"57ca37d9-91fa-4049-b3e0-d90b1b12ec11","Type":"ContainerDied","Data":"329c449ce303338e0507c2f42208035d62e3137c28fbea301712edfb46222a26"} Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.124863 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-tmksn" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.188671 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-logs\") pod \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\" (UID: \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.188830 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-combined-ca-bundle\") pod \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\" (UID: \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.188865 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8v6bs\" (UniqueName: \"kubernetes.io/projected/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-kube-api-access-8v6bs\") pod \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\" (UID: \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.188902 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-scripts\") pod \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\" (UID: \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.188922 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-config-data\") pod \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\" (UID: \"57ca37d9-91fa-4049-b3e0-d90b1b12ec11\") " Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.189819 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-logs" (OuterVolumeSpecName: "logs") pod "57ca37d9-91fa-4049-b3e0-d90b1b12ec11" (UID: "57ca37d9-91fa-4049-b3e0-d90b1b12ec11"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.200553 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-kube-api-access-8v6bs" (OuterVolumeSpecName: "kube-api-access-8v6bs") pod "57ca37d9-91fa-4049-b3e0-d90b1b12ec11" (UID: "57ca37d9-91fa-4049-b3e0-d90b1b12ec11"). InnerVolumeSpecName "kube-api-access-8v6bs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.203025 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-scripts" (OuterVolumeSpecName: "scripts") pod "57ca37d9-91fa-4049-b3e0-d90b1b12ec11" (UID: "57ca37d9-91fa-4049-b3e0-d90b1b12ec11"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.227984 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-config-data" (OuterVolumeSpecName: "config-data") pod "57ca37d9-91fa-4049-b3e0-d90b1b12ec11" (UID: "57ca37d9-91fa-4049-b3e0-d90b1b12ec11"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.228257 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "57ca37d9-91fa-4049-b3e0-d90b1b12ec11" (UID: "57ca37d9-91fa-4049-b3e0-d90b1b12ec11"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.291193 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-logs\") on node \"crc\" DevicePath \"\"" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.291258 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.291274 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8v6bs\" (UniqueName: \"kubernetes.io/projected/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-kube-api-access-8v6bs\") on node \"crc\" DevicePath \"\"" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.291297 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.291305 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57ca37d9-91fa-4049-b3e0-d90b1b12ec11-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.825556 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-tmksn" event={"ID":"57ca37d9-91fa-4049-b3e0-d90b1b12ec11","Type":"ContainerDied","Data":"b6835817b8f0ec3d20dad95e0e5dc5212d3a9faebe622ed4eeb2c687e5ddcc7e"} Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.825620 4773 pod_container_deletor.go:80] "Container not found in 
pod's containers" containerID="b6835817b8f0ec3d20dad95e0e5dc5212d3a9faebe622ed4eeb2c687e5ddcc7e" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.825673 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-tmksn" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.956519 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-7dc858bb76-pr6m5"] Jan 22 13:32:24 crc kubenswrapper[4773]: E0122 13:32:24.957082 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57ca37d9-91fa-4049-b3e0-d90b1b12ec11" containerName="placement-db-sync" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.957107 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="57ca37d9-91fa-4049-b3e0-d90b1b12ec11" containerName="placement-db-sync" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.957276 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="57ca37d9-91fa-4049-b3e0-d90b1b12ec11" containerName="placement-db-sync" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.958514 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.960680 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-ph45r" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.960948 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.960978 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.961189 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.968748 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Jan 22 13:32:24 crc kubenswrapper[4773]: I0122 13:32:24.977241 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7dc858bb76-pr6m5"] Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.107963 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e597eff2-b887-4521-8118-3ff1fb86742c-internal-tls-certs\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.108135 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e597eff2-b887-4521-8118-3ff1fb86742c-scripts\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.108200 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e597eff2-b887-4521-8118-3ff1fb86742c-combined-ca-bundle\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.108258 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e597eff2-b887-4521-8118-3ff1fb86742c-logs\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.108478 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e597eff2-b887-4521-8118-3ff1fb86742c-config-data\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.108604 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e597eff2-b887-4521-8118-3ff1fb86742c-public-tls-certs\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.108699 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdc49\" (UniqueName: \"kubernetes.io/projected/e597eff2-b887-4521-8118-3ff1fb86742c-kube-api-access-kdc49\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.210121 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e597eff2-b887-4521-8118-3ff1fb86742c-internal-tls-certs\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.210257 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e597eff2-b887-4521-8118-3ff1fb86742c-scripts\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.210313 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e597eff2-b887-4521-8118-3ff1fb86742c-combined-ca-bundle\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.210344 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e597eff2-b887-4521-8118-3ff1fb86742c-logs\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.210365 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e597eff2-b887-4521-8118-3ff1fb86742c-config-data\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.210406 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e597eff2-b887-4521-8118-3ff1fb86742c-public-tls-certs\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.210442 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdc49\" (UniqueName: \"kubernetes.io/projected/e597eff2-b887-4521-8118-3ff1fb86742c-kube-api-access-kdc49\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.211224 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e597eff2-b887-4521-8118-3ff1fb86742c-logs\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.214362 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e597eff2-b887-4521-8118-3ff1fb86742c-config-data\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.214926 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e597eff2-b887-4521-8118-3ff1fb86742c-internal-tls-certs\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.215014 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e597eff2-b887-4521-8118-3ff1fb86742c-combined-ca-bundle\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.215254 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e597eff2-b887-4521-8118-3ff1fb86742c-public-tls-certs\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.219353 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e597eff2-b887-4521-8118-3ff1fb86742c-scripts\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.229427 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdc49\" (UniqueName: \"kubernetes.io/projected/e597eff2-b887-4521-8118-3ff1fb86742c-kube-api-access-kdc49\") pod \"placement-7dc858bb76-pr6m5\" (UID: \"e597eff2-b887-4521-8118-3ff1fb86742c\") " pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.279091 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.733773 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7dc858bb76-pr6m5"] Jan 22 13:32:25 crc kubenswrapper[4773]: I0122 13:32:25.839461 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7dc858bb76-pr6m5" event={"ID":"e597eff2-b887-4521-8118-3ff1fb86742c","Type":"ContainerStarted","Data":"9e730fe7d8dd5ffc17794e79c49b3ac31dd53459c1660ac0e56f9d64c8119a46"} Jan 22 13:32:26 crc kubenswrapper[4773]: I0122 13:32:26.854363 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7dc858bb76-pr6m5" event={"ID":"e597eff2-b887-4521-8118-3ff1fb86742c","Type":"ContainerStarted","Data":"09ac75acd52dd70bd235f6cc8b1f91bc2e9e6bb4251d4e7447a6ed6a769f6d97"} Jan 22 13:32:26 crc kubenswrapper[4773]: I0122 13:32:26.854710 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7dc858bb76-pr6m5" event={"ID":"e597eff2-b887-4521-8118-3ff1fb86742c","Type":"ContainerStarted","Data":"18f48cb44591c81ee8aab4a87b2d91920f6319f081329feeac2f83a53f8d57aa"} Jan 22 13:32:26 crc kubenswrapper[4773]: I0122 13:32:26.854741 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:26 crc kubenswrapper[4773]: I0122 13:32:26.896524 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-7dc858bb76-pr6m5" podStartSLOduration=2.896484708 podStartE2EDuration="2.896484708s" podCreationTimestamp="2026-01-22 13:32:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:32:26.875137958 +0000 UTC m=+5854.453253833" watchObservedRunningTime="2026-01-22 13:32:26.896484708 +0000 UTC m=+5854.474600583" Jan 22 13:32:27 crc kubenswrapper[4773]: I0122 13:32:27.861949 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:28 crc kubenswrapper[4773]: I0122 13:32:28.579483 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:32:28 crc kubenswrapper[4773]: I0122 13:32:28.649096 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f564d54dc-92l6s"] Jan 22 13:32:28 crc kubenswrapper[4773]: I0122 13:32:28.649556 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-f564d54dc-92l6s" podUID="c2a3be15-a6cc-4af3-b9c2-b902529c61ff" containerName="dnsmasq-dns" containerID="cri-o://659a54590daa2c2223e8d67dede427df68af499875efa923f015e05294de6cae" gracePeriod=10 Jan 22 13:32:28 crc kubenswrapper[4773]: I0122 13:32:28.657926 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:32:28 crc kubenswrapper[4773]: E0122 13:32:28.658179 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:32:28 crc kubenswrapper[4773]: I0122 13:32:28.880498 4773 generic.go:334] "Generic 
(PLEG): container finished" podID="c2a3be15-a6cc-4af3-b9c2-b902529c61ff" containerID="659a54590daa2c2223e8d67dede427df68af499875efa923f015e05294de6cae" exitCode=0 Jan 22 13:32:28 crc kubenswrapper[4773]: I0122 13:32:28.881469 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f564d54dc-92l6s" event={"ID":"c2a3be15-a6cc-4af3-b9c2-b902529c61ff","Type":"ContainerDied","Data":"659a54590daa2c2223e8d67dede427df68af499875efa923f015e05294de6cae"} Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.181437 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f564d54dc-92l6s" Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.309458 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-dns-svc\") pod \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.309502 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-ovsdbserver-sb\") pod \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.309640 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwkf2\" (UniqueName: \"kubernetes.io/projected/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-kube-api-access-vwkf2\") pod \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.309707 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-ovsdbserver-nb\") pod \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.309791 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-config\") pod \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\" (UID: \"c2a3be15-a6cc-4af3-b9c2-b902529c61ff\") " Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.319757 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-kube-api-access-vwkf2" (OuterVolumeSpecName: "kube-api-access-vwkf2") pod "c2a3be15-a6cc-4af3-b9c2-b902529c61ff" (UID: "c2a3be15-a6cc-4af3-b9c2-b902529c61ff"). InnerVolumeSpecName "kube-api-access-vwkf2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.357526 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c2a3be15-a6cc-4af3-b9c2-b902529c61ff" (UID: "c2a3be15-a6cc-4af3-b9c2-b902529c61ff"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.358700 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c2a3be15-a6cc-4af3-b9c2-b902529c61ff" (UID: "c2a3be15-a6cc-4af3-b9c2-b902529c61ff"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.370738 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c2a3be15-a6cc-4af3-b9c2-b902529c61ff" (UID: "c2a3be15-a6cc-4af3-b9c2-b902529c61ff"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.372961 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-config" (OuterVolumeSpecName: "config") pod "c2a3be15-a6cc-4af3-b9c2-b902529c61ff" (UID: "c2a3be15-a6cc-4af3-b9c2-b902529c61ff"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.411789 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.411850 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.411863 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.411873 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.411887 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwkf2\" (UniqueName: \"kubernetes.io/projected/c2a3be15-a6cc-4af3-b9c2-b902529c61ff-kube-api-access-vwkf2\") on node \"crc\" DevicePath \"\"" Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.896642 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f564d54dc-92l6s" event={"ID":"c2a3be15-a6cc-4af3-b9c2-b902529c61ff","Type":"ContainerDied","Data":"fc96032704fd49543e188ed4e6bf8139fb433b2fc1881b9d3a68200e0627c293"} Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.896697 4773 scope.go:117] "RemoveContainer" containerID="659a54590daa2c2223e8d67dede427df68af499875efa923f015e05294de6cae" Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.896767 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f564d54dc-92l6s" Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.945608 4773 scope.go:117] "RemoveContainer" containerID="870c34f13e58cc0070107751fdb3c4e6e88adc01a42e6f7ba53bb5edea998008" Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.946092 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f564d54dc-92l6s"] Jan 22 13:32:29 crc kubenswrapper[4773]: I0122 13:32:29.956490 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f564d54dc-92l6s"] Jan 22 13:32:30 crc kubenswrapper[4773]: I0122 13:32:30.668048 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2a3be15-a6cc-4af3-b9c2-b902529c61ff" path="/var/lib/kubelet/pods/c2a3be15-a6cc-4af3-b9c2-b902529c61ff/volumes" Jan 22 13:32:43 crc kubenswrapper[4773]: I0122 13:32:43.659082 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:32:43 crc kubenswrapper[4773]: E0122 13:32:43.660156 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:32:55 crc kubenswrapper[4773]: I0122 13:32:55.658565 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:32:55 crc kubenswrapper[4773]: E0122 13:32:55.659227 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:32:56 crc kubenswrapper[4773]: I0122 13:32:56.260347 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:32:56 crc kubenswrapper[4773]: I0122 13:32:56.272886 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7dc858bb76-pr6m5" Jan 22 13:33:08 crc kubenswrapper[4773]: I0122 13:33:08.659995 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:33:08 crc kubenswrapper[4773]: E0122 13:33:08.660775 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.212309 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-9j8t9"] Jan 22 13:33:20 crc kubenswrapper[4773]: E0122 13:33:20.213154 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2a3be15-a6cc-4af3-b9c2-b902529c61ff" containerName="init" Jan 
22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.213168 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2a3be15-a6cc-4af3-b9c2-b902529c61ff" containerName="init" Jan 22 13:33:20 crc kubenswrapper[4773]: E0122 13:33:20.213188 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2a3be15-a6cc-4af3-b9c2-b902529c61ff" containerName="dnsmasq-dns" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.213194 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2a3be15-a6cc-4af3-b9c2-b902529c61ff" containerName="dnsmasq-dns" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.213399 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2a3be15-a6cc-4af3-b9c2-b902529c61ff" containerName="dnsmasq-dns" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.214103 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-9j8t9" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.222983 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-9j8t9"] Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.311541 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-c795l"] Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.313091 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-c795l" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.326698 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-c795l"] Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.334580 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-b3c5-account-create-update-q77bn"] Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.335713 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-b3c5-account-create-update-q77bn" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.338768 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.365276 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-b3c5-account-create-update-q77bn"] Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.368161 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b75d7951-484e-4300-980f-1d6fd90fafda-operator-scripts\") pod \"nova-api-db-create-9j8t9\" (UID: \"b75d7951-484e-4300-980f-1d6fd90fafda\") " pod="openstack/nova-api-db-create-9j8t9" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.368250 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2xnw\" (UniqueName: \"kubernetes.io/projected/b75d7951-484e-4300-980f-1d6fd90fafda-kube-api-access-r2xnw\") pod \"nova-api-db-create-9j8t9\" (UID: \"b75d7951-484e-4300-980f-1d6fd90fafda\") " pod="openstack/nova-api-db-create-9j8t9" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.428949 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-pd7zj"] Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.430138 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-pd7zj" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.437212 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-pd7zj"] Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.470278 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c964n\" (UniqueName: \"kubernetes.io/projected/454aa928-a6e0-4eaf-ab53-170bbed3d372-kube-api-access-c964n\") pod \"nova-api-b3c5-account-create-update-q77bn\" (UID: \"454aa928-a6e0-4eaf-ab53-170bbed3d372\") " pod="openstack/nova-api-b3c5-account-create-update-q77bn" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.470812 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b75d7951-484e-4300-980f-1d6fd90fafda-operator-scripts\") pod \"nova-api-db-create-9j8t9\" (UID: \"b75d7951-484e-4300-980f-1d6fd90fafda\") " pod="openstack/nova-api-db-create-9j8t9" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.470886 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2xnw\" (UniqueName: \"kubernetes.io/projected/b75d7951-484e-4300-980f-1d6fd90fafda-kube-api-access-r2xnw\") pod \"nova-api-db-create-9j8t9\" (UID: \"b75d7951-484e-4300-980f-1d6fd90fafda\") " pod="openstack/nova-api-db-create-9j8t9" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.471042 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phxjk\" (UniqueName: \"kubernetes.io/projected/317ed68c-c01c-46b2-b8d3-ba753fb1e963-kube-api-access-phxjk\") pod \"nova-cell0-db-create-c795l\" (UID: \"317ed68c-c01c-46b2-b8d3-ba753fb1e963\") " pod="openstack/nova-cell0-db-create-c795l" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.471099 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/454aa928-a6e0-4eaf-ab53-170bbed3d372-operator-scripts\") pod \"nova-api-b3c5-account-create-update-q77bn\" (UID: \"454aa928-a6e0-4eaf-ab53-170bbed3d372\") " pod="openstack/nova-api-b3c5-account-create-update-q77bn" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.471428 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/317ed68c-c01c-46b2-b8d3-ba753fb1e963-operator-scripts\") pod \"nova-cell0-db-create-c795l\" (UID: \"317ed68c-c01c-46b2-b8d3-ba753fb1e963\") " pod="openstack/nova-cell0-db-create-c795l" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.471851 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b75d7951-484e-4300-980f-1d6fd90fafda-operator-scripts\") pod \"nova-api-db-create-9j8t9\" (UID: \"b75d7951-484e-4300-980f-1d6fd90fafda\") " pod="openstack/nova-api-db-create-9j8t9" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.490229 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2xnw\" (UniqueName: \"kubernetes.io/projected/b75d7951-484e-4300-980f-1d6fd90fafda-kube-api-access-r2xnw\") pod \"nova-api-db-create-9j8t9\" (UID: \"b75d7951-484e-4300-980f-1d6fd90fafda\") " pod="openstack/nova-api-db-create-9j8t9" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.527006 4773 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-e9ca-account-create-update-8wgdg"] Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.530046 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-e9ca-account-create-update-8wgdg" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.532724 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.539050 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-e9ca-account-create-update-8wgdg"] Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.572865 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45217af6-253f-424f-b146-2f26a66fa9df-operator-scripts\") pod \"nova-cell1-db-create-pd7zj\" (UID: \"45217af6-253f-424f-b146-2f26a66fa9df\") " pod="openstack/nova-cell1-db-create-pd7zj" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.572943 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/317ed68c-c01c-46b2-b8d3-ba753fb1e963-operator-scripts\") pod \"nova-cell0-db-create-c795l\" (UID: \"317ed68c-c01c-46b2-b8d3-ba753fb1e963\") " pod="openstack/nova-cell0-db-create-c795l" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.573105 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c964n\" (UniqueName: \"kubernetes.io/projected/454aa928-a6e0-4eaf-ab53-170bbed3d372-kube-api-access-c964n\") pod \"nova-api-b3c5-account-create-update-q77bn\" (UID: \"454aa928-a6e0-4eaf-ab53-170bbed3d372\") " pod="openstack/nova-api-b3c5-account-create-update-q77bn" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.573465 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phxjk\" (UniqueName: \"kubernetes.io/projected/317ed68c-c01c-46b2-b8d3-ba753fb1e963-kube-api-access-phxjk\") pod \"nova-cell0-db-create-c795l\" (UID: \"317ed68c-c01c-46b2-b8d3-ba753fb1e963\") " pod="openstack/nova-cell0-db-create-c795l" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.573515 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/454aa928-a6e0-4eaf-ab53-170bbed3d372-operator-scripts\") pod \"nova-api-b3c5-account-create-update-q77bn\" (UID: \"454aa928-a6e0-4eaf-ab53-170bbed3d372\") " pod="openstack/nova-api-b3c5-account-create-update-q77bn" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.573585 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dprnf\" (UniqueName: \"kubernetes.io/projected/45217af6-253f-424f-b146-2f26a66fa9df-kube-api-access-dprnf\") pod \"nova-cell1-db-create-pd7zj\" (UID: \"45217af6-253f-424f-b146-2f26a66fa9df\") " pod="openstack/nova-cell1-db-create-pd7zj" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.573693 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/317ed68c-c01c-46b2-b8d3-ba753fb1e963-operator-scripts\") pod \"nova-cell0-db-create-c795l\" (UID: \"317ed68c-c01c-46b2-b8d3-ba753fb1e963\") " pod="openstack/nova-cell0-db-create-c795l" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 
13:33:20.574512 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/454aa928-a6e0-4eaf-ab53-170bbed3d372-operator-scripts\") pod \"nova-api-b3c5-account-create-update-q77bn\" (UID: \"454aa928-a6e0-4eaf-ab53-170bbed3d372\") " pod="openstack/nova-api-b3c5-account-create-update-q77bn" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.577697 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-9j8t9" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.597895 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c964n\" (UniqueName: \"kubernetes.io/projected/454aa928-a6e0-4eaf-ab53-170bbed3d372-kube-api-access-c964n\") pod \"nova-api-b3c5-account-create-update-q77bn\" (UID: \"454aa928-a6e0-4eaf-ab53-170bbed3d372\") " pod="openstack/nova-api-b3c5-account-create-update-q77bn" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.602264 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phxjk\" (UniqueName: \"kubernetes.io/projected/317ed68c-c01c-46b2-b8d3-ba753fb1e963-kube-api-access-phxjk\") pod \"nova-cell0-db-create-c795l\" (UID: \"317ed68c-c01c-46b2-b8d3-ba753fb1e963\") " pod="openstack/nova-cell0-db-create-c795l" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.637874 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-c795l" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.658098 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:33:20 crc kubenswrapper[4773]: E0122 13:33:20.658435 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.661705 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-b3c5-account-create-update-q77bn" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.674909 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45217af6-253f-424f-b146-2f26a66fa9df-operator-scripts\") pod \"nova-cell1-db-create-pd7zj\" (UID: \"45217af6-253f-424f-b146-2f26a66fa9df\") " pod="openstack/nova-cell1-db-create-pd7zj" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.674981 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tt6zs\" (UniqueName: \"kubernetes.io/projected/1f323d06-9ccd-47eb-9cc2-1195d6d87e75-kube-api-access-tt6zs\") pod \"nova-cell0-e9ca-account-create-update-8wgdg\" (UID: \"1f323d06-9ccd-47eb-9cc2-1195d6d87e75\") " pod="openstack/nova-cell0-e9ca-account-create-update-8wgdg" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.675217 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f323d06-9ccd-47eb-9cc2-1195d6d87e75-operator-scripts\") pod \"nova-cell0-e9ca-account-create-update-8wgdg\" (UID: \"1f323d06-9ccd-47eb-9cc2-1195d6d87e75\") " pod="openstack/nova-cell0-e9ca-account-create-update-8wgdg" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.675455 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dprnf\" (UniqueName: \"kubernetes.io/projected/45217af6-253f-424f-b146-2f26a66fa9df-kube-api-access-dprnf\") pod \"nova-cell1-db-create-pd7zj\" (UID: \"45217af6-253f-424f-b146-2f26a66fa9df\") " pod="openstack/nova-cell1-db-create-pd7zj" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.676734 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45217af6-253f-424f-b146-2f26a66fa9df-operator-scripts\") pod \"nova-cell1-db-create-pd7zj\" (UID: \"45217af6-253f-424f-b146-2f26a66fa9df\") " pod="openstack/nova-cell1-db-create-pd7zj" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.708135 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dprnf\" (UniqueName: \"kubernetes.io/projected/45217af6-253f-424f-b146-2f26a66fa9df-kube-api-access-dprnf\") pod \"nova-cell1-db-create-pd7zj\" (UID: \"45217af6-253f-424f-b146-2f26a66fa9df\") " pod="openstack/nova-cell1-db-create-pd7zj" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.743658 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-f139-account-create-update-ngwbq"] Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.748000 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f139-account-create-update-ngwbq" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.752181 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.754983 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-f139-account-create-update-ngwbq"] Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.756155 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-pd7zj" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.776636 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tt6zs\" (UniqueName: \"kubernetes.io/projected/1f323d06-9ccd-47eb-9cc2-1195d6d87e75-kube-api-access-tt6zs\") pod \"nova-cell0-e9ca-account-create-update-8wgdg\" (UID: \"1f323d06-9ccd-47eb-9cc2-1195d6d87e75\") " pod="openstack/nova-cell0-e9ca-account-create-update-8wgdg" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.776753 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f323d06-9ccd-47eb-9cc2-1195d6d87e75-operator-scripts\") pod \"nova-cell0-e9ca-account-create-update-8wgdg\" (UID: \"1f323d06-9ccd-47eb-9cc2-1195d6d87e75\") " pod="openstack/nova-cell0-e9ca-account-create-update-8wgdg" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.778473 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f323d06-9ccd-47eb-9cc2-1195d6d87e75-operator-scripts\") pod \"nova-cell0-e9ca-account-create-update-8wgdg\" (UID: \"1f323d06-9ccd-47eb-9cc2-1195d6d87e75\") " pod="openstack/nova-cell0-e9ca-account-create-update-8wgdg" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.801235 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tt6zs\" (UniqueName: \"kubernetes.io/projected/1f323d06-9ccd-47eb-9cc2-1195d6d87e75-kube-api-access-tt6zs\") pod \"nova-cell0-e9ca-account-create-update-8wgdg\" (UID: \"1f323d06-9ccd-47eb-9cc2-1195d6d87e75\") " pod="openstack/nova-cell0-e9ca-account-create-update-8wgdg" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.863478 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-e9ca-account-create-update-8wgdg" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.877939 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdcz2\" (UniqueName: \"kubernetes.io/projected/c08aea5e-4b5a-4973-8307-71c38dec2718-kube-api-access-pdcz2\") pod \"nova-cell1-f139-account-create-update-ngwbq\" (UID: \"c08aea5e-4b5a-4973-8307-71c38dec2718\") " pod="openstack/nova-cell1-f139-account-create-update-ngwbq" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.877999 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c08aea5e-4b5a-4973-8307-71c38dec2718-operator-scripts\") pod \"nova-cell1-f139-account-create-update-ngwbq\" (UID: \"c08aea5e-4b5a-4973-8307-71c38dec2718\") " pod="openstack/nova-cell1-f139-account-create-update-ngwbq" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.961763 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-9j8t9"] Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.986956 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdcz2\" (UniqueName: \"kubernetes.io/projected/c08aea5e-4b5a-4973-8307-71c38dec2718-kube-api-access-pdcz2\") pod \"nova-cell1-f139-account-create-update-ngwbq\" (UID: \"c08aea5e-4b5a-4973-8307-71c38dec2718\") " pod="openstack/nova-cell1-f139-account-create-update-ngwbq" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.987088 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c08aea5e-4b5a-4973-8307-71c38dec2718-operator-scripts\") pod \"nova-cell1-f139-account-create-update-ngwbq\" (UID: \"c08aea5e-4b5a-4973-8307-71c38dec2718\") " pod="openstack/nova-cell1-f139-account-create-update-ngwbq" Jan 22 13:33:20 crc kubenswrapper[4773]: I0122 13:33:20.988298 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c08aea5e-4b5a-4973-8307-71c38dec2718-operator-scripts\") pod \"nova-cell1-f139-account-create-update-ngwbq\" (UID: \"c08aea5e-4b5a-4973-8307-71c38dec2718\") " pod="openstack/nova-cell1-f139-account-create-update-ngwbq" Jan 22 13:33:21 crc kubenswrapper[4773]: I0122 13:33:21.065854 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdcz2\" (UniqueName: \"kubernetes.io/projected/c08aea5e-4b5a-4973-8307-71c38dec2718-kube-api-access-pdcz2\") pod \"nova-cell1-f139-account-create-update-ngwbq\" (UID: \"c08aea5e-4b5a-4973-8307-71c38dec2718\") " pod="openstack/nova-cell1-f139-account-create-update-ngwbq" Jan 22 13:33:21 crc kubenswrapper[4773]: I0122 13:33:21.181749 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-f139-account-create-update-ngwbq"
Jan 22 13:33:21 crc kubenswrapper[4773]: I0122 13:33:21.392171 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-9j8t9" event={"ID":"b75d7951-484e-4300-980f-1d6fd90fafda","Type":"ContainerStarted","Data":"27c7751213b7ac989f1b7deb8b6fe85da9524da940938db1e00c0e6b12441922"}
Jan 22 13:33:21 crc kubenswrapper[4773]: I0122 13:33:21.492445 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-b3c5-account-create-update-q77bn"]
Jan 22 13:33:21 crc kubenswrapper[4773]: I0122 13:33:21.577359 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-c795l"]
Jan 22 13:33:21 crc kubenswrapper[4773]: I0122 13:33:21.654475 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-pd7zj"]
Jan 22 13:33:21 crc kubenswrapper[4773]: I0122 13:33:21.739162 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-e9ca-account-create-update-8wgdg"]
Jan 22 13:33:21 crc kubenswrapper[4773]: I0122 13:33:21.832167 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-f139-account-create-update-ngwbq"]
Jan 22 13:33:21 crc kubenswrapper[4773]: W0122 13:33:21.842765 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc08aea5e_4b5a_4973_8307_71c38dec2718.slice/crio-f7f6861a5bd6ad700725103a2c5ae531dd41c2b2de27defb54234b7599d8fe57 WatchSource:0}: Error finding container f7f6861a5bd6ad700725103a2c5ae531dd41c2b2de27defb54234b7599d8fe57: Status 404 returned error can't find the container with id f7f6861a5bd6ad700725103a2c5ae531dd41c2b2de27defb54234b7599d8fe57
Jan 22 13:33:22 crc kubenswrapper[4773]: I0122 13:33:22.402728 4773 generic.go:334] "Generic (PLEG): container finished" podID="317ed68c-c01c-46b2-b8d3-ba753fb1e963" containerID="4c51bb3b636763d9a983c394a01fdb2b2207faf5ca471f7db77abd7ec9a76f93" exitCode=0
Jan 22 13:33:22 crc kubenswrapper[4773]: I0122 13:33:22.402839 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-c795l" event={"ID":"317ed68c-c01c-46b2-b8d3-ba753fb1e963","Type":"ContainerDied","Data":"4c51bb3b636763d9a983c394a01fdb2b2207faf5ca471f7db77abd7ec9a76f93"}
Jan 22 13:33:22 crc kubenswrapper[4773]: I0122 13:33:22.402998 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-c795l" event={"ID":"317ed68c-c01c-46b2-b8d3-ba753fb1e963","Type":"ContainerStarted","Data":"c03fa40fe9ded1f5891220225de8cdb83da60e6f9e8492fbfddac1cff38aba55"}
Jan 22 13:33:22 crc kubenswrapper[4773]: I0122 13:33:22.405517 4773 generic.go:334] "Generic (PLEG): container finished" podID="454aa928-a6e0-4eaf-ab53-170bbed3d372" containerID="4c321afd1b06b90404d62d5fa65b47ac50f2a4c0154dc8ae436154834c1721ba" exitCode=0
Jan 22 13:33:22 crc kubenswrapper[4773]: I0122 13:33:22.405614 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-b3c5-account-create-update-q77bn" event={"ID":"454aa928-a6e0-4eaf-ab53-170bbed3d372","Type":"ContainerDied","Data":"4c321afd1b06b90404d62d5fa65b47ac50f2a4c0154dc8ae436154834c1721ba"}
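The PLEG entries above carry their payload as event={...}, which is plain JSON: a Type of ContainerStarted or ContainerDied plus the container ID in Data. A sketch, with hypothetical helper names, that pairs those events by container ID to estimate lifetimes; for these short-lived db-create and account-create jobs the first relist often reports only ContainerDied for the job container itself, so the pairing mostly succeeds for pod sandboxes:

```python
import json
import re
from datetime import datetime

PLEG = re.compile(r'"SyncLoop \(PLEG\): event for pod" pod="(?P<pod>[^"]+)" event=(?P<ev>\{[^}]*\})')
TS = re.compile(r'[IWE]\d{4} (?P<ts>\d{2}:\d{2}:\d{2}\.\d+)')

def container_lifetimes(lines):
    """Yield (pod, container-id prefix, lifetime) for Started/Died pairs."""
    started = {}
    for line in lines:
        m, t = PLEG.search(line), TS.search(line)
        if not (m and t):
            continue
        when = datetime.strptime(t.group("ts"), "%H:%M:%S.%f")
        ev = json.loads(m.group("ev"))  # {"ID": pod UID, "Type": ..., "Data": container ID}
        if ev["Type"] == "ContainerStarted":
            started[ev["Data"]] = when
        elif ev["Type"] == "ContainerDied" and ev["Data"] in started:
            yield m.group("pod"), ev["Data"][:12], when - started.pop(ev["Data"])
```

Fed this window plus the teardown entries further below, this would report, for example, the ngwbq sandbox f7f6861a5bd6... living from its ContainerStarted at 13:33:22.412205 to its ContainerDied at 13:33:24.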
event={"ID":"454aa928-a6e0-4eaf-ab53-170bbed3d372","Type":"ContainerStarted","Data":"8f7e96c794c99768ca62c0aee6c2bfd5db4a316bdf3bc8d6c8610fd5be2be3e4"} Jan 22 13:33:22 crc kubenswrapper[4773]: I0122 13:33:22.408060 4773 generic.go:334] "Generic (PLEG): container finished" podID="45217af6-253f-424f-b146-2f26a66fa9df" containerID="4eae09eafdf8b1fd9443239c072d17d5ba48962677fb9907558b0d30ceec0a21" exitCode=0 Jan 22 13:33:22 crc kubenswrapper[4773]: I0122 13:33:22.408107 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pd7zj" event={"ID":"45217af6-253f-424f-b146-2f26a66fa9df","Type":"ContainerDied","Data":"4eae09eafdf8b1fd9443239c072d17d5ba48962677fb9907558b0d30ceec0a21"} Jan 22 13:33:22 crc kubenswrapper[4773]: I0122 13:33:22.408164 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pd7zj" event={"ID":"45217af6-253f-424f-b146-2f26a66fa9df","Type":"ContainerStarted","Data":"8d8ad0f4989d122d08cfcfb433fba431208434c859bce755655575d21b9741fa"} Jan 22 13:33:22 crc kubenswrapper[4773]: I0122 13:33:22.410063 4773 generic.go:334] "Generic (PLEG): container finished" podID="1f323d06-9ccd-47eb-9cc2-1195d6d87e75" containerID="03ab9ec0d99f4b6cbc63f93fa3e034e57653be93c4c60e6b910b6218a3373f70" exitCode=0 Jan 22 13:33:22 crc kubenswrapper[4773]: I0122 13:33:22.410102 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-e9ca-account-create-update-8wgdg" event={"ID":"1f323d06-9ccd-47eb-9cc2-1195d6d87e75","Type":"ContainerDied","Data":"03ab9ec0d99f4b6cbc63f93fa3e034e57653be93c4c60e6b910b6218a3373f70"} Jan 22 13:33:22 crc kubenswrapper[4773]: I0122 13:33:22.410143 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-e9ca-account-create-update-8wgdg" event={"ID":"1f323d06-9ccd-47eb-9cc2-1195d6d87e75","Type":"ContainerStarted","Data":"d169944beb4a2c1d5db059c81f52685be1f74e8f1c806fffdd4672b4139b488c"} Jan 22 13:33:22 crc kubenswrapper[4773]: I0122 13:33:22.412126 4773 generic.go:334] "Generic (PLEG): container finished" podID="c08aea5e-4b5a-4973-8307-71c38dec2718" containerID="41d17ec951c709ec52a04c0d579439533f01127f7683c9d2e2fbfbd23184ece7" exitCode=0 Jan 22 13:33:22 crc kubenswrapper[4773]: I0122 13:33:22.412177 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f139-account-create-update-ngwbq" event={"ID":"c08aea5e-4b5a-4973-8307-71c38dec2718","Type":"ContainerDied","Data":"41d17ec951c709ec52a04c0d579439533f01127f7683c9d2e2fbfbd23184ece7"} Jan 22 13:33:22 crc kubenswrapper[4773]: I0122 13:33:22.412205 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f139-account-create-update-ngwbq" event={"ID":"c08aea5e-4b5a-4973-8307-71c38dec2718","Type":"ContainerStarted","Data":"f7f6861a5bd6ad700725103a2c5ae531dd41c2b2de27defb54234b7599d8fe57"} Jan 22 13:33:22 crc kubenswrapper[4773]: I0122 13:33:22.415029 4773 generic.go:334] "Generic (PLEG): container finished" podID="b75d7951-484e-4300-980f-1d6fd90fafda" containerID="d6902b610a467b025db273ef0022bd28db2eabfc0e0030d639257578f658b528" exitCode=0 Jan 22 13:33:22 crc kubenswrapper[4773]: I0122 13:33:22.415056 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-9j8t9" event={"ID":"b75d7951-484e-4300-980f-1d6fd90fafda","Type":"ContainerDied","Data":"d6902b610a467b025db273ef0022bd28db2eabfc0e0030d639257578f658b528"} Jan 22 13:33:23 crc kubenswrapper[4773]: I0122 13:33:23.797077 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-b3c5-account-create-update-q77bn" Jan 22 13:33:23 crc kubenswrapper[4773]: I0122 13:33:23.860360 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/454aa928-a6e0-4eaf-ab53-170bbed3d372-operator-scripts\") pod \"454aa928-a6e0-4eaf-ab53-170bbed3d372\" (UID: \"454aa928-a6e0-4eaf-ab53-170bbed3d372\") " Jan 22 13:33:23 crc kubenswrapper[4773]: I0122 13:33:23.860434 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c964n\" (UniqueName: \"kubernetes.io/projected/454aa928-a6e0-4eaf-ab53-170bbed3d372-kube-api-access-c964n\") pod \"454aa928-a6e0-4eaf-ab53-170bbed3d372\" (UID: \"454aa928-a6e0-4eaf-ab53-170bbed3d372\") " Jan 22 13:33:23 crc kubenswrapper[4773]: I0122 13:33:23.860804 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/454aa928-a6e0-4eaf-ab53-170bbed3d372-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "454aa928-a6e0-4eaf-ab53-170bbed3d372" (UID: "454aa928-a6e0-4eaf-ab53-170bbed3d372"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:33:23 crc kubenswrapper[4773]: I0122 13:33:23.861933 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/454aa928-a6e0-4eaf-ab53-170bbed3d372-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:23 crc kubenswrapper[4773]: I0122 13:33:23.871803 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/454aa928-a6e0-4eaf-ab53-170bbed3d372-kube-api-access-c964n" (OuterVolumeSpecName: "kube-api-access-c964n") pod "454aa928-a6e0-4eaf-ab53-170bbed3d372" (UID: "454aa928-a6e0-4eaf-ab53-170bbed3d372"). InnerVolumeSpecName "kube-api-access-c964n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:33:23 crc kubenswrapper[4773]: I0122 13:33:23.964200 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c964n\" (UniqueName: \"kubernetes.io/projected/454aa928-a6e0-4eaf-ab53-170bbed3d372-kube-api-access-c964n\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.024551 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-e9ca-account-create-update-8wgdg" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.035177 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f139-account-create-update-ngwbq" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.046704 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-c795l" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.067588 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-pd7zj" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.068561 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdcz2\" (UniqueName: \"kubernetes.io/projected/c08aea5e-4b5a-4973-8307-71c38dec2718-kube-api-access-pdcz2\") pod \"c08aea5e-4b5a-4973-8307-71c38dec2718\" (UID: \"c08aea5e-4b5a-4973-8307-71c38dec2718\") " Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.070989 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c08aea5e-4b5a-4973-8307-71c38dec2718-operator-scripts\") pod \"c08aea5e-4b5a-4973-8307-71c38dec2718\" (UID: \"c08aea5e-4b5a-4973-8307-71c38dec2718\") " Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.071124 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f323d06-9ccd-47eb-9cc2-1195d6d87e75-operator-scripts\") pod \"1f323d06-9ccd-47eb-9cc2-1195d6d87e75\" (UID: \"1f323d06-9ccd-47eb-9cc2-1195d6d87e75\") " Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.071192 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tt6zs\" (UniqueName: \"kubernetes.io/projected/1f323d06-9ccd-47eb-9cc2-1195d6d87e75-kube-api-access-tt6zs\") pod \"1f323d06-9ccd-47eb-9cc2-1195d6d87e75\" (UID: \"1f323d06-9ccd-47eb-9cc2-1195d6d87e75\") " Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.071473 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c08aea5e-4b5a-4973-8307-71c38dec2718-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c08aea5e-4b5a-4973-8307-71c38dec2718" (UID: "c08aea5e-4b5a-4973-8307-71c38dec2718"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.071616 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-9j8t9" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.071637 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/317ed68c-c01c-46b2-b8d3-ba753fb1e963-operator-scripts\") pod \"317ed68c-c01c-46b2-b8d3-ba753fb1e963\" (UID: \"317ed68c-c01c-46b2-b8d3-ba753fb1e963\") " Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.073571 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phxjk\" (UniqueName: \"kubernetes.io/projected/317ed68c-c01c-46b2-b8d3-ba753fb1e963-kube-api-access-phxjk\") pod \"317ed68c-c01c-46b2-b8d3-ba753fb1e963\" (UID: \"317ed68c-c01c-46b2-b8d3-ba753fb1e963\") " Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.074324 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c08aea5e-4b5a-4973-8307-71c38dec2718-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.075352 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/317ed68c-c01c-46b2-b8d3-ba753fb1e963-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "317ed68c-c01c-46b2-b8d3-ba753fb1e963" (UID: "317ed68c-c01c-46b2-b8d3-ba753fb1e963"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.075908 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c08aea5e-4b5a-4973-8307-71c38dec2718-kube-api-access-pdcz2" (OuterVolumeSpecName: "kube-api-access-pdcz2") pod "c08aea5e-4b5a-4973-8307-71c38dec2718" (UID: "c08aea5e-4b5a-4973-8307-71c38dec2718"). InnerVolumeSpecName "kube-api-access-pdcz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.076180 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f323d06-9ccd-47eb-9cc2-1195d6d87e75-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1f323d06-9ccd-47eb-9cc2-1195d6d87e75" (UID: "1f323d06-9ccd-47eb-9cc2-1195d6d87e75"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.079547 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/317ed68c-c01c-46b2-b8d3-ba753fb1e963-kube-api-access-phxjk" (OuterVolumeSpecName: "kube-api-access-phxjk") pod "317ed68c-c01c-46b2-b8d3-ba753fb1e963" (UID: "317ed68c-c01c-46b2-b8d3-ba753fb1e963"). InnerVolumeSpecName "kube-api-access-phxjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.081263 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f323d06-9ccd-47eb-9cc2-1195d6d87e75-kube-api-access-tt6zs" (OuterVolumeSpecName: "kube-api-access-tt6zs") pod "1f323d06-9ccd-47eb-9cc2-1195d6d87e75" (UID: "1f323d06-9ccd-47eb-9cc2-1195d6d87e75"). InnerVolumeSpecName "kube-api-access-tt6zs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.182985 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2xnw\" (UniqueName: \"kubernetes.io/projected/b75d7951-484e-4300-980f-1d6fd90fafda-kube-api-access-r2xnw\") pod \"b75d7951-484e-4300-980f-1d6fd90fafda\" (UID: \"b75d7951-484e-4300-980f-1d6fd90fafda\") " Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.183119 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45217af6-253f-424f-b146-2f26a66fa9df-operator-scripts\") pod \"45217af6-253f-424f-b146-2f26a66fa9df\" (UID: \"45217af6-253f-424f-b146-2f26a66fa9df\") " Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.183164 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dprnf\" (UniqueName: \"kubernetes.io/projected/45217af6-253f-424f-b146-2f26a66fa9df-kube-api-access-dprnf\") pod \"45217af6-253f-424f-b146-2f26a66fa9df\" (UID: \"45217af6-253f-424f-b146-2f26a66fa9df\") " Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.183263 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b75d7951-484e-4300-980f-1d6fd90fafda-operator-scripts\") pod \"b75d7951-484e-4300-980f-1d6fd90fafda\" (UID: \"b75d7951-484e-4300-980f-1d6fd90fafda\") " Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.184544 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f323d06-9ccd-47eb-9cc2-1195d6d87e75-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.184577 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tt6zs\" (UniqueName: \"kubernetes.io/projected/1f323d06-9ccd-47eb-9cc2-1195d6d87e75-kube-api-access-tt6zs\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.184595 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/317ed68c-c01c-46b2-b8d3-ba753fb1e963-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.184613 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phxjk\" (UniqueName: \"kubernetes.io/projected/317ed68c-c01c-46b2-b8d3-ba753fb1e963-kube-api-access-phxjk\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.184627 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdcz2\" (UniqueName: \"kubernetes.io/projected/c08aea5e-4b5a-4973-8307-71c38dec2718-kube-api-access-pdcz2\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.185307 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b75d7951-484e-4300-980f-1d6fd90fafda-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b75d7951-484e-4300-980f-1d6fd90fafda" (UID: "b75d7951-484e-4300-980f-1d6fd90fafda"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.188699 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b75d7951-484e-4300-980f-1d6fd90fafda-kube-api-access-r2xnw" (OuterVolumeSpecName: "kube-api-access-r2xnw") pod "b75d7951-484e-4300-980f-1d6fd90fafda" (UID: "b75d7951-484e-4300-980f-1d6fd90fafda"). InnerVolumeSpecName "kube-api-access-r2xnw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.189191 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45217af6-253f-424f-b146-2f26a66fa9df-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "45217af6-253f-424f-b146-2f26a66fa9df" (UID: "45217af6-253f-424f-b146-2f26a66fa9df"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.191634 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45217af6-253f-424f-b146-2f26a66fa9df-kube-api-access-dprnf" (OuterVolumeSpecName: "kube-api-access-dprnf") pod "45217af6-253f-424f-b146-2f26a66fa9df" (UID: "45217af6-253f-424f-b146-2f26a66fa9df"). InnerVolumeSpecName "kube-api-access-dprnf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.286110 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2xnw\" (UniqueName: \"kubernetes.io/projected/b75d7951-484e-4300-980f-1d6fd90fafda-kube-api-access-r2xnw\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.286413 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45217af6-253f-424f-b146-2f26a66fa9df-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.286427 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dprnf\" (UniqueName: \"kubernetes.io/projected/45217af6-253f-424f-b146-2f26a66fa9df-kube-api-access-dprnf\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.286439 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b75d7951-484e-4300-980f-1d6fd90fafda-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.435592 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f139-account-create-update-ngwbq" event={"ID":"c08aea5e-4b5a-4973-8307-71c38dec2718","Type":"ContainerDied","Data":"f7f6861a5bd6ad700725103a2c5ae531dd41c2b2de27defb54234b7599d8fe57"} Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.435952 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7f6861a5bd6ad700725103a2c5ae531dd41c2b2de27defb54234b7599d8fe57" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.435636 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f139-account-create-update-ngwbq" Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.437392 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-9j8t9"
Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.437393 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-9j8t9" event={"ID":"b75d7951-484e-4300-980f-1d6fd90fafda","Type":"ContainerDied","Data":"27c7751213b7ac989f1b7deb8b6fe85da9524da940938db1e00c0e6b12441922"}
Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.437439 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27c7751213b7ac989f1b7deb8b6fe85da9524da940938db1e00c0e6b12441922"
Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.439080 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-b3c5-account-create-update-q77bn" event={"ID":"454aa928-a6e0-4eaf-ab53-170bbed3d372","Type":"ContainerDied","Data":"8f7e96c794c99768ca62c0aee6c2bfd5db4a316bdf3bc8d6c8610fd5be2be3e4"}
Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.439099 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-b3c5-account-create-update-q77bn"
Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.439107 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f7e96c794c99768ca62c0aee6c2bfd5db4a316bdf3bc8d6c8610fd5be2be3e4"
Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.441093 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-c795l" event={"ID":"317ed68c-c01c-46b2-b8d3-ba753fb1e963","Type":"ContainerDied","Data":"c03fa40fe9ded1f5891220225de8cdb83da60e6f9e8492fbfddac1cff38aba55"}
Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.441115 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-c795l"
Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.441133 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c03fa40fe9ded1f5891220225de8cdb83da60e6f9e8492fbfddac1cff38aba55"
Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.442690 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pd7zj" event={"ID":"45217af6-253f-424f-b146-2f26a66fa9df","Type":"ContainerDied","Data":"8d8ad0f4989d122d08cfcfb433fba431208434c859bce755655575d21b9741fa"}
Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.442716 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d8ad0f4989d122d08cfcfb433fba431208434c859bce755655575d21b9741fa"
Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.442768 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-pd7zj"
Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.444887 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-e9ca-account-create-update-8wgdg" event={"ID":"1f323d06-9ccd-47eb-9cc2-1195d6d87e75","Type":"ContainerDied","Data":"d169944beb4a2c1d5db059c81f52685be1f74e8f1c806fffdd4672b4139b488c"}
Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.444940 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d169944beb4a2c1d5db059c81f52685be1f74e8f1c806fffdd4672b4139b488c"
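Tying the teardown entries above back to the earlier mount phase: every MountVolume.SetUp succeeded for a job pod should be balanced by an UnmountVolume.TearDown succeeded and a closing "Volume detached" once the pod is deleted, keyed by the UniqueName plugin path. A small audit sketch under that assumption; the two regexes follow the escaping exactly as it appears in this capture (structured messages carry literal \" sequences, while the operation_generator.go:803 lines do not), and the function name is illustrative:

```python
import re

# UniqueName inside structured messages appears as \"kubernetes.io/...\" in this capture.
MOUNT = re.compile(r'MountVolume\.SetUp succeeded for volume .*?\(UniqueName: \\"(?P<u>[^\\"]+)\\"')
UNMOUNT = re.compile(r'UnmountVolume\.TearDown succeeded for volume "(?P<u>[^"]+)"')

def unbalanced_volumes(lines):
    """Return plugin paths that were set up but not torn down in this window."""
    live = set()
    for line in lines:
        for m in MOUNT.finditer(line):
            live.add(m.group("u"))
        for m in UNMOUNT.finditer(line):
            live.discard(m.group("u"))
    return live
```

Volumes belonging to pods that are still running at the end of the window (the conductor pods below, for instance) will legitimately remain in the returned set.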
Jan 22 13:33:24 crc kubenswrapper[4773]: I0122 13:33:24.444999 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-e9ca-account-create-update-8wgdg"
Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.751228 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-zw5ls"]
Jan 22 13:33:25 crc kubenswrapper[4773]: E0122 13:33:25.751717 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="454aa928-a6e0-4eaf-ab53-170bbed3d372" containerName="mariadb-account-create-update"
Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.751732 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="454aa928-a6e0-4eaf-ab53-170bbed3d372" containerName="mariadb-account-create-update"
Jan 22 13:33:25 crc kubenswrapper[4773]: E0122 13:33:25.751749 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="317ed68c-c01c-46b2-b8d3-ba753fb1e963" containerName="mariadb-database-create"
Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.751757 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="317ed68c-c01c-46b2-b8d3-ba753fb1e963" containerName="mariadb-database-create"
Jan 22 13:33:25 crc kubenswrapper[4773]: E0122 13:33:25.751771 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45217af6-253f-424f-b146-2f26a66fa9df" containerName="mariadb-database-create"
Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.751779 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="45217af6-253f-424f-b146-2f26a66fa9df" containerName="mariadb-database-create"
Jan 22 13:33:25 crc kubenswrapper[4773]: E0122 13:33:25.751796 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c08aea5e-4b5a-4973-8307-71c38dec2718" containerName="mariadb-account-create-update"
Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.751804 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c08aea5e-4b5a-4973-8307-71c38dec2718" containerName="mariadb-account-create-update"
Jan 22 13:33:25 crc kubenswrapper[4773]: E0122 13:33:25.751820 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f323d06-9ccd-47eb-9cc2-1195d6d87e75" containerName="mariadb-account-create-update"
Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.751827 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f323d06-9ccd-47eb-9cc2-1195d6d87e75" containerName="mariadb-account-create-update"
Jan 22 13:33:25 crc kubenswrapper[4773]: E0122 13:33:25.751848 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b75d7951-484e-4300-980f-1d6fd90fafda" containerName="mariadb-database-create"
Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.751855 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b75d7951-484e-4300-980f-1d6fd90fafda" containerName="mariadb-database-create"
Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.752079 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="b75d7951-484e-4300-980f-1d6fd90fafda" containerName="mariadb-database-create"
Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.752617 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="317ed68c-c01c-46b2-b8d3-ba753fb1e963" containerName="mariadb-database-create"
Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.752633 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="45217af6-253f-424f-b146-2f26a66fa9df" containerName="mariadb-database-create"
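The cpu_manager, state_mem, and memory_manager burst above is stale-state cleanup for the six just-deleted job pods; each (podUID, containerName) pair should show up once per manager, and the cpu_manager line is logged at E level even though the removal is routine here. A sketch that groups these entries so a manager that skipped a container would stand out; the grouping helper is illustrative:

```python
import re
from collections import defaultdict

STALE = re.compile(
    r'(?P<mgr>cpu_manager|state_mem|memory_manager)\.go:\d+\] '
    r'"[^"]*" podUID="(?P<uid>[0-9a-f-]+)" containerName="(?P<name>[^"]+)"'
)

def stale_state_by_container(lines):
    """Map (podUID, containerName) -> set of managers that cleaned it up."""
    seen = defaultdict(set)
    for line in lines:
        for m in STALE.finditer(line):
            seen[(m.group("uid"), m.group("name"))].add(m.group("mgr"))
    return seen
```

For the window above, each of the six pod UIDs should map to all three of cpu_manager, state_mem, and memory_manager.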
podUID="454aa928-a6e0-4eaf-ab53-170bbed3d372" containerName="mariadb-account-create-update" Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.752670 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="c08aea5e-4b5a-4973-8307-71c38dec2718" containerName="mariadb-account-create-update" Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.752685 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f323d06-9ccd-47eb-9cc2-1195d6d87e75" containerName="mariadb-account-create-update" Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.753521 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-zw5ls" Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.756793 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.757471 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-6m5sd" Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.760907 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.781007 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-zw5ls"] Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.822449 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14f6d50c-c34c-438d-9e1a-994b1948f410-config-data\") pod \"nova-cell0-conductor-db-sync-zw5ls\" (UID: \"14f6d50c-c34c-438d-9e1a-994b1948f410\") " pod="openstack/nova-cell0-conductor-db-sync-zw5ls" Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.822809 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14f6d50c-c34c-438d-9e1a-994b1948f410-scripts\") pod \"nova-cell0-conductor-db-sync-zw5ls\" (UID: \"14f6d50c-c34c-438d-9e1a-994b1948f410\") " pod="openstack/nova-cell0-conductor-db-sync-zw5ls" Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.822878 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmtvm\" (UniqueName: \"kubernetes.io/projected/14f6d50c-c34c-438d-9e1a-994b1948f410-kube-api-access-zmtvm\") pod \"nova-cell0-conductor-db-sync-zw5ls\" (UID: \"14f6d50c-c34c-438d-9e1a-994b1948f410\") " pod="openstack/nova-cell0-conductor-db-sync-zw5ls" Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.822947 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14f6d50c-c34c-438d-9e1a-994b1948f410-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-zw5ls\" (UID: \"14f6d50c-c34c-438d-9e1a-994b1948f410\") " pod="openstack/nova-cell0-conductor-db-sync-zw5ls" Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.924665 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmtvm\" (UniqueName: \"kubernetes.io/projected/14f6d50c-c34c-438d-9e1a-994b1948f410-kube-api-access-zmtvm\") pod \"nova-cell0-conductor-db-sync-zw5ls\" (UID: \"14f6d50c-c34c-438d-9e1a-994b1948f410\") " pod="openstack/nova-cell0-conductor-db-sync-zw5ls" Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 
13:33:25.924785 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14f6d50c-c34c-438d-9e1a-994b1948f410-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-zw5ls\" (UID: \"14f6d50c-c34c-438d-9e1a-994b1948f410\") " pod="openstack/nova-cell0-conductor-db-sync-zw5ls" Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.925693 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14f6d50c-c34c-438d-9e1a-994b1948f410-config-data\") pod \"nova-cell0-conductor-db-sync-zw5ls\" (UID: \"14f6d50c-c34c-438d-9e1a-994b1948f410\") " pod="openstack/nova-cell0-conductor-db-sync-zw5ls" Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.925756 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14f6d50c-c34c-438d-9e1a-994b1948f410-scripts\") pod \"nova-cell0-conductor-db-sync-zw5ls\" (UID: \"14f6d50c-c34c-438d-9e1a-994b1948f410\") " pod="openstack/nova-cell0-conductor-db-sync-zw5ls" Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.932144 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14f6d50c-c34c-438d-9e1a-994b1948f410-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-zw5ls\" (UID: \"14f6d50c-c34c-438d-9e1a-994b1948f410\") " pod="openstack/nova-cell0-conductor-db-sync-zw5ls" Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.933043 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14f6d50c-c34c-438d-9e1a-994b1948f410-scripts\") pod \"nova-cell0-conductor-db-sync-zw5ls\" (UID: \"14f6d50c-c34c-438d-9e1a-994b1948f410\") " pod="openstack/nova-cell0-conductor-db-sync-zw5ls" Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.943222 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14f6d50c-c34c-438d-9e1a-994b1948f410-config-data\") pod \"nova-cell0-conductor-db-sync-zw5ls\" (UID: \"14f6d50c-c34c-438d-9e1a-994b1948f410\") " pod="openstack/nova-cell0-conductor-db-sync-zw5ls" Jan 22 13:33:25 crc kubenswrapper[4773]: I0122 13:33:25.946777 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmtvm\" (UniqueName: \"kubernetes.io/projected/14f6d50c-c34c-438d-9e1a-994b1948f410-kube-api-access-zmtvm\") pod \"nova-cell0-conductor-db-sync-zw5ls\" (UID: \"14f6d50c-c34c-438d-9e1a-994b1948f410\") " pod="openstack/nova-cell0-conductor-db-sync-zw5ls" Jan 22 13:33:26 crc kubenswrapper[4773]: I0122 13:33:26.096515 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-zw5ls"
Jan 22 13:33:26 crc kubenswrapper[4773]: W0122 13:33:26.387130 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14f6d50c_c34c_438d_9e1a_994b1948f410.slice/crio-c0fb7478c8c16f699a2b9b668c3ed761328714a9838945ea92281af4e1e98aa3 WatchSource:0}: Error finding container c0fb7478c8c16f699a2b9b668c3ed761328714a9838945ea92281af4e1e98aa3: Status 404 returned error can't find the container with id c0fb7478c8c16f699a2b9b668c3ed761328714a9838945ea92281af4e1e98aa3
Jan 22 13:33:26 crc kubenswrapper[4773]: I0122 13:33:26.388496 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-zw5ls"]
Jan 22 13:33:26 crc kubenswrapper[4773]: I0122 13:33:26.464510 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-zw5ls" event={"ID":"14f6d50c-c34c-438d-9e1a-994b1948f410","Type":"ContainerStarted","Data":"c0fb7478c8c16f699a2b9b668c3ed761328714a9838945ea92281af4e1e98aa3"}
Jan 22 13:33:27 crc kubenswrapper[4773]: I0122 13:33:27.476380 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-zw5ls" event={"ID":"14f6d50c-c34c-438d-9e1a-994b1948f410","Type":"ContainerStarted","Data":"7cbaae31421963b6f28ca2af7da2a8f4379da82ea2e3c7d1aacd16bd6accdd8b"}
Jan 22 13:33:27 crc kubenswrapper[4773]: I0122 13:33:27.518176 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-zw5ls" podStartSLOduration=2.518150541 podStartE2EDuration="2.518150541s" podCreationTimestamp="2026-01-22 13:33:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:33:27.498367995 +0000 UTC m=+5915.076483850" watchObservedRunningTime="2026-01-22 13:33:27.518150541 +0000 UTC m=+5915.096266376"
Jan 22 13:33:32 crc kubenswrapper[4773]: I0122 13:33:32.546525 4773 generic.go:334] "Generic (PLEG): container finished" podID="14f6d50c-c34c-438d-9e1a-994b1948f410" containerID="7cbaae31421963b6f28ca2af7da2a8f4379da82ea2e3c7d1aacd16bd6accdd8b" exitCode=0
Jan 22 13:33:32 crc kubenswrapper[4773]: I0122 13:33:32.546612 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-zw5ls" event={"ID":"14f6d50c-c34c-438d-9e1a-994b1948f410","Type":"ContainerDied","Data":"7cbaae31421963b6f28ca2af7da2a8f4379da82ea2e3c7d1aacd16bd6accdd8b"}
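The pod_startup_latency_tracker entry above records the measured startup latency for nova-cell0-conductor-db-sync-zw5ls: podStartSLOduration=2.518 s, consistent with podCreationTimestamp at 13:33:25 and observedRunningTime at 13:33:27.50, and the zero-value pull timestamps suggest no image pull was needed. A sketch extracting these measurements from such lines; the field names follow the entry above, the helper name is illustrative:

```python
import re

SLO = re.compile(
    r'"Observed pod startup duration" pod="(?P<pod>[^"]+)" '
    r'podStartSLOduration=(?P<slo>[\d.]+) podStartE2EDuration="(?P<e2e>[^"]+)"'
)

def startup_durations(lines):
    """Yield (pod, SLO duration in seconds, end-to-end duration string)."""
    for line in lines:
        m = SLO.search(line)
        if m:
            yield m.group("pod"), float(m.group("slo")), m.group("e2e")
```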
Jan 22 13:33:33 crc kubenswrapper[4773]: I0122 13:33:33.874067 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-zw5ls"
Jan 22 13:33:33 crc kubenswrapper[4773]: I0122 13:33:33.897862 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14f6d50c-c34c-438d-9e1a-994b1948f410-config-data\") pod \"14f6d50c-c34c-438d-9e1a-994b1948f410\" (UID: \"14f6d50c-c34c-438d-9e1a-994b1948f410\") "
Jan 22 13:33:33 crc kubenswrapper[4773]: I0122 13:33:33.897932 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14f6d50c-c34c-438d-9e1a-994b1948f410-scripts\") pod \"14f6d50c-c34c-438d-9e1a-994b1948f410\" (UID: \"14f6d50c-c34c-438d-9e1a-994b1948f410\") "
Jan 22 13:33:33 crc kubenswrapper[4773]: I0122 13:33:33.897962 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmtvm\" (UniqueName: \"kubernetes.io/projected/14f6d50c-c34c-438d-9e1a-994b1948f410-kube-api-access-zmtvm\") pod \"14f6d50c-c34c-438d-9e1a-994b1948f410\" (UID: \"14f6d50c-c34c-438d-9e1a-994b1948f410\") "
Jan 22 13:33:33 crc kubenswrapper[4773]: I0122 13:33:33.898053 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14f6d50c-c34c-438d-9e1a-994b1948f410-combined-ca-bundle\") pod \"14f6d50c-c34c-438d-9e1a-994b1948f410\" (UID: \"14f6d50c-c34c-438d-9e1a-994b1948f410\") "
Jan 22 13:33:33 crc kubenswrapper[4773]: I0122 13:33:33.902961 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14f6d50c-c34c-438d-9e1a-994b1948f410-kube-api-access-zmtvm" (OuterVolumeSpecName: "kube-api-access-zmtvm") pod "14f6d50c-c34c-438d-9e1a-994b1948f410" (UID: "14f6d50c-c34c-438d-9e1a-994b1948f410"). InnerVolumeSpecName "kube-api-access-zmtvm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 13:33:33 crc kubenswrapper[4773]: I0122 13:33:33.903427 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14f6d50c-c34c-438d-9e1a-994b1948f410-scripts" (OuterVolumeSpecName: "scripts") pod "14f6d50c-c34c-438d-9e1a-994b1948f410" (UID: "14f6d50c-c34c-438d-9e1a-994b1948f410"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 13:33:33 crc kubenswrapper[4773]: I0122 13:33:33.925270 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14f6d50c-c34c-438d-9e1a-994b1948f410-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14f6d50c-c34c-438d-9e1a-994b1948f410" (UID: "14f6d50c-c34c-438d-9e1a-994b1948f410"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 13:33:33 crc kubenswrapper[4773]: I0122 13:33:33.925869 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14f6d50c-c34c-438d-9e1a-994b1948f410-config-data" (OuterVolumeSpecName: "config-data") pod "14f6d50c-c34c-438d-9e1a-994b1948f410" (UID: "14f6d50c-c34c-438d-9e1a-994b1948f410"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.000203 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14f6d50c-c34c-438d-9e1a-994b1948f410-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.000246 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14f6d50c-c34c-438d-9e1a-994b1948f410-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.000263 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmtvm\" (UniqueName: \"kubernetes.io/projected/14f6d50c-c34c-438d-9e1a-994b1948f410-kube-api-access-zmtvm\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.000327 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14f6d50c-c34c-438d-9e1a-994b1948f410-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.565690 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-zw5ls" event={"ID":"14f6d50c-c34c-438d-9e1a-994b1948f410","Type":"ContainerDied","Data":"c0fb7478c8c16f699a2b9b668c3ed761328714a9838945ea92281af4e1e98aa3"} Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.565752 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c0fb7478c8c16f699a2b9b668c3ed761328714a9838945ea92281af4e1e98aa3" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.565833 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-zw5ls" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.669701 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 13:33:34 crc kubenswrapper[4773]: E0122 13:33:34.670069 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14f6d50c-c34c-438d-9e1a-994b1948f410" containerName="nova-cell0-conductor-db-sync" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.670094 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="14f6d50c-c34c-438d-9e1a-994b1948f410" containerName="nova-cell0-conductor-db-sync" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.670381 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="14f6d50c-c34c-438d-9e1a-994b1948f410" containerName="nova-cell0-conductor-db-sync" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.671202 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.675148 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-6m5sd" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.675253 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.680549 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.815927 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92a09152-089d-4802-ae3a-5e9d84b996c4-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"92a09152-089d-4802-ae3a-5e9d84b996c4\") " pod="openstack/nova-cell0-conductor-0" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.816505 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92a09152-089d-4802-ae3a-5e9d84b996c4-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"92a09152-089d-4802-ae3a-5e9d84b996c4\") " pod="openstack/nova-cell0-conductor-0" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.816551 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjz78\" (UniqueName: \"kubernetes.io/projected/92a09152-089d-4802-ae3a-5e9d84b996c4-kube-api-access-fjz78\") pod \"nova-cell0-conductor-0\" (UID: \"92a09152-089d-4802-ae3a-5e9d84b996c4\") " pod="openstack/nova-cell0-conductor-0" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.918235 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92a09152-089d-4802-ae3a-5e9d84b996c4-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"92a09152-089d-4802-ae3a-5e9d84b996c4\") " pod="openstack/nova-cell0-conductor-0" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.918315 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjz78\" (UniqueName: \"kubernetes.io/projected/92a09152-089d-4802-ae3a-5e9d84b996c4-kube-api-access-fjz78\") pod \"nova-cell0-conductor-0\" (UID: \"92a09152-089d-4802-ae3a-5e9d84b996c4\") " pod="openstack/nova-cell0-conductor-0" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.918823 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92a09152-089d-4802-ae3a-5e9d84b996c4-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"92a09152-089d-4802-ae3a-5e9d84b996c4\") " pod="openstack/nova-cell0-conductor-0" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.923327 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92a09152-089d-4802-ae3a-5e9d84b996c4-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"92a09152-089d-4802-ae3a-5e9d84b996c4\") " pod="openstack/nova-cell0-conductor-0" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.930411 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92a09152-089d-4802-ae3a-5e9d84b996c4-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"92a09152-089d-4802-ae3a-5e9d84b996c4\") " pod="openstack/nova-cell0-conductor-0" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.933101 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjz78\" (UniqueName: \"kubernetes.io/projected/92a09152-089d-4802-ae3a-5e9d84b996c4-kube-api-access-fjz78\") pod \"nova-cell0-conductor-0\" (UID: \"92a09152-089d-4802-ae3a-5e9d84b996c4\") " pod="openstack/nova-cell0-conductor-0" Jan 22 13:33:34 crc kubenswrapper[4773]: I0122 13:33:34.991272 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 22 13:33:35 crc kubenswrapper[4773]: I0122 13:33:35.452574 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 13:33:35 crc kubenswrapper[4773]: I0122 13:33:35.577170 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"92a09152-089d-4802-ae3a-5e9d84b996c4","Type":"ContainerStarted","Data":"88798cd4f0144de088fd9a432cd7e01e085edac4c5e0920509d19cfda0e3fee4"} Jan 22 13:33:35 crc kubenswrapper[4773]: I0122 13:33:35.658032 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:33:35 crc kubenswrapper[4773]: E0122 13:33:35.658364 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:33:36 crc kubenswrapper[4773]: I0122 13:33:36.589166 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"92a09152-089d-4802-ae3a-5e9d84b996c4","Type":"ContainerStarted","Data":"ccd263a6bc7321ba12f623f00acda87138817526b3d968e708cb10e20070ce89"} Jan 22 13:33:36 crc kubenswrapper[4773]: I0122 13:33:36.622856 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.622831084 podStartE2EDuration="2.622831084s" podCreationTimestamp="2026-01-22 13:33:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:33:36.616613 +0000 UTC m=+5924.194728835" watchObservedRunningTime="2026-01-22 13:33:36.622831084 +0000 UTC m=+5924.200946919" Jan 22 13:33:37 crc kubenswrapper[4773]: I0122 13:33:37.598042 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.019851 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.502057 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-c6gl6"] Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.503900 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-c6gl6" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.507920 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.508157 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.522692 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-c6gl6"] Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.553886 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-scripts\") pod \"nova-cell0-cell-mapping-c6gl6\" (UID: \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\") " pod="openstack/nova-cell0-cell-mapping-c6gl6" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.554000 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2d68\" (UniqueName: \"kubernetes.io/projected/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-kube-api-access-s2d68\") pod \"nova-cell0-cell-mapping-c6gl6\" (UID: \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\") " pod="openstack/nova-cell0-cell-mapping-c6gl6" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.554068 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-config-data\") pod \"nova-cell0-cell-mapping-c6gl6\" (UID: \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\") " pod="openstack/nova-cell0-cell-mapping-c6gl6" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.554132 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-c6gl6\" (UID: \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\") " pod="openstack/nova-cell0-cell-mapping-c6gl6" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.670721 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2d68\" (UniqueName: \"kubernetes.io/projected/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-kube-api-access-s2d68\") pod \"nova-cell0-cell-mapping-c6gl6\" (UID: \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\") " pod="openstack/nova-cell0-cell-mapping-c6gl6" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.670835 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-config-data\") pod \"nova-cell0-cell-mapping-c6gl6\" (UID: \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\") " pod="openstack/nova-cell0-cell-mapping-c6gl6" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.670883 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-c6gl6\" (UID: \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\") " pod="openstack/nova-cell0-cell-mapping-c6gl6" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.670987 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-scripts\") pod \"nova-cell0-cell-mapping-c6gl6\" (UID: \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\") " pod="openstack/nova-cell0-cell-mapping-c6gl6" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.676468 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-config-data\") pod \"nova-cell0-cell-mapping-c6gl6\" (UID: \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\") " pod="openstack/nova-cell0-cell-mapping-c6gl6" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.681826 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-scripts\") pod \"nova-cell0-cell-mapping-c6gl6\" (UID: \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\") " pod="openstack/nova-cell0-cell-mapping-c6gl6" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.691193 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-c6gl6\" (UID: \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\") " pod="openstack/nova-cell0-cell-mapping-c6gl6" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.693979 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2d68\" (UniqueName: \"kubernetes.io/projected/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-kube-api-access-s2d68\") pod \"nova-cell0-cell-mapping-c6gl6\" (UID: \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\") " pod="openstack/nova-cell0-cell-mapping-c6gl6" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.733354 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.734924 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.741481 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.760419 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.814117 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.833956 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-c6gl6" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.856256 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.856398 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.866449 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.889444 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80dfc6ef-7650-4890-b457-9e221da5fc24-config-data\") pod \"nova-scheduler-0\" (UID: \"80dfc6ef-7650-4890-b457-9e221da5fc24\") " pod="openstack/nova-scheduler-0" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.890271 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80dfc6ef-7650-4890-b457-9e221da5fc24-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"80dfc6ef-7650-4890-b457-9e221da5fc24\") " pod="openstack/nova-scheduler-0" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.890450 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4wdq\" (UniqueName: \"kubernetes.io/projected/80dfc6ef-7650-4890-b457-9e221da5fc24-kube-api-access-g4wdq\") pod \"nova-scheduler-0\" (UID: \"80dfc6ef-7650-4890-b457-9e221da5fc24\") " pod="openstack/nova-scheduler-0" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.913936 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.915222 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.935067 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.977384 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.995762 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4wdq\" (UniqueName: \"kubernetes.io/projected/80dfc6ef-7650-4890-b457-9e221da5fc24-kube-api-access-g4wdq\") pod \"nova-scheduler-0\" (UID: \"80dfc6ef-7650-4890-b457-9e221da5fc24\") " pod="openstack/nova-scheduler-0" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.995829 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33888722-8904-4778-b046-b9d85663d4f0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"33888722-8904-4778-b046-b9d85663d4f0\") " pod="openstack/nova-api-0" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.995877 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33888722-8904-4778-b046-b9d85663d4f0-logs\") pod \"nova-api-0\" (UID: \"33888722-8904-4778-b046-b9d85663d4f0\") " pod="openstack/nova-api-0" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.995929 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33888722-8904-4778-b046-b9d85663d4f0-config-data\") pod \"nova-api-0\" (UID: \"33888722-8904-4778-b046-b9d85663d4f0\") " pod="openstack/nova-api-0" Jan 22 13:33:45 crc 
kubenswrapper[4773]: I0122 13:33:45.995959 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80dfc6ef-7650-4890-b457-9e221da5fc24-config-data\") pod \"nova-scheduler-0\" (UID: \"80dfc6ef-7650-4890-b457-9e221da5fc24\") " pod="openstack/nova-scheduler-0" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.996028 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80dfc6ef-7650-4890-b457-9e221da5fc24-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"80dfc6ef-7650-4890-b457-9e221da5fc24\") " pod="openstack/nova-scheduler-0" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.996059 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2ngx\" (UniqueName: \"kubernetes.io/projected/33888722-8904-4778-b046-b9d85663d4f0-kube-api-access-x2ngx\") pod \"nova-api-0\" (UID: \"33888722-8904-4778-b046-b9d85663d4f0\") " pod="openstack/nova-api-0" Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.996272 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:33:45 crc kubenswrapper[4773]: I0122 13:33:45.998349 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.002868 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80dfc6ef-7650-4890-b457-9e221da5fc24-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"80dfc6ef-7650-4890-b457-9e221da5fc24\") " pod="openstack/nova-scheduler-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.004013 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80dfc6ef-7650-4890-b457-9e221da5fc24-config-data\") pod \"nova-scheduler-0\" (UID: \"80dfc6ef-7650-4890-b457-9e221da5fc24\") " pod="openstack/nova-scheduler-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.013589 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.056274 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4wdq\" (UniqueName: \"kubernetes.io/projected/80dfc6ef-7650-4890-b457-9e221da5fc24-kube-api-access-g4wdq\") pod \"nova-scheduler-0\" (UID: \"80dfc6ef-7650-4890-b457-9e221da5fc24\") " pod="openstack/nova-scheduler-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.080345 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.095883 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.098983 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67sfm\" (UniqueName: \"kubernetes.io/projected/b69e8a60-38b7-4461-ad44-f6402ca75bc3-kube-api-access-67sfm\") pod \"nova-cell1-novncproxy-0\" (UID: \"b69e8a60-38b7-4461-ad44-f6402ca75bc3\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.099038 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33888722-8904-4778-b046-b9d85663d4f0-config-data\") pod \"nova-api-0\" (UID: \"33888722-8904-4778-b046-b9d85663d4f0\") " pod="openstack/nova-api-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.099114 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b69e8a60-38b7-4461-ad44-f6402ca75bc3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b69e8a60-38b7-4461-ad44-f6402ca75bc3\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.099141 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2ngx\" (UniqueName: \"kubernetes.io/projected/33888722-8904-4778-b046-b9d85663d4f0-kube-api-access-x2ngx\") pod \"nova-api-0\" (UID: \"33888722-8904-4778-b046-b9d85663d4f0\") " pod="openstack/nova-api-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.099196 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b69e8a60-38b7-4461-ad44-f6402ca75bc3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b69e8a60-38b7-4461-ad44-f6402ca75bc3\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.099214 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33888722-8904-4778-b046-b9d85663d4f0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"33888722-8904-4778-b046-b9d85663d4f0\") " pod="openstack/nova-api-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.099249 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33888722-8904-4778-b046-b9d85663d4f0-logs\") pod \"nova-api-0\" (UID: \"33888722-8904-4778-b046-b9d85663d4f0\") " pod="openstack/nova-api-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.099670 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33888722-8904-4778-b046-b9d85663d4f0-logs\") pod \"nova-api-0\" (UID: \"33888722-8904-4778-b046-b9d85663d4f0\") " pod="openstack/nova-api-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.117273 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33888722-8904-4778-b046-b9d85663d4f0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"33888722-8904-4778-b046-b9d85663d4f0\") " pod="openstack/nova-api-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.117882 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/33888722-8904-4778-b046-b9d85663d4f0-config-data\") pod \"nova-api-0\" (UID: \"33888722-8904-4778-b046-b9d85663d4f0\") " pod="openstack/nova-api-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.141018 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2ngx\" (UniqueName: \"kubernetes.io/projected/33888722-8904-4778-b046-b9d85663d4f0-kube-api-access-x2ngx\") pod \"nova-api-0\" (UID: \"33888722-8904-4778-b046-b9d85663d4f0\") " pod="openstack/nova-api-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.151432 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-594b9845b9-7sx28"] Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.155192 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.180711 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-594b9845b9-7sx28"] Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.200661 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcgfd\" (UniqueName: \"kubernetes.io/projected/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-kube-api-access-dcgfd\") pod \"nova-metadata-0\" (UID: \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\") " pod="openstack/nova-metadata-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.200765 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b69e8a60-38b7-4461-ad44-f6402ca75bc3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b69e8a60-38b7-4461-ad44-f6402ca75bc3\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.200845 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\") " pod="openstack/nova-metadata-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.200871 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b69e8a60-38b7-4461-ad44-f6402ca75bc3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b69e8a60-38b7-4461-ad44-f6402ca75bc3\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.200939 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-logs\") pod \"nova-metadata-0\" (UID: \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\") " pod="openstack/nova-metadata-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.200968 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67sfm\" (UniqueName: \"kubernetes.io/projected/b69e8a60-38b7-4461-ad44-f6402ca75bc3-kube-api-access-67sfm\") pod \"nova-cell1-novncproxy-0\" (UID: \"b69e8a60-38b7-4461-ad44-f6402ca75bc3\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.200992 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-config-data\") pod \"nova-metadata-0\" (UID: \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\") " pod="openstack/nova-metadata-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.205497 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b69e8a60-38b7-4461-ad44-f6402ca75bc3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b69e8a60-38b7-4461-ad44-f6402ca75bc3\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.210338 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b69e8a60-38b7-4461-ad44-f6402ca75bc3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b69e8a60-38b7-4461-ad44-f6402ca75bc3\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.220216 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67sfm\" (UniqueName: \"kubernetes.io/projected/b69e8a60-38b7-4461-ad44-f6402ca75bc3-kube-api-access-67sfm\") pod \"nova-cell1-novncproxy-0\" (UID: \"b69e8a60-38b7-4461-ad44-f6402ca75bc3\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.259814 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.274556 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.304259 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcgfd\" (UniqueName: \"kubernetes.io/projected/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-kube-api-access-dcgfd\") pod \"nova-metadata-0\" (UID: \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\") " pod="openstack/nova-metadata-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.304465 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\") " pod="openstack/nova-metadata-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.304503 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-ovsdbserver-sb\") pod \"dnsmasq-dns-594b9845b9-7sx28\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.304572 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-dns-svc\") pod \"dnsmasq-dns-594b9845b9-7sx28\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.304797 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-logs\") pod \"nova-metadata-0\" (UID: \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\") " pod="openstack/nova-metadata-0" Jan 22 13:33:46 
crc kubenswrapper[4773]: I0122 13:33:46.304900 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dsjtq\" (UniqueName: \"kubernetes.io/projected/56f9dd78-8e18-4b35-bacb-7159d1aefb47-kube-api-access-dsjtq\") pod \"dnsmasq-dns-594b9845b9-7sx28\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.305227 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-logs\") pod \"nova-metadata-0\" (UID: \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\") " pod="openstack/nova-metadata-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.305322 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-config-data\") pod \"nova-metadata-0\" (UID: \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\") " pod="openstack/nova-metadata-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.305931 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-config\") pod \"dnsmasq-dns-594b9845b9-7sx28\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.305992 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-ovsdbserver-nb\") pod \"dnsmasq-dns-594b9845b9-7sx28\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.308079 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\") " pod="openstack/nova-metadata-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.311189 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-config-data\") pod \"nova-metadata-0\" (UID: \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\") " pod="openstack/nova-metadata-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.322673 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcgfd\" (UniqueName: \"kubernetes.io/projected/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-kube-api-access-dcgfd\") pod \"nova-metadata-0\" (UID: \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\") " pod="openstack/nova-metadata-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.407257 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dsjtq\" (UniqueName: \"kubernetes.io/projected/56f9dd78-8e18-4b35-bacb-7159d1aefb47-kube-api-access-dsjtq\") pod \"dnsmasq-dns-594b9845b9-7sx28\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.407382 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-config\") pod \"dnsmasq-dns-594b9845b9-7sx28\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.407439 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-ovsdbserver-nb\") pod \"dnsmasq-dns-594b9845b9-7sx28\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.407538 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-ovsdbserver-sb\") pod \"dnsmasq-dns-594b9845b9-7sx28\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.407585 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-dns-svc\") pod \"dnsmasq-dns-594b9845b9-7sx28\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.408511 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-dns-svc\") pod \"dnsmasq-dns-594b9845b9-7sx28\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.409070 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-ovsdbserver-nb\") pod \"dnsmasq-dns-594b9845b9-7sx28\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.409543 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-config\") pod \"dnsmasq-dns-594b9845b9-7sx28\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.409707 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-ovsdbserver-sb\") pod \"dnsmasq-dns-594b9845b9-7sx28\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.428347 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dsjtq\" (UniqueName: \"kubernetes.io/projected/56f9dd78-8e18-4b35-bacb-7159d1aefb47-kube-api-access-dsjtq\") pod \"dnsmasq-dns-594b9845b9-7sx28\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.435542 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.486066 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.531135 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-c6gl6"] Jan 22 13:33:46 crc kubenswrapper[4773]: W0122 13:33:46.546227 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5409c3ae_c282_4e60_bc93_e0c5bfdb1304.slice/crio-71b56680cb95fd1be9543bf83bc68749455be0286210a189452626957e4a2fe7 WatchSource:0}: Error finding container 71b56680cb95fd1be9543bf83bc68749455be0286210a189452626957e4a2fe7: Status 404 returned error can't find the container with id 71b56680cb95fd1be9543bf83bc68749455be0286210a189452626957e4a2fe7 Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.655495 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 13:33:46 crc kubenswrapper[4773]: W0122 13:33:46.655874 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod80dfc6ef_7650_4890_b457_9e221da5fc24.slice/crio-3afd79ca5b9c231d8a1826ecb147d44af4efca813a8f9c33a26566f3254f2621 WatchSource:0}: Error finding container 3afd79ca5b9c231d8a1826ecb147d44af4efca813a8f9c33a26566f3254f2621: Status 404 returned error can't find the container with id 3afd79ca5b9c231d8a1826ecb147d44af4efca813a8f9c33a26566f3254f2621 Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.657838 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:33:46 crc kubenswrapper[4773]: E0122 13:33:46.658159 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.724929 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s4ssz"] Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.727505 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-s4ssz" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.730154 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"80dfc6ef-7650-4890-b457-9e221da5fc24","Type":"ContainerStarted","Data":"3afd79ca5b9c231d8a1826ecb147d44af4efca813a8f9c33a26566f3254f2621"} Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.731643 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s4ssz"] Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.731964 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-c6gl6" event={"ID":"5409c3ae-c282-4e60-bc93-e0c5bfdb1304","Type":"ContainerStarted","Data":"71b56680cb95fd1be9543bf83bc68749455be0286210a189452626957e4a2fe7"} Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.732304 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.732879 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.805806 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.818635 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16320e64-130c-4657-acc2-6025d895a31d-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-s4ssz\" (UID: \"16320e64-130c-4657-acc2-6025d895a31d\") " pod="openstack/nova-cell1-conductor-db-sync-s4ssz" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.818696 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16320e64-130c-4657-acc2-6025d895a31d-config-data\") pod \"nova-cell1-conductor-db-sync-s4ssz\" (UID: \"16320e64-130c-4657-acc2-6025d895a31d\") " pod="openstack/nova-cell1-conductor-db-sync-s4ssz" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.818744 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16320e64-130c-4657-acc2-6025d895a31d-scripts\") pod \"nova-cell1-conductor-db-sync-s4ssz\" (UID: \"16320e64-130c-4657-acc2-6025d895a31d\") " pod="openstack/nova-cell1-conductor-db-sync-s4ssz" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.818764 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2rt2\" (UniqueName: \"kubernetes.io/projected/16320e64-130c-4657-acc2-6025d895a31d-kube-api-access-n2rt2\") pod \"nova-cell1-conductor-db-sync-s4ssz\" (UID: \"16320e64-130c-4657-acc2-6025d895a31d\") " pod="openstack/nova-cell1-conductor-db-sync-s4ssz" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.857205 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 13:33:46 crc kubenswrapper[4773]: W0122 13:33:46.864108 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb69e8a60_38b7_4461_ad44_f6402ca75bc3.slice/crio-f01596d8cb9f1019ac7e5c054987ba53f099cf1edc98d389f443effc8ce9ff72 WatchSource:0}: Error finding container 
f01596d8cb9f1019ac7e5c054987ba53f099cf1edc98d389f443effc8ce9ff72: Status 404 returned error can't find the container with id f01596d8cb9f1019ac7e5c054987ba53f099cf1edc98d389f443effc8ce9ff72 Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.920633 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16320e64-130c-4657-acc2-6025d895a31d-config-data\") pod \"nova-cell1-conductor-db-sync-s4ssz\" (UID: \"16320e64-130c-4657-acc2-6025d895a31d\") " pod="openstack/nova-cell1-conductor-db-sync-s4ssz" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.921048 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16320e64-130c-4657-acc2-6025d895a31d-scripts\") pod \"nova-cell1-conductor-db-sync-s4ssz\" (UID: \"16320e64-130c-4657-acc2-6025d895a31d\") " pod="openstack/nova-cell1-conductor-db-sync-s4ssz" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.921209 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2rt2\" (UniqueName: \"kubernetes.io/projected/16320e64-130c-4657-acc2-6025d895a31d-kube-api-access-n2rt2\") pod \"nova-cell1-conductor-db-sync-s4ssz\" (UID: \"16320e64-130c-4657-acc2-6025d895a31d\") " pod="openstack/nova-cell1-conductor-db-sync-s4ssz" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.921487 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16320e64-130c-4657-acc2-6025d895a31d-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-s4ssz\" (UID: \"16320e64-130c-4657-acc2-6025d895a31d\") " pod="openstack/nova-cell1-conductor-db-sync-s4ssz" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.925205 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16320e64-130c-4657-acc2-6025d895a31d-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-s4ssz\" (UID: \"16320e64-130c-4657-acc2-6025d895a31d\") " pod="openstack/nova-cell1-conductor-db-sync-s4ssz" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.925454 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16320e64-130c-4657-acc2-6025d895a31d-scripts\") pod \"nova-cell1-conductor-db-sync-s4ssz\" (UID: \"16320e64-130c-4657-acc2-6025d895a31d\") " pod="openstack/nova-cell1-conductor-db-sync-s4ssz" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.932079 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16320e64-130c-4657-acc2-6025d895a31d-config-data\") pod \"nova-cell1-conductor-db-sync-s4ssz\" (UID: \"16320e64-130c-4657-acc2-6025d895a31d\") " pod="openstack/nova-cell1-conductor-db-sync-s4ssz" Jan 22 13:33:46 crc kubenswrapper[4773]: I0122 13:33:46.936441 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2rt2\" (UniqueName: \"kubernetes.io/projected/16320e64-130c-4657-acc2-6025d895a31d-kube-api-access-n2rt2\") pod \"nova-cell1-conductor-db-sync-s4ssz\" (UID: \"16320e64-130c-4657-acc2-6025d895a31d\") " pod="openstack/nova-cell1-conductor-db-sync-s4ssz" Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.001050 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 
13:33:47.071133 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-s4ssz" Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.085104 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-594b9845b9-7sx28"] Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.578521 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s4ssz"] Jan 22 13:33:47 crc kubenswrapper[4773]: W0122 13:33:47.579437 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16320e64_130c_4657_acc2_6025d895a31d.slice/crio-f9d471efba007403cd2576d345d3f6c9ced234bf27cc49066b3dc9a677e34461 WatchSource:0}: Error finding container f9d471efba007403cd2576d345d3f6c9ced234bf27cc49066b3dc9a677e34461: Status 404 returned error can't find the container with id f9d471efba007403cd2576d345d3f6c9ced234bf27cc49066b3dc9a677e34461 Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.755425 4773 generic.go:334] "Generic (PLEG): container finished" podID="56f9dd78-8e18-4b35-bacb-7159d1aefb47" containerID="909c7a95447e8dbf869b3653a7d6600c03acbe0dc5c0042c5482eb142a67ae0c" exitCode=0 Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.755806 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-594b9845b9-7sx28" event={"ID":"56f9dd78-8e18-4b35-bacb-7159d1aefb47","Type":"ContainerDied","Data":"909c7a95447e8dbf869b3653a7d6600c03acbe0dc5c0042c5482eb142a67ae0c"} Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.755845 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-594b9845b9-7sx28" event={"ID":"56f9dd78-8e18-4b35-bacb-7159d1aefb47","Type":"ContainerStarted","Data":"723e14c2d1ea0f67a2a1de743d36f578b31a003cc9f7ef5c74030fbb260b58aa"} Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.775945 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-c6gl6" event={"ID":"5409c3ae-c282-4e60-bc93-e0c5bfdb1304","Type":"ContainerStarted","Data":"879cd149d7544b2f8d5892b15d2f0516649eef95b56cb998b7e5898652ef4c90"} Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.780439 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b69e8a60-38b7-4461-ad44-f6402ca75bc3","Type":"ContainerStarted","Data":"f076adb28bae42cdd541e3fbb5ddd349e9b4ccd96a54f6ea43cba23f12da3185"} Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.780504 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b69e8a60-38b7-4461-ad44-f6402ca75bc3","Type":"ContainerStarted","Data":"f01596d8cb9f1019ac7e5c054987ba53f099cf1edc98d389f443effc8ce9ff72"} Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.783702 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56","Type":"ContainerStarted","Data":"d39360aa0ddf417ecff2e1a2505ba56e9a82ac9b423b93707267837bb15351ab"} Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.783745 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56","Type":"ContainerStarted","Data":"d9150e762b313d4c80f7796503fd4e5bc70be883188e6bb7020153298ca5cbce"} Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.783758 4773 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/nova-metadata-0" event={"ID":"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56","Type":"ContainerStarted","Data":"34ec4f21275d172f782f74b629867e0280d6025237a9b29e8648c76c53e63de0"} Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.787909 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-s4ssz" event={"ID":"16320e64-130c-4657-acc2-6025d895a31d","Type":"ContainerStarted","Data":"f9d471efba007403cd2576d345d3f6c9ced234bf27cc49066b3dc9a677e34461"} Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.794240 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"80dfc6ef-7650-4890-b457-9e221da5fc24","Type":"ContainerStarted","Data":"95191aa98448678d853ea30ef9e272b61203d87a0acacceeb8f937995f4ea5a5"} Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.811591 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"33888722-8904-4778-b046-b9d85663d4f0","Type":"ContainerStarted","Data":"62aa3ed0057ee80ce4ab3d56e50d399a0abab78f0cd6cdf3264ba28f94e45dd8"} Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.811663 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"33888722-8904-4778-b046-b9d85663d4f0","Type":"ContainerStarted","Data":"219828e5c4a0a0bba023541b6ecec32b35f8bf06b646e96e2dc5b8428fe1b386"} Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.811683 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"33888722-8904-4778-b046-b9d85663d4f0","Type":"ContainerStarted","Data":"789ca391b08f4013955a2330f4e4ff65762b0b4491d026ade9e95ab76c553c19"} Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.861104 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.861071464 podStartE2EDuration="2.861071464s" podCreationTimestamp="2026-01-22 13:33:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:33:47.814173186 +0000 UTC m=+5935.392289061" watchObservedRunningTime="2026-01-22 13:33:47.861071464 +0000 UTC m=+5935.439187289" Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.881913 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.881888919 podStartE2EDuration="2.881888919s" podCreationTimestamp="2026-01-22 13:33:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:33:47.836082741 +0000 UTC m=+5935.414198586" watchObservedRunningTime="2026-01-22 13:33:47.881888919 +0000 UTC m=+5935.460004744" Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.917522 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-c6gl6" podStartSLOduration=2.91750005 podStartE2EDuration="2.91750005s" podCreationTimestamp="2026-01-22 13:33:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:33:47.87659045 +0000 UTC m=+5935.454706275" watchObservedRunningTime="2026-01-22 13:33:47.91750005 +0000 UTC m=+5935.495615875" Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.922615 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-scheduler-0" podStartSLOduration=2.922599743 podStartE2EDuration="2.922599743s" podCreationTimestamp="2026-01-22 13:33:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:33:47.894064101 +0000 UTC m=+5935.472179926" watchObservedRunningTime="2026-01-22 13:33:47.922599743 +0000 UTC m=+5935.500715568" Jan 22 13:33:47 crc kubenswrapper[4773]: I0122 13:33:47.932555 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.932533952 podStartE2EDuration="2.932533952s" podCreationTimestamp="2026-01-22 13:33:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:33:47.919836775 +0000 UTC m=+5935.497952600" watchObservedRunningTime="2026-01-22 13:33:47.932533952 +0000 UTC m=+5935.510649777" Jan 22 13:33:48 crc kubenswrapper[4773]: I0122 13:33:48.820646 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-s4ssz" event={"ID":"16320e64-130c-4657-acc2-6025d895a31d","Type":"ContainerStarted","Data":"f704af551d1d977eb806cea7d27265bea74385548000cda73956e99983dd372f"} Jan 22 13:33:48 crc kubenswrapper[4773]: I0122 13:33:48.824812 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-594b9845b9-7sx28" event={"ID":"56f9dd78-8e18-4b35-bacb-7159d1aefb47","Type":"ContainerStarted","Data":"655eacce59359ca3c618cff463d46066770147c9365189c7b284255618a30bec"} Jan 22 13:33:48 crc kubenswrapper[4773]: I0122 13:33:48.824856 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:33:48 crc kubenswrapper[4773]: I0122 13:33:48.843743 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-s4ssz" podStartSLOduration=2.843723732 podStartE2EDuration="2.843723732s" podCreationTimestamp="2026-01-22 13:33:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:33:48.836113988 +0000 UTC m=+5936.414229823" watchObservedRunningTime="2026-01-22 13:33:48.843723732 +0000 UTC m=+5936.421839547" Jan 22 13:33:48 crc kubenswrapper[4773]: I0122 13:33:48.865562 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-594b9845b9-7sx28" podStartSLOduration=2.8655380150000003 podStartE2EDuration="2.865538015s" podCreationTimestamp="2026-01-22 13:33:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:33:48.861476071 +0000 UTC m=+5936.439591916" watchObservedRunningTime="2026-01-22 13:33:48.865538015 +0000 UTC m=+5936.443653840" Jan 22 13:33:49 crc kubenswrapper[4773]: I0122 13:33:49.652636 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:33:49 crc kubenswrapper[4773]: I0122 13:33:49.676481 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 13:33:49 crc kubenswrapper[4773]: I0122 13:33:49.833682 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="807e5518-3bc7-4c5f-84cd-2e2a6c4baf56" containerName="nova-metadata-log" 
containerID="cri-o://d9150e762b313d4c80f7796503fd4e5bc70be883188e6bb7020153298ca5cbce" gracePeriod=30 Jan 22 13:33:49 crc kubenswrapper[4773]: I0122 13:33:49.833799 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="807e5518-3bc7-4c5f-84cd-2e2a6c4baf56" containerName="nova-metadata-metadata" containerID="cri-o://d39360aa0ddf417ecff2e1a2505ba56e9a82ac9b423b93707267837bb15351ab" gracePeriod=30 Jan 22 13:33:49 crc kubenswrapper[4773]: I0122 13:33:49.833850 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="b69e8a60-38b7-4461-ad44-f6402ca75bc3" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://f076adb28bae42cdd541e3fbb5ddd349e9b4ccd96a54f6ea43cba23f12da3185" gracePeriod=30 Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.740909 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.794924 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.855722 4773 generic.go:334] "Generic (PLEG): container finished" podID="b69e8a60-38b7-4461-ad44-f6402ca75bc3" containerID="f076adb28bae42cdd541e3fbb5ddd349e9b4ccd96a54f6ea43cba23f12da3185" exitCode=0 Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.855772 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.855793 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b69e8a60-38b7-4461-ad44-f6402ca75bc3","Type":"ContainerDied","Data":"f076adb28bae42cdd541e3fbb5ddd349e9b4ccd96a54f6ea43cba23f12da3185"} Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.856490 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b69e8a60-38b7-4461-ad44-f6402ca75bc3","Type":"ContainerDied","Data":"f01596d8cb9f1019ac7e5c054987ba53f099cf1edc98d389f443effc8ce9ff72"} Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.856532 4773 scope.go:117] "RemoveContainer" containerID="f076adb28bae42cdd541e3fbb5ddd349e9b4ccd96a54f6ea43cba23f12da3185" Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.863954 4773 generic.go:334] "Generic (PLEG): container finished" podID="807e5518-3bc7-4c5f-84cd-2e2a6c4baf56" containerID="d39360aa0ddf417ecff2e1a2505ba56e9a82ac9b423b93707267837bb15351ab" exitCode=0 Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.864022 4773 generic.go:334] "Generic (PLEG): container finished" podID="807e5518-3bc7-4c5f-84cd-2e2a6c4baf56" containerID="d9150e762b313d4c80f7796503fd4e5bc70be883188e6bb7020153298ca5cbce" exitCode=143 Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.864078 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56","Type":"ContainerDied","Data":"d39360aa0ddf417ecff2e1a2505ba56e9a82ac9b423b93707267837bb15351ab"} Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.864116 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56","Type":"ContainerDied","Data":"d9150e762b313d4c80f7796503fd4e5bc70be883188e6bb7020153298ca5cbce"} Jan 22 
Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.864128 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56","Type":"ContainerDied","Data":"34ec4f21275d172f782f74b629867e0280d6025237a9b29e8648c76c53e63de0"}
Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.864138 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.880161 4773 scope.go:117] "RemoveContainer" containerID="f076adb28bae42cdd541e3fbb5ddd349e9b4ccd96a54f6ea43cba23f12da3185"
Jan 22 13:33:50 crc kubenswrapper[4773]: E0122 13:33:50.880983 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f076adb28bae42cdd541e3fbb5ddd349e9b4ccd96a54f6ea43cba23f12da3185\": container with ID starting with f076adb28bae42cdd541e3fbb5ddd349e9b4ccd96a54f6ea43cba23f12da3185 not found: ID does not exist" containerID="f076adb28bae42cdd541e3fbb5ddd349e9b4ccd96a54f6ea43cba23f12da3185"
Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.881091 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f076adb28bae42cdd541e3fbb5ddd349e9b4ccd96a54f6ea43cba23f12da3185"} err="failed to get container status \"f076adb28bae42cdd541e3fbb5ddd349e9b4ccd96a54f6ea43cba23f12da3185\": rpc error: code = NotFound desc = could not find container \"f076adb28bae42cdd541e3fbb5ddd349e9b4ccd96a54f6ea43cba23f12da3185\": container with ID starting with f076adb28bae42cdd541e3fbb5ddd349e9b4ccd96a54f6ea43cba23f12da3185 not found: ID does not exist"
Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.881171 4773 scope.go:117] "RemoveContainer" containerID="d39360aa0ddf417ecff2e1a2505ba56e9a82ac9b423b93707267837bb15351ab"
Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.916135 4773 scope.go:117] "RemoveContainer" containerID="d9150e762b313d4c80f7796503fd4e5bc70be883188e6bb7020153298ca5cbce"
Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.935154 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-config-data\") pod \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\" (UID: \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\") "
Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.935254 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-logs\") pod \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\" (UID: \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\") "
Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.935375 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b69e8a60-38b7-4461-ad44-f6402ca75bc3-config-data\") pod \"b69e8a60-38b7-4461-ad44-f6402ca75bc3\" (UID: \"b69e8a60-38b7-4461-ad44-f6402ca75bc3\") "
Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.935406 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcgfd\" (UniqueName: \"kubernetes.io/projected/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-kube-api-access-dcgfd\") pod \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\" (UID: \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\") "
Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.935457 4773 reconciler_common.go:159]
"operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-combined-ca-bundle\") pod \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\" (UID: \"807e5518-3bc7-4c5f-84cd-2e2a6c4baf56\") " Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.935477 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b69e8a60-38b7-4461-ad44-f6402ca75bc3-combined-ca-bundle\") pod \"b69e8a60-38b7-4461-ad44-f6402ca75bc3\" (UID: \"b69e8a60-38b7-4461-ad44-f6402ca75bc3\") " Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.935507 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67sfm\" (UniqueName: \"kubernetes.io/projected/b69e8a60-38b7-4461-ad44-f6402ca75bc3-kube-api-access-67sfm\") pod \"b69e8a60-38b7-4461-ad44-f6402ca75bc3\" (UID: \"b69e8a60-38b7-4461-ad44-f6402ca75bc3\") " Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.936384 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-logs" (OuterVolumeSpecName: "logs") pod "807e5518-3bc7-4c5f-84cd-2e2a6c4baf56" (UID: "807e5518-3bc7-4c5f-84cd-2e2a6c4baf56"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.937170 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-logs\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.941689 4773 scope.go:117] "RemoveContainer" containerID="d39360aa0ddf417ecff2e1a2505ba56e9a82ac9b423b93707267837bb15351ab" Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.942076 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-kube-api-access-dcgfd" (OuterVolumeSpecName: "kube-api-access-dcgfd") pod "807e5518-3bc7-4c5f-84cd-2e2a6c4baf56" (UID: "807e5518-3bc7-4c5f-84cd-2e2a6c4baf56"). InnerVolumeSpecName "kube-api-access-dcgfd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.942230 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b69e8a60-38b7-4461-ad44-f6402ca75bc3-kube-api-access-67sfm" (OuterVolumeSpecName: "kube-api-access-67sfm") pod "b69e8a60-38b7-4461-ad44-f6402ca75bc3" (UID: "b69e8a60-38b7-4461-ad44-f6402ca75bc3"). InnerVolumeSpecName "kube-api-access-67sfm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:33:50 crc kubenswrapper[4773]: E0122 13:33:50.943870 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d39360aa0ddf417ecff2e1a2505ba56e9a82ac9b423b93707267837bb15351ab\": container with ID starting with d39360aa0ddf417ecff2e1a2505ba56e9a82ac9b423b93707267837bb15351ab not found: ID does not exist" containerID="d39360aa0ddf417ecff2e1a2505ba56e9a82ac9b423b93707267837bb15351ab" Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.943950 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d39360aa0ddf417ecff2e1a2505ba56e9a82ac9b423b93707267837bb15351ab"} err="failed to get container status \"d39360aa0ddf417ecff2e1a2505ba56e9a82ac9b423b93707267837bb15351ab\": rpc error: code = NotFound desc = could not find container \"d39360aa0ddf417ecff2e1a2505ba56e9a82ac9b423b93707267837bb15351ab\": container with ID starting with d39360aa0ddf417ecff2e1a2505ba56e9a82ac9b423b93707267837bb15351ab not found: ID does not exist" Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.943982 4773 scope.go:117] "RemoveContainer" containerID="d9150e762b313d4c80f7796503fd4e5bc70be883188e6bb7020153298ca5cbce" Jan 22 13:33:50 crc kubenswrapper[4773]: E0122 13:33:50.945508 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9150e762b313d4c80f7796503fd4e5bc70be883188e6bb7020153298ca5cbce\": container with ID starting with d9150e762b313d4c80f7796503fd4e5bc70be883188e6bb7020153298ca5cbce not found: ID does not exist" containerID="d9150e762b313d4c80f7796503fd4e5bc70be883188e6bb7020153298ca5cbce" Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.945567 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9150e762b313d4c80f7796503fd4e5bc70be883188e6bb7020153298ca5cbce"} err="failed to get container status \"d9150e762b313d4c80f7796503fd4e5bc70be883188e6bb7020153298ca5cbce\": rpc error: code = NotFound desc = could not find container \"d9150e762b313d4c80f7796503fd4e5bc70be883188e6bb7020153298ca5cbce\": container with ID starting with d9150e762b313d4c80f7796503fd4e5bc70be883188e6bb7020153298ca5cbce not found: ID does not exist" Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.945622 4773 scope.go:117] "RemoveContainer" containerID="d39360aa0ddf417ecff2e1a2505ba56e9a82ac9b423b93707267837bb15351ab" Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.950017 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d39360aa0ddf417ecff2e1a2505ba56e9a82ac9b423b93707267837bb15351ab"} err="failed to get container status \"d39360aa0ddf417ecff2e1a2505ba56e9a82ac9b423b93707267837bb15351ab\": rpc error: code = NotFound desc = could not find container \"d39360aa0ddf417ecff2e1a2505ba56e9a82ac9b423b93707267837bb15351ab\": container with ID starting with d39360aa0ddf417ecff2e1a2505ba56e9a82ac9b423b93707267837bb15351ab not found: ID does not exist" Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.950056 4773 scope.go:117] "RemoveContainer" containerID="d9150e762b313d4c80f7796503fd4e5bc70be883188e6bb7020153298ca5cbce" Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.953019 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9150e762b313d4c80f7796503fd4e5bc70be883188e6bb7020153298ca5cbce"} err="failed to get 
container status \"d9150e762b313d4c80f7796503fd4e5bc70be883188e6bb7020153298ca5cbce\": rpc error: code = NotFound desc = could not find container \"d9150e762b313d4c80f7796503fd4e5bc70be883188e6bb7020153298ca5cbce\": container with ID starting with d9150e762b313d4c80f7796503fd4e5bc70be883188e6bb7020153298ca5cbce not found: ID does not exist" Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.975691 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-config-data" (OuterVolumeSpecName: "config-data") pod "807e5518-3bc7-4c5f-84cd-2e2a6c4baf56" (UID: "807e5518-3bc7-4c5f-84cd-2e2a6c4baf56"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.977799 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b69e8a60-38b7-4461-ad44-f6402ca75bc3-config-data" (OuterVolumeSpecName: "config-data") pod "b69e8a60-38b7-4461-ad44-f6402ca75bc3" (UID: "b69e8a60-38b7-4461-ad44-f6402ca75bc3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.985041 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "807e5518-3bc7-4c5f-84cd-2e2a6c4baf56" (UID: "807e5518-3bc7-4c5f-84cd-2e2a6c4baf56"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:33:50 crc kubenswrapper[4773]: I0122 13:33:50.998583 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b69e8a60-38b7-4461-ad44-f6402ca75bc3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b69e8a60-38b7-4461-ad44-f6402ca75bc3" (UID: "b69e8a60-38b7-4461-ad44-f6402ca75bc3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.038915 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.038953 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b69e8a60-38b7-4461-ad44-f6402ca75bc3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.038966 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67sfm\" (UniqueName: \"kubernetes.io/projected/b69e8a60-38b7-4461-ad44-f6402ca75bc3-kube-api-access-67sfm\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.038979 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.038990 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b69e8a60-38b7-4461-ad44-f6402ca75bc3-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.039000 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcgfd\" (UniqueName: \"kubernetes.io/projected/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56-kube-api-access-dcgfd\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.097229 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.195281 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.204163 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.214054 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.230654 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.246378 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 13:33:51 crc kubenswrapper[4773]: E0122 13:33:51.246788 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b69e8a60-38b7-4461-ad44-f6402ca75bc3" containerName="nova-cell1-novncproxy-novncproxy" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.246809 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b69e8a60-38b7-4461-ad44-f6402ca75bc3" containerName="nova-cell1-novncproxy-novncproxy" Jan 22 13:33:51 crc kubenswrapper[4773]: E0122 13:33:51.246828 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="807e5518-3bc7-4c5f-84cd-2e2a6c4baf56" containerName="nova-metadata-log" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.246835 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="807e5518-3bc7-4c5f-84cd-2e2a6c4baf56" containerName="nova-metadata-log" Jan 22 13:33:51 crc kubenswrapper[4773]: E0122 13:33:51.246868 4773 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="807e5518-3bc7-4c5f-84cd-2e2a6c4baf56" containerName="nova-metadata-metadata" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.246874 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="807e5518-3bc7-4c5f-84cd-2e2a6c4baf56" containerName="nova-metadata-metadata" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.247045 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="807e5518-3bc7-4c5f-84cd-2e2a6c4baf56" containerName="nova-metadata-log" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.247071 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="b69e8a60-38b7-4461-ad44-f6402ca75bc3" containerName="nova-cell1-novncproxy-novncproxy" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.247083 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="807e5518-3bc7-4c5f-84cd-2e2a6c4baf56" containerName="nova-metadata-metadata" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.247889 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.256570 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.256818 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.257016 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.261209 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.269440 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.271265 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.273953 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.274029 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.287441 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.445654 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c34aa5c6-00d7-48d2-97ea-ad35825b107d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c34aa5c6-00d7-48d2-97ea-ad35825b107d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.446046 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/836b6c35-f275-4871-817e-59a3b6d18a6a-logs\") pod \"nova-metadata-0\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " pod="openstack/nova-metadata-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.446159 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vvx7\" (UniqueName: \"kubernetes.io/projected/836b6c35-f275-4871-817e-59a3b6d18a6a-kube-api-access-9vvx7\") pod \"nova-metadata-0\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " pod="openstack/nova-metadata-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.446199 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/836b6c35-f275-4871-817e-59a3b6d18a6a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " pod="openstack/nova-metadata-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.446232 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/c34aa5c6-00d7-48d2-97ea-ad35825b107d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c34aa5c6-00d7-48d2-97ea-ad35825b107d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.446265 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/836b6c35-f275-4871-817e-59a3b6d18a6a-config-data\") pod \"nova-metadata-0\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " pod="openstack/nova-metadata-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.446555 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/836b6c35-f275-4871-817e-59a3b6d18a6a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " pod="openstack/nova-metadata-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.446674 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c34aa5c6-00d7-48d2-97ea-ad35825b107d-combined-ca-bundle\") pod 
\"nova-cell1-novncproxy-0\" (UID: \"c34aa5c6-00d7-48d2-97ea-ad35825b107d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.446787 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbcls\" (UniqueName: \"kubernetes.io/projected/c34aa5c6-00d7-48d2-97ea-ad35825b107d-kube-api-access-xbcls\") pod \"nova-cell1-novncproxy-0\" (UID: \"c34aa5c6-00d7-48d2-97ea-ad35825b107d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.446953 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/c34aa5c6-00d7-48d2-97ea-ad35825b107d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c34aa5c6-00d7-48d2-97ea-ad35825b107d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.549606 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/836b6c35-f275-4871-817e-59a3b6d18a6a-logs\") pod \"nova-metadata-0\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " pod="openstack/nova-metadata-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.549690 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vvx7\" (UniqueName: \"kubernetes.io/projected/836b6c35-f275-4871-817e-59a3b6d18a6a-kube-api-access-9vvx7\") pod \"nova-metadata-0\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " pod="openstack/nova-metadata-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.549716 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/836b6c35-f275-4871-817e-59a3b6d18a6a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " pod="openstack/nova-metadata-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.549743 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/c34aa5c6-00d7-48d2-97ea-ad35825b107d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c34aa5c6-00d7-48d2-97ea-ad35825b107d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.549766 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/836b6c35-f275-4871-817e-59a3b6d18a6a-config-data\") pod \"nova-metadata-0\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " pod="openstack/nova-metadata-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.549809 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/836b6c35-f275-4871-817e-59a3b6d18a6a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " pod="openstack/nova-metadata-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.549829 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c34aa5c6-00d7-48d2-97ea-ad35825b107d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c34aa5c6-00d7-48d2-97ea-ad35825b107d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:51 crc 
kubenswrapper[4773]: I0122 13:33:51.549856 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbcls\" (UniqueName: \"kubernetes.io/projected/c34aa5c6-00d7-48d2-97ea-ad35825b107d-kube-api-access-xbcls\") pod \"nova-cell1-novncproxy-0\" (UID: \"c34aa5c6-00d7-48d2-97ea-ad35825b107d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.549905 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/c34aa5c6-00d7-48d2-97ea-ad35825b107d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c34aa5c6-00d7-48d2-97ea-ad35825b107d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.549947 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c34aa5c6-00d7-48d2-97ea-ad35825b107d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c34aa5c6-00d7-48d2-97ea-ad35825b107d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.551270 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/836b6c35-f275-4871-817e-59a3b6d18a6a-logs\") pod \"nova-metadata-0\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " pod="openstack/nova-metadata-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.555640 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/c34aa5c6-00d7-48d2-97ea-ad35825b107d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c34aa5c6-00d7-48d2-97ea-ad35825b107d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.555775 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/c34aa5c6-00d7-48d2-97ea-ad35825b107d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c34aa5c6-00d7-48d2-97ea-ad35825b107d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.555978 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c34aa5c6-00d7-48d2-97ea-ad35825b107d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c34aa5c6-00d7-48d2-97ea-ad35825b107d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.557740 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/836b6c35-f275-4871-817e-59a3b6d18a6a-config-data\") pod \"nova-metadata-0\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " pod="openstack/nova-metadata-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.565213 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c34aa5c6-00d7-48d2-97ea-ad35825b107d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c34aa5c6-00d7-48d2-97ea-ad35825b107d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.573541 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/836b6c35-f275-4871-817e-59a3b6d18a6a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " pod="openstack/nova-metadata-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.573975 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/836b6c35-f275-4871-817e-59a3b6d18a6a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " pod="openstack/nova-metadata-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.576780 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbcls\" (UniqueName: \"kubernetes.io/projected/c34aa5c6-00d7-48d2-97ea-ad35825b107d-kube-api-access-xbcls\") pod \"nova-cell1-novncproxy-0\" (UID: \"c34aa5c6-00d7-48d2-97ea-ad35825b107d\") " pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.580134 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vvx7\" (UniqueName: \"kubernetes.io/projected/836b6c35-f275-4871-817e-59a3b6d18a6a-kube-api-access-9vvx7\") pod \"nova-metadata-0\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " pod="openstack/nova-metadata-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.597085 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.866841 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.878811 4773 generic.go:334] "Generic (PLEG): container finished" podID="16320e64-130c-4657-acc2-6025d895a31d" containerID="f704af551d1d977eb806cea7d27265bea74385548000cda73956e99983dd372f" exitCode=0 Jan 22 13:33:51 crc kubenswrapper[4773]: I0122 13:33:51.878846 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-s4ssz" event={"ID":"16320e64-130c-4657-acc2-6025d895a31d","Type":"ContainerDied","Data":"f704af551d1d977eb806cea7d27265bea74385548000cda73956e99983dd372f"} Jan 22 13:33:52 crc kubenswrapper[4773]: I0122 13:33:52.091559 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:33:52 crc kubenswrapper[4773]: I0122 13:33:52.326131 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 22 13:33:52 crc kubenswrapper[4773]: W0122 13:33:52.327160 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc34aa5c6_00d7_48d2_97ea_ad35825b107d.slice/crio-65663657dcb49dcec18c0ee186b712bcf8abf3586bb6cbff828c29803c0b2d79 WatchSource:0}: Error finding container 65663657dcb49dcec18c0ee186b712bcf8abf3586bb6cbff828c29803c0b2d79: Status 404 returned error can't find the container with id 65663657dcb49dcec18c0ee186b712bcf8abf3586bb6cbff828c29803c0b2d79 Jan 22 13:33:52 crc kubenswrapper[4773]: I0122 13:33:52.675585 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="807e5518-3bc7-4c5f-84cd-2e2a6c4baf56" path="/var/lib/kubelet/pods/807e5518-3bc7-4c5f-84cd-2e2a6c4baf56/volumes" Jan 22 13:33:52 crc kubenswrapper[4773]: I0122 13:33:52.677200 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b69e8a60-38b7-4461-ad44-f6402ca75bc3" 
path="/var/lib/kubelet/pods/b69e8a60-38b7-4461-ad44-f6402ca75bc3/volumes" Jan 22 13:33:52 crc kubenswrapper[4773]: I0122 13:33:52.890124 4773 generic.go:334] "Generic (PLEG): container finished" podID="5409c3ae-c282-4e60-bc93-e0c5bfdb1304" containerID="879cd149d7544b2f8d5892b15d2f0516649eef95b56cb998b7e5898652ef4c90" exitCode=0 Jan 22 13:33:52 crc kubenswrapper[4773]: I0122 13:33:52.890211 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-c6gl6" event={"ID":"5409c3ae-c282-4e60-bc93-e0c5bfdb1304","Type":"ContainerDied","Data":"879cd149d7544b2f8d5892b15d2f0516649eef95b56cb998b7e5898652ef4c90"} Jan 22 13:33:52 crc kubenswrapper[4773]: I0122 13:33:52.892079 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"c34aa5c6-00d7-48d2-97ea-ad35825b107d","Type":"ContainerStarted","Data":"7dc56d86c3a59c409237e5a04cf2cbb8100e2d2210054f02de1897a3b2a15159"} Jan 22 13:33:52 crc kubenswrapper[4773]: I0122 13:33:52.892164 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"c34aa5c6-00d7-48d2-97ea-ad35825b107d","Type":"ContainerStarted","Data":"65663657dcb49dcec18c0ee186b712bcf8abf3586bb6cbff828c29803c0b2d79"} Jan 22 13:33:52 crc kubenswrapper[4773]: I0122 13:33:52.894037 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"836b6c35-f275-4871-817e-59a3b6d18a6a","Type":"ContainerStarted","Data":"194e7638f2a4d61a414ce2d2a7e7f2350f3ac611cc1cef885f1da7d3f551a6f7"} Jan 22 13:33:52 crc kubenswrapper[4773]: I0122 13:33:52.894082 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"836b6c35-f275-4871-817e-59a3b6d18a6a","Type":"ContainerStarted","Data":"22996bf3b0d45caecb93f177c2b68f074411f572de53e3af7e5afdf6c2446faf"} Jan 22 13:33:52 crc kubenswrapper[4773]: I0122 13:33:52.894097 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"836b6c35-f275-4871-817e-59a3b6d18a6a","Type":"ContainerStarted","Data":"547448ce99ef0fef3d6de39d8830654fa7d7f6a59aaf948d7c4c411ce21cbad2"} Jan 22 13:33:52 crc kubenswrapper[4773]: I0122 13:33:52.957085 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.957057981 podStartE2EDuration="1.957057981s" podCreationTimestamp="2026-01-22 13:33:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:33:52.949334144 +0000 UTC m=+5940.527449989" watchObservedRunningTime="2026-01-22 13:33:52.957057981 +0000 UTC m=+5940.535173806" Jan 22 13:33:52 crc kubenswrapper[4773]: I0122 13:33:52.959518 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.95950829 podStartE2EDuration="1.95950829s" podCreationTimestamp="2026-01-22 13:33:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:33:52.929710192 +0000 UTC m=+5940.507826037" watchObservedRunningTime="2026-01-22 13:33:52.95950829 +0000 UTC m=+5940.537624115" Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.278643 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-s4ssz" Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.409001 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16320e64-130c-4657-acc2-6025d895a31d-config-data\") pod \"16320e64-130c-4657-acc2-6025d895a31d\" (UID: \"16320e64-130c-4657-acc2-6025d895a31d\") " Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.409089 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2rt2\" (UniqueName: \"kubernetes.io/projected/16320e64-130c-4657-acc2-6025d895a31d-kube-api-access-n2rt2\") pod \"16320e64-130c-4657-acc2-6025d895a31d\" (UID: \"16320e64-130c-4657-acc2-6025d895a31d\") " Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.409117 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16320e64-130c-4657-acc2-6025d895a31d-scripts\") pod \"16320e64-130c-4657-acc2-6025d895a31d\" (UID: \"16320e64-130c-4657-acc2-6025d895a31d\") " Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.409142 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16320e64-130c-4657-acc2-6025d895a31d-combined-ca-bundle\") pod \"16320e64-130c-4657-acc2-6025d895a31d\" (UID: \"16320e64-130c-4657-acc2-6025d895a31d\") " Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.416688 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16320e64-130c-4657-acc2-6025d895a31d-scripts" (OuterVolumeSpecName: "scripts") pod "16320e64-130c-4657-acc2-6025d895a31d" (UID: "16320e64-130c-4657-acc2-6025d895a31d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.418539 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16320e64-130c-4657-acc2-6025d895a31d-kube-api-access-n2rt2" (OuterVolumeSpecName: "kube-api-access-n2rt2") pod "16320e64-130c-4657-acc2-6025d895a31d" (UID: "16320e64-130c-4657-acc2-6025d895a31d"). InnerVolumeSpecName "kube-api-access-n2rt2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.441191 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16320e64-130c-4657-acc2-6025d895a31d-config-data" (OuterVolumeSpecName: "config-data") pod "16320e64-130c-4657-acc2-6025d895a31d" (UID: "16320e64-130c-4657-acc2-6025d895a31d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.475125 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16320e64-130c-4657-acc2-6025d895a31d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "16320e64-130c-4657-acc2-6025d895a31d" (UID: "16320e64-130c-4657-acc2-6025d895a31d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.512282 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16320e64-130c-4657-acc2-6025d895a31d-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.512361 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2rt2\" (UniqueName: \"kubernetes.io/projected/16320e64-130c-4657-acc2-6025d895a31d-kube-api-access-n2rt2\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.512382 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16320e64-130c-4657-acc2-6025d895a31d-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.512399 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16320e64-130c-4657-acc2-6025d895a31d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.926224 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-s4ssz" event={"ID":"16320e64-130c-4657-acc2-6025d895a31d","Type":"ContainerDied","Data":"f9d471efba007403cd2576d345d3f6c9ced234bf27cc49066b3dc9a677e34461"} Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.926282 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9d471efba007403cd2576d345d3f6c9ced234bf27cc49066b3dc9a677e34461" Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.926393 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-s4ssz" Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.993536 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 13:33:53 crc kubenswrapper[4773]: E0122 13:33:53.994019 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16320e64-130c-4657-acc2-6025d895a31d" containerName="nova-cell1-conductor-db-sync" Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.994038 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="16320e64-130c-4657-acc2-6025d895a31d" containerName="nova-cell1-conductor-db-sync" Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.994222 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="16320e64-130c-4657-acc2-6025d895a31d" containerName="nova-cell1-conductor-db-sync" Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.994946 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 22 13:33:53 crc kubenswrapper[4773]: I0122 13:33:53.998307 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.010885 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.025182 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b7deba1-1cc3-4625-a248-3fb1aa70bf69-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6b7deba1-1cc3-4625-a248-3fb1aa70bf69\") " pod="openstack/nova-cell1-conductor-0" Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.025249 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b7deba1-1cc3-4625-a248-3fb1aa70bf69-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6b7deba1-1cc3-4625-a248-3fb1aa70bf69\") " pod="openstack/nova-cell1-conductor-0" Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.025337 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqwm9\" (UniqueName: \"kubernetes.io/projected/6b7deba1-1cc3-4625-a248-3fb1aa70bf69-kube-api-access-kqwm9\") pod \"nova-cell1-conductor-0\" (UID: \"6b7deba1-1cc3-4625-a248-3fb1aa70bf69\") " pod="openstack/nova-cell1-conductor-0" Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.127886 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b7deba1-1cc3-4625-a248-3fb1aa70bf69-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6b7deba1-1cc3-4625-a248-3fb1aa70bf69\") " pod="openstack/nova-cell1-conductor-0" Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.127967 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b7deba1-1cc3-4625-a248-3fb1aa70bf69-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6b7deba1-1cc3-4625-a248-3fb1aa70bf69\") " pod="openstack/nova-cell1-conductor-0" Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.128016 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqwm9\" (UniqueName: \"kubernetes.io/projected/6b7deba1-1cc3-4625-a248-3fb1aa70bf69-kube-api-access-kqwm9\") pod \"nova-cell1-conductor-0\" (UID: \"6b7deba1-1cc3-4625-a248-3fb1aa70bf69\") " pod="openstack/nova-cell1-conductor-0" Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.139448 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b7deba1-1cc3-4625-a248-3fb1aa70bf69-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6b7deba1-1cc3-4625-a248-3fb1aa70bf69\") " pod="openstack/nova-cell1-conductor-0" Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.139519 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b7deba1-1cc3-4625-a248-3fb1aa70bf69-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6b7deba1-1cc3-4625-a248-3fb1aa70bf69\") " pod="openstack/nova-cell1-conductor-0" Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.150670 4773 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqwm9\" (UniqueName: \"kubernetes.io/projected/6b7deba1-1cc3-4625-a248-3fb1aa70bf69-kube-api-access-kqwm9\") pod \"nova-cell1-conductor-0\" (UID: \"6b7deba1-1cc3-4625-a248-3fb1aa70bf69\") " pod="openstack/nova-cell1-conductor-0" Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.340600 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.373795 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-c6gl6" Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.539408 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2d68\" (UniqueName: \"kubernetes.io/projected/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-kube-api-access-s2d68\") pod \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\" (UID: \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\") " Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.539551 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-combined-ca-bundle\") pod \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\" (UID: \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\") " Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.539611 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-scripts\") pod \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\" (UID: \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\") " Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.539675 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-config-data\") pod \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\" (UID: \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\") " Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.544408 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-kube-api-access-s2d68" (OuterVolumeSpecName: "kube-api-access-s2d68") pod "5409c3ae-c282-4e60-bc93-e0c5bfdb1304" (UID: "5409c3ae-c282-4e60-bc93-e0c5bfdb1304"). InnerVolumeSpecName "kube-api-access-s2d68". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.544719 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-scripts" (OuterVolumeSpecName: "scripts") pod "5409c3ae-c282-4e60-bc93-e0c5bfdb1304" (UID: "5409c3ae-c282-4e60-bc93-e0c5bfdb1304"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:33:54 crc kubenswrapper[4773]: E0122 13:33:54.572750 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-combined-ca-bundle podName:5409c3ae-c282-4e60-bc93-e0c5bfdb1304 nodeName:}" failed. No retries permitted until 2026-01-22 13:33:55.07271364 +0000 UTC m=+5942.650829465 (durationBeforeRetry 500ms). 
Jan 22 13:33:54 crc kubenswrapper[4773]: E0122 13:33:54.572750 4773 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-combined-ca-bundle podName:5409c3ae-c282-4e60-bc93-e0c5bfdb1304 nodeName:}" failed. No retries permitted until 2026-01-22 13:33:55.07271364 +0000 UTC m=+5942.650829465 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-combined-ca-bundle") pod "5409c3ae-c282-4e60-bc93-e0c5bfdb1304" (UID: "5409c3ae-c282-4e60-bc93-e0c5bfdb1304") : error deleting /var/lib/kubelet/pods/5409c3ae-c282-4e60-bc93-e0c5bfdb1304/volume-subpaths: remove /var/lib/kubelet/pods/5409c3ae-c282-4e60-bc93-e0c5bfdb1304/volume-subpaths: no such file or directory
Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.576334 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-config-data" (OuterVolumeSpecName: "config-data") pod "5409c3ae-c282-4e60-bc93-e0c5bfdb1304" (UID: "5409c3ae-c282-4e60-bc93-e0c5bfdb1304"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.641339 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2d68\" (UniqueName: \"kubernetes.io/projected/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-kube-api-access-s2d68\") on node \"crc\" DevicePath \"\""
Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.641376 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-scripts\") on node \"crc\" DevicePath \"\""
Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.641393 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.794917 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Jan 22 13:33:54 crc kubenswrapper[4773]: W0122 13:33:54.798935 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b7deba1_1cc3_4625_a248_3fb1aa70bf69.slice/crio-9cdedce876d2977b440f4966cc43f733bcb8b4cd1604609a9d1cc5b84478d725 WatchSource:0}: Error finding container 9cdedce876d2977b440f4966cc43f733bcb8b4cd1604609a9d1cc5b84478d725: Status 404 returned error can't find the container with id 9cdedce876d2977b440f4966cc43f733bcb8b4cd1604609a9d1cc5b84478d725
Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.939467 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-c6gl6" event={"ID":"5409c3ae-c282-4e60-bc93-e0c5bfdb1304","Type":"ContainerDied","Data":"71b56680cb95fd1be9543bf83bc68749455be0286210a189452626957e4a2fe7"}
Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.939780 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71b56680cb95fd1be9543bf83bc68749455be0286210a189452626957e4a2fe7"
Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.939860 4773 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-c6gl6" Jan 22 13:33:54 crc kubenswrapper[4773]: I0122 13:33:54.942149 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6b7deba1-1cc3-4625-a248-3fb1aa70bf69","Type":"ContainerStarted","Data":"9cdedce876d2977b440f4966cc43f733bcb8b4cd1604609a9d1cc5b84478d725"} Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.093247 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.093556 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="33888722-8904-4778-b046-b9d85663d4f0" containerName="nova-api-log" containerID="cri-o://219828e5c4a0a0bba023541b6ecec32b35f8bf06b646e96e2dc5b8428fe1b386" gracePeriod=30 Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.093978 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="33888722-8904-4778-b046-b9d85663d4f0" containerName="nova-api-api" containerID="cri-o://62aa3ed0057ee80ce4ab3d56e50d399a0abab78f0cd6cdf3264ba28f94e45dd8" gracePeriod=30 Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.107621 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.107831 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="80dfc6ef-7650-4890-b457-9e221da5fc24" containerName="nova-scheduler-scheduler" containerID="cri-o://95191aa98448678d853ea30ef9e272b61203d87a0acacceeb8f937995f4ea5a5" gracePeriod=30 Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.139383 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.139672 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="836b6c35-f275-4871-817e-59a3b6d18a6a" containerName="nova-metadata-log" containerID="cri-o://22996bf3b0d45caecb93f177c2b68f074411f572de53e3af7e5afdf6c2446faf" gracePeriod=30 Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.139740 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="836b6c35-f275-4871-817e-59a3b6d18a6a" containerName="nova-metadata-metadata" containerID="cri-o://194e7638f2a4d61a414ce2d2a7e7f2350f3ac611cc1cef885f1da7d3f551a6f7" gracePeriod=30 Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.151533 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-combined-ca-bundle\") pod \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\" (UID: \"5409c3ae-c282-4e60-bc93-e0c5bfdb1304\") " Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.156989 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5409c3ae-c282-4e60-bc93-e0c5bfdb1304" (UID: "5409c3ae-c282-4e60-bc93-e0c5bfdb1304"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.256393 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5409c3ae-c282-4e60-bc93-e0c5bfdb1304-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.957275 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6b7deba1-1cc3-4625-a248-3fb1aa70bf69","Type":"ContainerStarted","Data":"f1a13802a0f97a1cda76d9f4d131baf0d652ef6c24cefdecc22b1383c659a1bb"} Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.958655 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.966791 4773 generic.go:334] "Generic (PLEG): container finished" podID="33888722-8904-4778-b046-b9d85663d4f0" containerID="62aa3ed0057ee80ce4ab3d56e50d399a0abab78f0cd6cdf3264ba28f94e45dd8" exitCode=0 Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.966825 4773 generic.go:334] "Generic (PLEG): container finished" podID="33888722-8904-4778-b046-b9d85663d4f0" containerID="219828e5c4a0a0bba023541b6ecec32b35f8bf06b646e96e2dc5b8428fe1b386" exitCode=143 Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.966890 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"33888722-8904-4778-b046-b9d85663d4f0","Type":"ContainerDied","Data":"62aa3ed0057ee80ce4ab3d56e50d399a0abab78f0cd6cdf3264ba28f94e45dd8"} Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.966934 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"33888722-8904-4778-b046-b9d85663d4f0","Type":"ContainerDied","Data":"219828e5c4a0a0bba023541b6ecec32b35f8bf06b646e96e2dc5b8428fe1b386"} Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.968665 4773 generic.go:334] "Generic (PLEG): container finished" podID="836b6c35-f275-4871-817e-59a3b6d18a6a" containerID="194e7638f2a4d61a414ce2d2a7e7f2350f3ac611cc1cef885f1da7d3f551a6f7" exitCode=0 Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.968680 4773 generic.go:334] "Generic (PLEG): container finished" podID="836b6c35-f275-4871-817e-59a3b6d18a6a" containerID="22996bf3b0d45caecb93f177c2b68f074411f572de53e3af7e5afdf6c2446faf" exitCode=143 Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.968696 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"836b6c35-f275-4871-817e-59a3b6d18a6a","Type":"ContainerDied","Data":"194e7638f2a4d61a414ce2d2a7e7f2350f3ac611cc1cef885f1da7d3f551a6f7"} Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.968711 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"836b6c35-f275-4871-817e-59a3b6d18a6a","Type":"ContainerDied","Data":"22996bf3b0d45caecb93f177c2b68f074411f572de53e3af7e5afdf6c2446faf"} Jan 22 13:33:55 crc kubenswrapper[4773]: I0122 13:33:55.985925 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.985903469 podStartE2EDuration="2.985903469s" podCreationTimestamp="2026-01-22 13:33:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:33:55.976809223 +0000 UTC m=+5943.554925038" watchObservedRunningTime="2026-01-22 13:33:55.985903469 
+0000 UTC m=+5943.564019294" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.226264 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.233362 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.384382 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vvx7\" (UniqueName: \"kubernetes.io/projected/836b6c35-f275-4871-817e-59a3b6d18a6a-kube-api-access-9vvx7\") pod \"836b6c35-f275-4871-817e-59a3b6d18a6a\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.384460 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/836b6c35-f275-4871-817e-59a3b6d18a6a-logs\") pod \"836b6c35-f275-4871-817e-59a3b6d18a6a\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.384511 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33888722-8904-4778-b046-b9d85663d4f0-config-data\") pod \"33888722-8904-4778-b046-b9d85663d4f0\" (UID: \"33888722-8904-4778-b046-b9d85663d4f0\") " Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.384571 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/836b6c35-f275-4871-817e-59a3b6d18a6a-combined-ca-bundle\") pod \"836b6c35-f275-4871-817e-59a3b6d18a6a\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.384664 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/836b6c35-f275-4871-817e-59a3b6d18a6a-nova-metadata-tls-certs\") pod \"836b6c35-f275-4871-817e-59a3b6d18a6a\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.384718 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33888722-8904-4778-b046-b9d85663d4f0-combined-ca-bundle\") pod \"33888722-8904-4778-b046-b9d85663d4f0\" (UID: \"33888722-8904-4778-b046-b9d85663d4f0\") " Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.384766 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33888722-8904-4778-b046-b9d85663d4f0-logs\") pod \"33888722-8904-4778-b046-b9d85663d4f0\" (UID: \"33888722-8904-4778-b046-b9d85663d4f0\") " Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.384807 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/836b6c35-f275-4871-817e-59a3b6d18a6a-config-data\") pod \"836b6c35-f275-4871-817e-59a3b6d18a6a\" (UID: \"836b6c35-f275-4871-817e-59a3b6d18a6a\") " Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.384855 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2ngx\" (UniqueName: \"kubernetes.io/projected/33888722-8904-4778-b046-b9d85663d4f0-kube-api-access-x2ngx\") pod \"33888722-8904-4778-b046-b9d85663d4f0\" (UID: 
\"33888722-8904-4778-b046-b9d85663d4f0\") " Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.386002 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33888722-8904-4778-b046-b9d85663d4f0-logs" (OuterVolumeSpecName: "logs") pod "33888722-8904-4778-b046-b9d85663d4f0" (UID: "33888722-8904-4778-b046-b9d85663d4f0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.386037 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/836b6c35-f275-4871-817e-59a3b6d18a6a-logs" (OuterVolumeSpecName: "logs") pod "836b6c35-f275-4871-817e-59a3b6d18a6a" (UID: "836b6c35-f275-4871-817e-59a3b6d18a6a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.390342 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33888722-8904-4778-b046-b9d85663d4f0-kube-api-access-x2ngx" (OuterVolumeSpecName: "kube-api-access-x2ngx") pod "33888722-8904-4778-b046-b9d85663d4f0" (UID: "33888722-8904-4778-b046-b9d85663d4f0"). InnerVolumeSpecName "kube-api-access-x2ngx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.390463 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/836b6c35-f275-4871-817e-59a3b6d18a6a-kube-api-access-9vvx7" (OuterVolumeSpecName: "kube-api-access-9vvx7") pod "836b6c35-f275-4871-817e-59a3b6d18a6a" (UID: "836b6c35-f275-4871-817e-59a3b6d18a6a"). InnerVolumeSpecName "kube-api-access-9vvx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.410837 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/836b6c35-f275-4871-817e-59a3b6d18a6a-config-data" (OuterVolumeSpecName: "config-data") pod "836b6c35-f275-4871-817e-59a3b6d18a6a" (UID: "836b6c35-f275-4871-817e-59a3b6d18a6a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.412099 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33888722-8904-4778-b046-b9d85663d4f0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "33888722-8904-4778-b046-b9d85663d4f0" (UID: "33888722-8904-4778-b046-b9d85663d4f0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.413864 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33888722-8904-4778-b046-b9d85663d4f0-config-data" (OuterVolumeSpecName: "config-data") pod "33888722-8904-4778-b046-b9d85663d4f0" (UID: "33888722-8904-4778-b046-b9d85663d4f0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.417875 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/836b6c35-f275-4871-817e-59a3b6d18a6a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "836b6c35-f275-4871-817e-59a3b6d18a6a" (UID: "836b6c35-f275-4871-817e-59a3b6d18a6a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.439441 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/836b6c35-f275-4871-817e-59a3b6d18a6a-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "836b6c35-f275-4871-817e-59a3b6d18a6a" (UID: "836b6c35-f275-4871-817e-59a3b6d18a6a"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.487445 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.488080 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/836b6c35-f275-4871-817e-59a3b6d18a6a-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.488116 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2ngx\" (UniqueName: \"kubernetes.io/projected/33888722-8904-4778-b046-b9d85663d4f0-kube-api-access-x2ngx\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.488127 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vvx7\" (UniqueName: \"kubernetes.io/projected/836b6c35-f275-4871-817e-59a3b6d18a6a-kube-api-access-9vvx7\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.488136 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/836b6c35-f275-4871-817e-59a3b6d18a6a-logs\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.488145 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33888722-8904-4778-b046-b9d85663d4f0-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.488156 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/836b6c35-f275-4871-817e-59a3b6d18a6a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.488167 4773 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/836b6c35-f275-4871-817e-59a3b6d18a6a-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.488178 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33888722-8904-4778-b046-b9d85663d4f0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.488189 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33888722-8904-4778-b046-b9d85663d4f0-logs\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.550072 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fdd6f7b5-8qdg8"] Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.550344 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" podUID="d35443f6-ccfd-4bb1-b1e2-f1899be1b916" containerName="dnsmasq-dns" 
containerID="cri-o://a2a90ccf4bd5fcbd2dacea3a1d716a5077c295770778c356968639f6cab9774f" gracePeriod=10 Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.867360 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.980945 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"33888722-8904-4778-b046-b9d85663d4f0","Type":"ContainerDied","Data":"789ca391b08f4013955a2330f4e4ff65762b0b4491d026ade9e95ab76c553c19"} Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.981032 4773 scope.go:117] "RemoveContainer" containerID="62aa3ed0057ee80ce4ab3d56e50d399a0abab78f0cd6cdf3264ba28f94e45dd8" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.980969 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.989073 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.989084 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"836b6c35-f275-4871-817e-59a3b6d18a6a","Type":"ContainerDied","Data":"547448ce99ef0fef3d6de39d8830654fa7d7f6a59aaf948d7c4c411ce21cbad2"} Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.995204 4773 generic.go:334] "Generic (PLEG): container finished" podID="d35443f6-ccfd-4bb1-b1e2-f1899be1b916" containerID="a2a90ccf4bd5fcbd2dacea3a1d716a5077c295770778c356968639f6cab9774f" exitCode=0 Jan 22 13:33:56 crc kubenswrapper[4773]: I0122 13:33:56.995519 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" event={"ID":"d35443f6-ccfd-4bb1-b1e2-f1899be1b916","Type":"ContainerDied","Data":"a2a90ccf4bd5fcbd2dacea3a1d716a5077c295770778c356968639f6cab9774f"} Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.077458 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.094989 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.105363 4773 scope.go:117] "RemoveContainer" containerID="219828e5c4a0a0bba023541b6ecec32b35f8bf06b646e96e2dc5b8428fe1b386" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.110188 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.150048 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:33:57 crc kubenswrapper[4773]: E0122 13:33:57.150630 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d35443f6-ccfd-4bb1-b1e2-f1899be1b916" containerName="init" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.150650 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d35443f6-ccfd-4bb1-b1e2-f1899be1b916" containerName="init" Jan 22 13:33:57 crc kubenswrapper[4773]: E0122 13:33:57.150674 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33888722-8904-4778-b046-b9d85663d4f0" containerName="nova-api-log" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.150685 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="33888722-8904-4778-b046-b9d85663d4f0" containerName="nova-api-log" Jan 22 13:33:57 crc kubenswrapper[4773]: E0122 13:33:57.150704 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="836b6c35-f275-4871-817e-59a3b6d18a6a" containerName="nova-metadata-metadata" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.150713 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="836b6c35-f275-4871-817e-59a3b6d18a6a" containerName="nova-metadata-metadata" Jan 22 13:33:57 crc kubenswrapper[4773]: E0122 13:33:57.150737 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5409c3ae-c282-4e60-bc93-e0c5bfdb1304" containerName="nova-manage" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.150745 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5409c3ae-c282-4e60-bc93-e0c5bfdb1304" containerName="nova-manage" Jan 22 13:33:57 crc kubenswrapper[4773]: E0122 13:33:57.150763 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="836b6c35-f275-4871-817e-59a3b6d18a6a" containerName="nova-metadata-log" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.150771 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="836b6c35-f275-4871-817e-59a3b6d18a6a" containerName="nova-metadata-log" Jan 22 13:33:57 crc kubenswrapper[4773]: E0122 13:33:57.150789 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33888722-8904-4778-b046-b9d85663d4f0" containerName="nova-api-api" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.150795 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="33888722-8904-4778-b046-b9d85663d4f0" containerName="nova-api-api" Jan 22 13:33:57 crc kubenswrapper[4773]: E0122 13:33:57.150809 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d35443f6-ccfd-4bb1-b1e2-f1899be1b916" containerName="dnsmasq-dns" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.150818 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d35443f6-ccfd-4bb1-b1e2-f1899be1b916" containerName="dnsmasq-dns" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.151025 4773 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="836b6c35-f275-4871-817e-59a3b6d18a6a" containerName="nova-metadata-metadata" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.151045 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="d35443f6-ccfd-4bb1-b1e2-f1899be1b916" containerName="dnsmasq-dns" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.151057 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5409c3ae-c282-4e60-bc93-e0c5bfdb1304" containerName="nova-manage" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.151073 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="33888722-8904-4778-b046-b9d85663d4f0" containerName="nova-api-log" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.151094 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="836b6c35-f275-4871-817e-59a3b6d18a6a" containerName="nova-metadata-log" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.151112 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="33888722-8904-4778-b046-b9d85663d4f0" containerName="nova-api-api" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.152397 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.163973 4773 scope.go:117] "RemoveContainer" containerID="194e7638f2a4d61a414ce2d2a7e7f2350f3ac611cc1cef885f1da7d3f551a6f7" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.164318 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.164490 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.173835 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.197077 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.199209 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jr7gh\" (UniqueName: \"kubernetes.io/projected/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-kube-api-access-jr7gh\") pod \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.199312 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-config\") pod \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.199363 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-ovsdbserver-sb\") pod \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.199440 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-ovsdbserver-nb\") pod \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " Jan 22 13:33:57 crc kubenswrapper[4773]: 
I0122 13:33:57.199489 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-dns-svc\") pod \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\" (UID: \"d35443f6-ccfd-4bb1-b1e2-f1899be1b916\") " Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.208131 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-kube-api-access-jr7gh" (OuterVolumeSpecName: "kube-api-access-jr7gh") pod "d35443f6-ccfd-4bb1-b1e2-f1899be1b916" (UID: "d35443f6-ccfd-4bb1-b1e2-f1899be1b916"). InnerVolumeSpecName "kube-api-access-jr7gh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.215492 4773 scope.go:117] "RemoveContainer" containerID="22996bf3b0d45caecb93f177c2b68f074411f572de53e3af7e5afdf6c2446faf" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.250381 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.254153 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-config" (OuterVolumeSpecName: "config") pod "d35443f6-ccfd-4bb1-b1e2-f1899be1b916" (UID: "d35443f6-ccfd-4bb1-b1e2-f1899be1b916"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.254473 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d35443f6-ccfd-4bb1-b1e2-f1899be1b916" (UID: "d35443f6-ccfd-4bb1-b1e2-f1899be1b916"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.262821 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.264269 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.266596 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.266863 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d35443f6-ccfd-4bb1-b1e2-f1899be1b916" (UID: "d35443f6-ccfd-4bb1-b1e2-f1899be1b916"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.268447 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d35443f6-ccfd-4bb1-b1e2-f1899be1b916" (UID: "d35443f6-ccfd-4bb1-b1e2-f1899be1b916"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.290959 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.301630 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/73610b81-bd66-4f9a-8810-8176c084f6a4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " pod="openstack/nova-metadata-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.301721 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73610b81-bd66-4f9a-8810-8176c084f6a4-logs\") pod \"nova-metadata-0\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " pod="openstack/nova-metadata-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.301767 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73610b81-bd66-4f9a-8810-8176c084f6a4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " pod="openstack/nova-metadata-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.301804 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73610b81-bd66-4f9a-8810-8176c084f6a4-config-data\") pod \"nova-metadata-0\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " pod="openstack/nova-metadata-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.301889 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thhjq\" (UniqueName: \"kubernetes.io/projected/73610b81-bd66-4f9a-8810-8176c084f6a4-kube-api-access-thhjq\") pod \"nova-metadata-0\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " pod="openstack/nova-metadata-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.301970 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jr7gh\" (UniqueName: \"kubernetes.io/projected/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-kube-api-access-jr7gh\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.302420 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.302443 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.302474 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.302482 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d35443f6-ccfd-4bb1-b1e2-f1899be1b916-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.404343 4773 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73610b81-bd66-4f9a-8810-8176c084f6a4-config-data\") pod \"nova-metadata-0\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " pod="openstack/nova-metadata-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.404468 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b64008fd-61b4-43f0-9827-b6b25f2bc4be-logs\") pod \"nova-api-0\" (UID: \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\") " pod="openstack/nova-api-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.404505 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b64008fd-61b4-43f0-9827-b6b25f2bc4be-config-data\") pod \"nova-api-0\" (UID: \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\") " pod="openstack/nova-api-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.404551 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b64008fd-61b4-43f0-9827-b6b25f2bc4be-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\") " pod="openstack/nova-api-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.404647 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r857q\" (UniqueName: \"kubernetes.io/projected/b64008fd-61b4-43f0-9827-b6b25f2bc4be-kube-api-access-r857q\") pod \"nova-api-0\" (UID: \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\") " pod="openstack/nova-api-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.404699 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thhjq\" (UniqueName: \"kubernetes.io/projected/73610b81-bd66-4f9a-8810-8176c084f6a4-kube-api-access-thhjq\") pod \"nova-metadata-0\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " pod="openstack/nova-metadata-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.404880 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/73610b81-bd66-4f9a-8810-8176c084f6a4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " pod="openstack/nova-metadata-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.404951 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73610b81-bd66-4f9a-8810-8176c084f6a4-logs\") pod \"nova-metadata-0\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " pod="openstack/nova-metadata-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.405051 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73610b81-bd66-4f9a-8810-8176c084f6a4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " pod="openstack/nova-metadata-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.405604 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73610b81-bd66-4f9a-8810-8176c084f6a4-logs\") pod \"nova-metadata-0\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " pod="openstack/nova-metadata-0" Jan 22 13:33:57 
crc kubenswrapper[4773]: I0122 13:33:57.408742 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73610b81-bd66-4f9a-8810-8176c084f6a4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " pod="openstack/nova-metadata-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.408890 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/73610b81-bd66-4f9a-8810-8176c084f6a4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " pod="openstack/nova-metadata-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.408951 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73610b81-bd66-4f9a-8810-8176c084f6a4-config-data\") pod \"nova-metadata-0\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " pod="openstack/nova-metadata-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.423911 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thhjq\" (UniqueName: \"kubernetes.io/projected/73610b81-bd66-4f9a-8810-8176c084f6a4-kube-api-access-thhjq\") pod \"nova-metadata-0\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " pod="openstack/nova-metadata-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.484515 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.506445 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b64008fd-61b4-43f0-9827-b6b25f2bc4be-logs\") pod \"nova-api-0\" (UID: \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\") " pod="openstack/nova-api-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.506739 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b64008fd-61b4-43f0-9827-b6b25f2bc4be-config-data\") pod \"nova-api-0\" (UID: \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\") " pod="openstack/nova-api-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.506885 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b64008fd-61b4-43f0-9827-b6b25f2bc4be-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\") " pod="openstack/nova-api-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.506970 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b64008fd-61b4-43f0-9827-b6b25f2bc4be-logs\") pod \"nova-api-0\" (UID: \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\") " pod="openstack/nova-api-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.507088 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r857q\" (UniqueName: \"kubernetes.io/projected/b64008fd-61b4-43f0-9827-b6b25f2bc4be-kube-api-access-r857q\") pod \"nova-api-0\" (UID: \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\") " pod="openstack/nova-api-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.509846 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/b64008fd-61b4-43f0-9827-b6b25f2bc4be-config-data\") pod \"nova-api-0\" (UID: \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\") " pod="openstack/nova-api-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.510314 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b64008fd-61b4-43f0-9827-b6b25f2bc4be-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\") " pod="openstack/nova-api-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.536839 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r857q\" (UniqueName: \"kubernetes.io/projected/b64008fd-61b4-43f0-9827-b6b25f2bc4be-kube-api-access-r857q\") pod \"nova-api-0\" (UID: \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\") " pod="openstack/nova-api-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.658245 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:33:57 crc kubenswrapper[4773]: E0122 13:33:57.658573 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.663884 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 13:33:57 crc kubenswrapper[4773]: I0122 13:33:57.935837 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:33:58 crc kubenswrapper[4773]: I0122 13:33:58.011318 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"73610b81-bd66-4f9a-8810-8176c084f6a4","Type":"ContainerStarted","Data":"26c6666609dcbf7face0aeedc1d91add6040b773424f87e045d6cec780b88641"} Jan 22 13:33:58 crc kubenswrapper[4773]: I0122 13:33:58.014770 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" event={"ID":"d35443f6-ccfd-4bb1-b1e2-f1899be1b916","Type":"ContainerDied","Data":"771b2c1472693cb3075f79b09d6f34f1aad469cf4c94e943322fc213d1490fd2"} Jan 22 13:33:58 crc kubenswrapper[4773]: I0122 13:33:58.014822 4773 scope.go:117] "RemoveContainer" containerID="a2a90ccf4bd5fcbd2dacea3a1d716a5077c295770778c356968639f6cab9774f" Jan 22 13:33:58 crc kubenswrapper[4773]: I0122 13:33:58.014859 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fdd6f7b5-8qdg8" Jan 22 13:33:58 crc kubenswrapper[4773]: I0122 13:33:58.055949 4773 scope.go:117] "RemoveContainer" containerID="d2405f45091a425cc22317c171ff888915a10b6e93fcf3773e0cb11de4d5ab0a" Jan 22 13:33:58 crc kubenswrapper[4773]: I0122 13:33:58.089348 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fdd6f7b5-8qdg8"] Jan 22 13:33:58 crc kubenswrapper[4773]: W0122 13:33:58.096566 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb64008fd_61b4_43f0_9827_b6b25f2bc4be.slice/crio-9bde433b7cfa638fcbb2a958b08480c60da4de268987290e1fa9fe40ae932433 WatchSource:0}: Error finding container 9bde433b7cfa638fcbb2a958b08480c60da4de268987290e1fa9fe40ae932433: Status 404 returned error can't find the container with id 9bde433b7cfa638fcbb2a958b08480c60da4de268987290e1fa9fe40ae932433 Jan 22 13:33:58 crc kubenswrapper[4773]: I0122 13:33:58.097932 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fdd6f7b5-8qdg8"] Jan 22 13:33:58 crc kubenswrapper[4773]: I0122 13:33:58.106140 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 13:33:58 crc kubenswrapper[4773]: I0122 13:33:58.670922 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33888722-8904-4778-b046-b9d85663d4f0" path="/var/lib/kubelet/pods/33888722-8904-4778-b046-b9d85663d4f0/volumes" Jan 22 13:33:58 crc kubenswrapper[4773]: I0122 13:33:58.672113 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="836b6c35-f275-4871-817e-59a3b6d18a6a" path="/var/lib/kubelet/pods/836b6c35-f275-4871-817e-59a3b6d18a6a/volumes" Jan 22 13:33:58 crc kubenswrapper[4773]: I0122 13:33:58.672889 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d35443f6-ccfd-4bb1-b1e2-f1899be1b916" path="/var/lib/kubelet/pods/d35443f6-ccfd-4bb1-b1e2-f1899be1b916/volumes" Jan 22 13:33:59 crc kubenswrapper[4773]: I0122 13:33:59.029389 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b64008fd-61b4-43f0-9827-b6b25f2bc4be","Type":"ContainerStarted","Data":"8003564859b9a21929f7a59ad1f39b84f5ab00c22c5b5772e36635736c809dfe"} Jan 22 13:33:59 crc kubenswrapper[4773]: I0122 13:33:59.029439 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b64008fd-61b4-43f0-9827-b6b25f2bc4be","Type":"ContainerStarted","Data":"a959e3904e4e453059ac56aaf6f49beb333aec9341048b01717700b0051e5569"} Jan 22 13:33:59 crc kubenswrapper[4773]: I0122 13:33:59.029454 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b64008fd-61b4-43f0-9827-b6b25f2bc4be","Type":"ContainerStarted","Data":"9bde433b7cfa638fcbb2a958b08480c60da4de268987290e1fa9fe40ae932433"} Jan 22 13:33:59 crc kubenswrapper[4773]: I0122 13:33:59.031631 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"73610b81-bd66-4f9a-8810-8176c084f6a4","Type":"ContainerStarted","Data":"8de2aa9a11924715dd6b7855b582b6752f1722071578e6d6c8b199f8941abdad"} Jan 22 13:33:59 crc kubenswrapper[4773]: I0122 13:33:59.031659 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"73610b81-bd66-4f9a-8810-8176c084f6a4","Type":"ContainerStarted","Data":"ddd432ebcb92747208fbd780151f514d58a199d07c69b47fb1b01362b28fc39a"} Jan 22 13:33:59 crc kubenswrapper[4773]: I0122 
13:33:59.056683 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.056661635 podStartE2EDuration="2.056661635s" podCreationTimestamp="2026-01-22 13:33:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:33:59.048360501 +0000 UTC m=+5946.626476336" watchObservedRunningTime="2026-01-22 13:33:59.056661635 +0000 UTC m=+5946.634777480" Jan 22 13:33:59 crc kubenswrapper[4773]: I0122 13:33:59.076020 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.075994218 podStartE2EDuration="2.075994218s" podCreationTimestamp="2026-01-22 13:33:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:33:59.065541894 +0000 UTC m=+5946.643657719" watchObservedRunningTime="2026-01-22 13:33:59.075994218 +0000 UTC m=+5946.654110063" Jan 22 13:33:59 crc kubenswrapper[4773]: I0122 13:33:59.385421 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Jan 22 13:34:01 crc kubenswrapper[4773]: I0122 13:34:01.867168 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:34:01 crc kubenswrapper[4773]: I0122 13:34:01.891474 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.085671 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.249218 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-pw7mf"] Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.250696 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-pw7mf" Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.253071 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.257110 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.264834 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-pw7mf"] Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.350640 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afb23a79-30ca-4764-8e4b-2dcf54c41fac-scripts\") pod \"nova-cell1-cell-mapping-pw7mf\" (UID: \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\") " pod="openstack/nova-cell1-cell-mapping-pw7mf" Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.350844 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afb23a79-30ca-4764-8e4b-2dcf54c41fac-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-pw7mf\" (UID: \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\") " pod="openstack/nova-cell1-cell-mapping-pw7mf" Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.350889 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkzln\" (UniqueName: \"kubernetes.io/projected/afb23a79-30ca-4764-8e4b-2dcf54c41fac-kube-api-access-qkzln\") pod \"nova-cell1-cell-mapping-pw7mf\" (UID: \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\") " pod="openstack/nova-cell1-cell-mapping-pw7mf" Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.350937 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afb23a79-30ca-4764-8e4b-2dcf54c41fac-config-data\") pod \"nova-cell1-cell-mapping-pw7mf\" (UID: \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\") " pod="openstack/nova-cell1-cell-mapping-pw7mf" Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.453240 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afb23a79-30ca-4764-8e4b-2dcf54c41fac-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-pw7mf\" (UID: \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\") " pod="openstack/nova-cell1-cell-mapping-pw7mf" Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.453609 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkzln\" (UniqueName: \"kubernetes.io/projected/afb23a79-30ca-4764-8e4b-2dcf54c41fac-kube-api-access-qkzln\") pod \"nova-cell1-cell-mapping-pw7mf\" (UID: \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\") " pod="openstack/nova-cell1-cell-mapping-pw7mf" Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.453641 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afb23a79-30ca-4764-8e4b-2dcf54c41fac-config-data\") pod \"nova-cell1-cell-mapping-pw7mf\" (UID: \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\") " pod="openstack/nova-cell1-cell-mapping-pw7mf" Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.453725 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/afb23a79-30ca-4764-8e4b-2dcf54c41fac-scripts\") pod \"nova-cell1-cell-mapping-pw7mf\" (UID: \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\") " pod="openstack/nova-cell1-cell-mapping-pw7mf" Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.461801 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afb23a79-30ca-4764-8e4b-2dcf54c41fac-scripts\") pod \"nova-cell1-cell-mapping-pw7mf\" (UID: \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\") " pod="openstack/nova-cell1-cell-mapping-pw7mf" Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.461896 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afb23a79-30ca-4764-8e4b-2dcf54c41fac-config-data\") pod \"nova-cell1-cell-mapping-pw7mf\" (UID: \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\") " pod="openstack/nova-cell1-cell-mapping-pw7mf" Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.463035 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afb23a79-30ca-4764-8e4b-2dcf54c41fac-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-pw7mf\" (UID: \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\") " pod="openstack/nova-cell1-cell-mapping-pw7mf" Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.470933 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkzln\" (UniqueName: \"kubernetes.io/projected/afb23a79-30ca-4764-8e4b-2dcf54c41fac-kube-api-access-qkzln\") pod \"nova-cell1-cell-mapping-pw7mf\" (UID: \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\") " pod="openstack/nova-cell1-cell-mapping-pw7mf" Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.484671 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.484724 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 13:34:02 crc kubenswrapper[4773]: I0122 13:34:02.579489 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-pw7mf" Jan 22 13:34:03 crc kubenswrapper[4773]: I0122 13:34:03.046647 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-pw7mf"] Jan 22 13:34:03 crc kubenswrapper[4773]: W0122 13:34:03.047100 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podafb23a79_30ca_4764_8e4b_2dcf54c41fac.slice/crio-bc81c9806102f1708eb8d62a766249d87edfe6b4ea42c5df9fb1057f20a78d52 WatchSource:0}: Error finding container bc81c9806102f1708eb8d62a766249d87edfe6b4ea42c5df9fb1057f20a78d52: Status 404 returned error can't find the container with id bc81c9806102f1708eb8d62a766249d87edfe6b4ea42c5df9fb1057f20a78d52 Jan 22 13:34:03 crc kubenswrapper[4773]: I0122 13:34:03.077182 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-pw7mf" event={"ID":"afb23a79-30ca-4764-8e4b-2dcf54c41fac","Type":"ContainerStarted","Data":"bc81c9806102f1708eb8d62a766249d87edfe6b4ea42c5df9fb1057f20a78d52"} Jan 22 13:34:04 crc kubenswrapper[4773]: I0122 13:34:04.090424 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-pw7mf" event={"ID":"afb23a79-30ca-4764-8e4b-2dcf54c41fac","Type":"ContainerStarted","Data":"60702661b7c83eb6e6dc5e8f34f1bcd1513e5efd77c433803e28a657e3025bf5"} Jan 22 13:34:04 crc kubenswrapper[4773]: I0122 13:34:04.110089 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-pw7mf" podStartSLOduration=2.110066583 podStartE2EDuration="2.110066583s" podCreationTimestamp="2026-01-22 13:34:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:34:04.107410879 +0000 UTC m=+5951.685526704" watchObservedRunningTime="2026-01-22 13:34:04.110066583 +0000 UTC m=+5951.688182408" Jan 22 13:34:07 crc kubenswrapper[4773]: I0122 13:34:07.485415 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 22 13:34:07 crc kubenswrapper[4773]: I0122 13:34:07.485992 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 22 13:34:07 crc kubenswrapper[4773]: I0122 13:34:07.665623 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 13:34:07 crc kubenswrapper[4773]: I0122 13:34:07.665668 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 13:34:08 crc kubenswrapper[4773]: I0122 13:34:08.503489 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="73610b81-bd66-4f9a-8810-8176c084f6a4" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.93:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 13:34:08 crc kubenswrapper[4773]: I0122 13:34:08.516588 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="73610b81-bd66-4f9a-8810-8176c084f6a4" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.93:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 13:34:08 crc kubenswrapper[4773]: I0122 13:34:08.658205 4773 scope.go:117] "RemoveContainer" 
containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:34:08 crc kubenswrapper[4773]: E0122 13:34:08.658533 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:34:08 crc kubenswrapper[4773]: I0122 13:34:08.748553 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b64008fd-61b4-43f0-9827-b6b25f2bc4be" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.94:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 13:34:08 crc kubenswrapper[4773]: I0122 13:34:08.748667 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b64008fd-61b4-43f0-9827-b6b25f2bc4be" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.94:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 13:34:09 crc kubenswrapper[4773]: I0122 13:34:09.146329 4773 generic.go:334] "Generic (PLEG): container finished" podID="afb23a79-30ca-4764-8e4b-2dcf54c41fac" containerID="60702661b7c83eb6e6dc5e8f34f1bcd1513e5efd77c433803e28a657e3025bf5" exitCode=0 Jan 22 13:34:09 crc kubenswrapper[4773]: I0122 13:34:09.146428 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-pw7mf" event={"ID":"afb23a79-30ca-4764-8e4b-2dcf54c41fac","Type":"ContainerDied","Data":"60702661b7c83eb6e6dc5e8f34f1bcd1513e5efd77c433803e28a657e3025bf5"} Jan 22 13:34:10 crc kubenswrapper[4773]: I0122 13:34:10.516469 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-pw7mf" Jan 22 13:34:10 crc kubenswrapper[4773]: I0122 13:34:10.639722 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afb23a79-30ca-4764-8e4b-2dcf54c41fac-scripts\") pod \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\" (UID: \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\") " Jan 22 13:34:10 crc kubenswrapper[4773]: I0122 13:34:10.640177 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkzln\" (UniqueName: \"kubernetes.io/projected/afb23a79-30ca-4764-8e4b-2dcf54c41fac-kube-api-access-qkzln\") pod \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\" (UID: \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\") " Jan 22 13:34:10 crc kubenswrapper[4773]: I0122 13:34:10.640348 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afb23a79-30ca-4764-8e4b-2dcf54c41fac-config-data\") pod \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\" (UID: \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\") " Jan 22 13:34:10 crc kubenswrapper[4773]: I0122 13:34:10.640374 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afb23a79-30ca-4764-8e4b-2dcf54c41fac-combined-ca-bundle\") pod \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\" (UID: \"afb23a79-30ca-4764-8e4b-2dcf54c41fac\") " Jan 22 13:34:10 crc kubenswrapper[4773]: I0122 13:34:10.646479 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afb23a79-30ca-4764-8e4b-2dcf54c41fac-kube-api-access-qkzln" (OuterVolumeSpecName: "kube-api-access-qkzln") pod "afb23a79-30ca-4764-8e4b-2dcf54c41fac" (UID: "afb23a79-30ca-4764-8e4b-2dcf54c41fac"). InnerVolumeSpecName "kube-api-access-qkzln". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:34:10 crc kubenswrapper[4773]: I0122 13:34:10.647034 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afb23a79-30ca-4764-8e4b-2dcf54c41fac-scripts" (OuterVolumeSpecName: "scripts") pod "afb23a79-30ca-4764-8e4b-2dcf54c41fac" (UID: "afb23a79-30ca-4764-8e4b-2dcf54c41fac"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:34:10 crc kubenswrapper[4773]: I0122 13:34:10.666410 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afb23a79-30ca-4764-8e4b-2dcf54c41fac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "afb23a79-30ca-4764-8e4b-2dcf54c41fac" (UID: "afb23a79-30ca-4764-8e4b-2dcf54c41fac"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:34:10 crc kubenswrapper[4773]: I0122 13:34:10.699530 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afb23a79-30ca-4764-8e4b-2dcf54c41fac-config-data" (OuterVolumeSpecName: "config-data") pod "afb23a79-30ca-4764-8e4b-2dcf54c41fac" (UID: "afb23a79-30ca-4764-8e4b-2dcf54c41fac"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:34:10 crc kubenswrapper[4773]: I0122 13:34:10.744313 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkzln\" (UniqueName: \"kubernetes.io/projected/afb23a79-30ca-4764-8e4b-2dcf54c41fac-kube-api-access-qkzln\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:10 crc kubenswrapper[4773]: I0122 13:34:10.744349 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afb23a79-30ca-4764-8e4b-2dcf54c41fac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:10 crc kubenswrapper[4773]: I0122 13:34:10.744364 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afb23a79-30ca-4764-8e4b-2dcf54c41fac-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:10 crc kubenswrapper[4773]: I0122 13:34:10.744376 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afb23a79-30ca-4764-8e4b-2dcf54c41fac-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:11 crc kubenswrapper[4773]: I0122 13:34:11.166183 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-pw7mf" event={"ID":"afb23a79-30ca-4764-8e4b-2dcf54c41fac","Type":"ContainerDied","Data":"bc81c9806102f1708eb8d62a766249d87edfe6b4ea42c5df9fb1057f20a78d52"} Jan 22 13:34:11 crc kubenswrapper[4773]: I0122 13:34:11.166229 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc81c9806102f1708eb8d62a766249d87edfe6b4ea42c5df9fb1057f20a78d52" Jan 22 13:34:11 crc kubenswrapper[4773]: I0122 13:34:11.166306 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-pw7mf" Jan 22 13:34:11 crc kubenswrapper[4773]: I0122 13:34:11.357118 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 13:34:11 crc kubenswrapper[4773]: I0122 13:34:11.357555 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b64008fd-61b4-43f0-9827-b6b25f2bc4be" containerName="nova-api-api" containerID="cri-o://8003564859b9a21929f7a59ad1f39b84f5ab00c22c5b5772e36635736c809dfe" gracePeriod=30 Jan 22 13:34:11 crc kubenswrapper[4773]: I0122 13:34:11.357531 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b64008fd-61b4-43f0-9827-b6b25f2bc4be" containerName="nova-api-log" containerID="cri-o://a959e3904e4e453059ac56aaf6f49beb333aec9341048b01717700b0051e5569" gracePeriod=30 Jan 22 13:34:11 crc kubenswrapper[4773]: I0122 13:34:11.433072 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:34:11 crc kubenswrapper[4773]: I0122 13:34:11.433669 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="73610b81-bd66-4f9a-8810-8176c084f6a4" containerName="nova-metadata-log" containerID="cri-o://ddd432ebcb92747208fbd780151f514d58a199d07c69b47fb1b01362b28fc39a" gracePeriod=30 Jan 22 13:34:11 crc kubenswrapper[4773]: I0122 13:34:11.433824 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="73610b81-bd66-4f9a-8810-8176c084f6a4" containerName="nova-metadata-metadata" containerID="cri-o://8de2aa9a11924715dd6b7855b582b6752f1722071578e6d6c8b199f8941abdad" gracePeriod=30 Jan 22 13:34:12 crc 
kubenswrapper[4773]: I0122 13:34:12.179244 4773 generic.go:334] "Generic (PLEG): container finished" podID="b64008fd-61b4-43f0-9827-b6b25f2bc4be" containerID="a959e3904e4e453059ac56aaf6f49beb333aec9341048b01717700b0051e5569" exitCode=143 Jan 22 13:34:12 crc kubenswrapper[4773]: I0122 13:34:12.179312 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b64008fd-61b4-43f0-9827-b6b25f2bc4be","Type":"ContainerDied","Data":"a959e3904e4e453059ac56aaf6f49beb333aec9341048b01717700b0051e5569"} Jan 22 13:34:12 crc kubenswrapper[4773]: I0122 13:34:12.181453 4773 generic.go:334] "Generic (PLEG): container finished" podID="73610b81-bd66-4f9a-8810-8176c084f6a4" containerID="ddd432ebcb92747208fbd780151f514d58a199d07c69b47fb1b01362b28fc39a" exitCode=143 Jan 22 13:34:12 crc kubenswrapper[4773]: I0122 13:34:12.181486 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"73610b81-bd66-4f9a-8810-8176c084f6a4","Type":"ContainerDied","Data":"ddd432ebcb92747208fbd780151f514d58a199d07c69b47fb1b01362b28fc39a"} Jan 22 13:34:14 crc kubenswrapper[4773]: I0122 13:34:14.853489 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 13:34:14 crc kubenswrapper[4773]: I0122 13:34:14.974853 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.033990 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b64008fd-61b4-43f0-9827-b6b25f2bc4be-config-data\") pod \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\" (UID: \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\") " Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.034036 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b64008fd-61b4-43f0-9827-b6b25f2bc4be-combined-ca-bundle\") pod \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\" (UID: \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\") " Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.034059 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b64008fd-61b4-43f0-9827-b6b25f2bc4be-logs\") pod \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\" (UID: \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\") " Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.034079 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r857q\" (UniqueName: \"kubernetes.io/projected/b64008fd-61b4-43f0-9827-b6b25f2bc4be-kube-api-access-r857q\") pod \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\" (UID: \"b64008fd-61b4-43f0-9827-b6b25f2bc4be\") " Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.034867 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b64008fd-61b4-43f0-9827-b6b25f2bc4be-logs" (OuterVolumeSpecName: "logs") pod "b64008fd-61b4-43f0-9827-b6b25f2bc4be" (UID: "b64008fd-61b4-43f0-9827-b6b25f2bc4be"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.040663 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b64008fd-61b4-43f0-9827-b6b25f2bc4be-kube-api-access-r857q" (OuterVolumeSpecName: "kube-api-access-r857q") pod "b64008fd-61b4-43f0-9827-b6b25f2bc4be" (UID: "b64008fd-61b4-43f0-9827-b6b25f2bc4be"). InnerVolumeSpecName "kube-api-access-r857q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.060893 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b64008fd-61b4-43f0-9827-b6b25f2bc4be-config-data" (OuterVolumeSpecName: "config-data") pod "b64008fd-61b4-43f0-9827-b6b25f2bc4be" (UID: "b64008fd-61b4-43f0-9827-b6b25f2bc4be"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.063600 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b64008fd-61b4-43f0-9827-b6b25f2bc4be-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b64008fd-61b4-43f0-9827-b6b25f2bc4be" (UID: "b64008fd-61b4-43f0-9827-b6b25f2bc4be"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.135827 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73610b81-bd66-4f9a-8810-8176c084f6a4-combined-ca-bundle\") pod \"73610b81-bd66-4f9a-8810-8176c084f6a4\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.136189 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73610b81-bd66-4f9a-8810-8176c084f6a4-logs\") pod \"73610b81-bd66-4f9a-8810-8176c084f6a4\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.136734 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73610b81-bd66-4f9a-8810-8176c084f6a4-logs" (OuterVolumeSpecName: "logs") pod "73610b81-bd66-4f9a-8810-8176c084f6a4" (UID: "73610b81-bd66-4f9a-8810-8176c084f6a4"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.137036 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thhjq\" (UniqueName: \"kubernetes.io/projected/73610b81-bd66-4f9a-8810-8176c084f6a4-kube-api-access-thhjq\") pod \"73610b81-bd66-4f9a-8810-8176c084f6a4\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.137585 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/73610b81-bd66-4f9a-8810-8176c084f6a4-nova-metadata-tls-certs\") pod \"73610b81-bd66-4f9a-8810-8176c084f6a4\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.137735 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73610b81-bd66-4f9a-8810-8176c084f6a4-config-data\") pod \"73610b81-bd66-4f9a-8810-8176c084f6a4\" (UID: \"73610b81-bd66-4f9a-8810-8176c084f6a4\") " Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.138348 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b64008fd-61b4-43f0-9827-b6b25f2bc4be-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.138445 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b64008fd-61b4-43f0-9827-b6b25f2bc4be-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.138523 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b64008fd-61b4-43f0-9827-b6b25f2bc4be-logs\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.138686 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r857q\" (UniqueName: \"kubernetes.io/projected/b64008fd-61b4-43f0-9827-b6b25f2bc4be-kube-api-access-r857q\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.138766 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73610b81-bd66-4f9a-8810-8176c084f6a4-logs\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.139511 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73610b81-bd66-4f9a-8810-8176c084f6a4-kube-api-access-thhjq" (OuterVolumeSpecName: "kube-api-access-thhjq") pod "73610b81-bd66-4f9a-8810-8176c084f6a4" (UID: "73610b81-bd66-4f9a-8810-8176c084f6a4"). InnerVolumeSpecName "kube-api-access-thhjq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.163236 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73610b81-bd66-4f9a-8810-8176c084f6a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "73610b81-bd66-4f9a-8810-8176c084f6a4" (UID: "73610b81-bd66-4f9a-8810-8176c084f6a4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.164334 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73610b81-bd66-4f9a-8810-8176c084f6a4-config-data" (OuterVolumeSpecName: "config-data") pod "73610b81-bd66-4f9a-8810-8176c084f6a4" (UID: "73610b81-bd66-4f9a-8810-8176c084f6a4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.180432 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73610b81-bd66-4f9a-8810-8176c084f6a4-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "73610b81-bd66-4f9a-8810-8176c084f6a4" (UID: "73610b81-bd66-4f9a-8810-8176c084f6a4"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.215270 4773 generic.go:334] "Generic (PLEG): container finished" podID="73610b81-bd66-4f9a-8810-8176c084f6a4" containerID="8de2aa9a11924715dd6b7855b582b6752f1722071578e6d6c8b199f8941abdad" exitCode=0 Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.215336 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.215360 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"73610b81-bd66-4f9a-8810-8176c084f6a4","Type":"ContainerDied","Data":"8de2aa9a11924715dd6b7855b582b6752f1722071578e6d6c8b199f8941abdad"} Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.215386 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"73610b81-bd66-4f9a-8810-8176c084f6a4","Type":"ContainerDied","Data":"26c6666609dcbf7face0aeedc1d91add6040b773424f87e045d6cec780b88641"} Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.215404 4773 scope.go:117] "RemoveContainer" containerID="8de2aa9a11924715dd6b7855b582b6752f1722071578e6d6c8b199f8941abdad" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.217434 4773 generic.go:334] "Generic (PLEG): container finished" podID="b64008fd-61b4-43f0-9827-b6b25f2bc4be" containerID="8003564859b9a21929f7a59ad1f39b84f5ab00c22c5b5772e36635736c809dfe" exitCode=0 Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.217464 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b64008fd-61b4-43f0-9827-b6b25f2bc4be","Type":"ContainerDied","Data":"8003564859b9a21929f7a59ad1f39b84f5ab00c22c5b5772e36635736c809dfe"} Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.217484 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b64008fd-61b4-43f0-9827-b6b25f2bc4be","Type":"ContainerDied","Data":"9bde433b7cfa638fcbb2a958b08480c60da4de268987290e1fa9fe40ae932433"} Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.217589 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.239782 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thhjq\" (UniqueName: \"kubernetes.io/projected/73610b81-bd66-4f9a-8810-8176c084f6a4-kube-api-access-thhjq\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.239815 4773 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/73610b81-bd66-4f9a-8810-8176c084f6a4-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.239829 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73610b81-bd66-4f9a-8810-8176c084f6a4-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.239841 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73610b81-bd66-4f9a-8810-8176c084f6a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.252680 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.256768 4773 scope.go:117] "RemoveContainer" containerID="ddd432ebcb92747208fbd780151f514d58a199d07c69b47fb1b01362b28fc39a" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.263059 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.285935 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:34:15 crc kubenswrapper[4773]: E0122 13:34:15.286552 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afb23a79-30ca-4764-8e4b-2dcf54c41fac" containerName="nova-manage" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.286577 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="afb23a79-30ca-4764-8e4b-2dcf54c41fac" containerName="nova-manage" Jan 22 13:34:15 crc kubenswrapper[4773]: E0122 13:34:15.286604 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73610b81-bd66-4f9a-8810-8176c084f6a4" containerName="nova-metadata-metadata" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.286613 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="73610b81-bd66-4f9a-8810-8176c084f6a4" containerName="nova-metadata-metadata" Jan 22 13:34:15 crc kubenswrapper[4773]: E0122 13:34:15.286638 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73610b81-bd66-4f9a-8810-8176c084f6a4" containerName="nova-metadata-log" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.286646 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="73610b81-bd66-4f9a-8810-8176c084f6a4" containerName="nova-metadata-log" Jan 22 13:34:15 crc kubenswrapper[4773]: E0122 13:34:15.286656 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b64008fd-61b4-43f0-9827-b6b25f2bc4be" containerName="nova-api-api" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.286663 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b64008fd-61b4-43f0-9827-b6b25f2bc4be" containerName="nova-api-api" Jan 22 13:34:15 crc kubenswrapper[4773]: E0122 13:34:15.286687 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b64008fd-61b4-43f0-9827-b6b25f2bc4be" 
containerName="nova-api-log" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.286696 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b64008fd-61b4-43f0-9827-b6b25f2bc4be" containerName="nova-api-log" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.286896 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="73610b81-bd66-4f9a-8810-8176c084f6a4" containerName="nova-metadata-log" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.286930 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="afb23a79-30ca-4764-8e4b-2dcf54c41fac" containerName="nova-manage" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.286941 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="b64008fd-61b4-43f0-9827-b6b25f2bc4be" containerName="nova-api-log" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.286974 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="73610b81-bd66-4f9a-8810-8176c084f6a4" containerName="nova-metadata-metadata" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.286984 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="b64008fd-61b4-43f0-9827-b6b25f2bc4be" containerName="nova-api-api" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.288343 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.291479 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.291480 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.301795 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.306460 4773 scope.go:117] "RemoveContainer" containerID="8de2aa9a11924715dd6b7855b582b6752f1722071578e6d6c8b199f8941abdad" Jan 22 13:34:15 crc kubenswrapper[4773]: E0122 13:34:15.309474 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8de2aa9a11924715dd6b7855b582b6752f1722071578e6d6c8b199f8941abdad\": container with ID starting with 8de2aa9a11924715dd6b7855b582b6752f1722071578e6d6c8b199f8941abdad not found: ID does not exist" containerID="8de2aa9a11924715dd6b7855b582b6752f1722071578e6d6c8b199f8941abdad" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.309529 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8de2aa9a11924715dd6b7855b582b6752f1722071578e6d6c8b199f8941abdad"} err="failed to get container status \"8de2aa9a11924715dd6b7855b582b6752f1722071578e6d6c8b199f8941abdad\": rpc error: code = NotFound desc = could not find container \"8de2aa9a11924715dd6b7855b582b6752f1722071578e6d6c8b199f8941abdad\": container with ID starting with 8de2aa9a11924715dd6b7855b582b6752f1722071578e6d6c8b199f8941abdad not found: ID does not exist" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.309557 4773 scope.go:117] "RemoveContainer" containerID="ddd432ebcb92747208fbd780151f514d58a199d07c69b47fb1b01362b28fc39a" Jan 22 13:34:15 crc kubenswrapper[4773]: E0122 13:34:15.312350 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"ddd432ebcb92747208fbd780151f514d58a199d07c69b47fb1b01362b28fc39a\": container with ID starting with ddd432ebcb92747208fbd780151f514d58a199d07c69b47fb1b01362b28fc39a not found: ID does not exist" containerID="ddd432ebcb92747208fbd780151f514d58a199d07c69b47fb1b01362b28fc39a" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.312387 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddd432ebcb92747208fbd780151f514d58a199d07c69b47fb1b01362b28fc39a"} err="failed to get container status \"ddd432ebcb92747208fbd780151f514d58a199d07c69b47fb1b01362b28fc39a\": rpc error: code = NotFound desc = could not find container \"ddd432ebcb92747208fbd780151f514d58a199d07c69b47fb1b01362b28fc39a\": container with ID starting with ddd432ebcb92747208fbd780151f514d58a199d07c69b47fb1b01362b28fc39a not found: ID does not exist" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.312416 4773 scope.go:117] "RemoveContainer" containerID="8003564859b9a21929f7a59ad1f39b84f5ab00c22c5b5772e36635736c809dfe" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.321302 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.335658 4773 scope.go:117] "RemoveContainer" containerID="a959e3904e4e453059ac56aaf6f49beb333aec9341048b01717700b0051e5569" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.335804 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.358700 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.360528 4773 scope.go:117] "RemoveContainer" containerID="8003564859b9a21929f7a59ad1f39b84f5ab00c22c5b5772e36635736c809dfe" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.360934 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 22 13:34:15 crc kubenswrapper[4773]: E0122 13:34:15.361087 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8003564859b9a21929f7a59ad1f39b84f5ab00c22c5b5772e36635736c809dfe\": container with ID starting with 8003564859b9a21929f7a59ad1f39b84f5ab00c22c5b5772e36635736c809dfe not found: ID does not exist" containerID="8003564859b9a21929f7a59ad1f39b84f5ab00c22c5b5772e36635736c809dfe" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.361123 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8003564859b9a21929f7a59ad1f39b84f5ab00c22c5b5772e36635736c809dfe"} err="failed to get container status \"8003564859b9a21929f7a59ad1f39b84f5ab00c22c5b5772e36635736c809dfe\": rpc error: code = NotFound desc = could not find container \"8003564859b9a21929f7a59ad1f39b84f5ab00c22c5b5772e36635736c809dfe\": container with ID starting with 8003564859b9a21929f7a59ad1f39b84f5ab00c22c5b5772e36635736c809dfe not found: ID does not exist" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.361158 4773 scope.go:117] "RemoveContainer" containerID="a959e3904e4e453059ac56aaf6f49beb333aec9341048b01717700b0051e5569" Jan 22 13:34:15 crc kubenswrapper[4773]: E0122 13:34:15.361773 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a959e3904e4e453059ac56aaf6f49beb333aec9341048b01717700b0051e5569\": container with ID starting with a959e3904e4e453059ac56aaf6f49beb333aec9341048b01717700b0051e5569 not found: ID does not exist" containerID="a959e3904e4e453059ac56aaf6f49beb333aec9341048b01717700b0051e5569" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.361793 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a959e3904e4e453059ac56aaf6f49beb333aec9341048b01717700b0051e5569"} err="failed to get container status \"a959e3904e4e453059ac56aaf6f49beb333aec9341048b01717700b0051e5569\": rpc error: code = NotFound desc = could not find container \"a959e3904e4e453059ac56aaf6f49beb333aec9341048b01717700b0051e5569\": container with ID starting with a959e3904e4e453059ac56aaf6f49beb333aec9341048b01717700b0051e5569 not found: ID does not exist" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.366741 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.386328 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.442865 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-logs\") pod \"nova-metadata-0\" (UID: \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " pod="openstack/nova-metadata-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.442953 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " pod="openstack/nova-metadata-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.443342 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kdfc\" (UniqueName: \"kubernetes.io/projected/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-kube-api-access-2kdfc\") pod \"nova-metadata-0\" (UID: \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " pod="openstack/nova-metadata-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.443589 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-config-data\") pod \"nova-metadata-0\" (UID: \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " pod="openstack/nova-metadata-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.444028 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " pod="openstack/nova-metadata-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.546327 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " pod="openstack/nova-metadata-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.546412 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7038732-f115-47e2-9c14-1937b15ca3e2-logs\") pod \"nova-api-0\" (UID: \"e7038732-f115-47e2-9c14-1937b15ca3e2\") " pod="openstack/nova-api-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.546446 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-logs\") pod \"nova-metadata-0\" (UID: \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " pod="openstack/nova-metadata-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.546464 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7038732-f115-47e2-9c14-1937b15ca3e2-config-data\") pod \"nova-api-0\" (UID: \"e7038732-f115-47e2-9c14-1937b15ca3e2\") " pod="openstack/nova-api-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.546497 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " pod="openstack/nova-metadata-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.546576 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7038732-f115-47e2-9c14-1937b15ca3e2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e7038732-f115-47e2-9c14-1937b15ca3e2\") " pod="openstack/nova-api-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.546720 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kdfc\" (UniqueName: \"kubernetes.io/projected/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-kube-api-access-2kdfc\") pod \"nova-metadata-0\" (UID: 
\"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " pod="openstack/nova-metadata-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.546902 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-config-data\") pod \"nova-metadata-0\" (UID: \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " pod="openstack/nova-metadata-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.546958 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zl5bh\" (UniqueName: \"kubernetes.io/projected/e7038732-f115-47e2-9c14-1937b15ca3e2-kube-api-access-zl5bh\") pod \"nova-api-0\" (UID: \"e7038732-f115-47e2-9c14-1937b15ca3e2\") " pod="openstack/nova-api-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.547035 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-logs\") pod \"nova-metadata-0\" (UID: \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " pod="openstack/nova-metadata-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.550894 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-config-data\") pod \"nova-metadata-0\" (UID: \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " pod="openstack/nova-metadata-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.551459 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " pod="openstack/nova-metadata-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.555770 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " pod="openstack/nova-metadata-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.568821 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kdfc\" (UniqueName: \"kubernetes.io/projected/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-kube-api-access-2kdfc\") pod \"nova-metadata-0\" (UID: \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " pod="openstack/nova-metadata-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.622176 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.648308 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zl5bh\" (UniqueName: \"kubernetes.io/projected/e7038732-f115-47e2-9c14-1937b15ca3e2-kube-api-access-zl5bh\") pod \"nova-api-0\" (UID: \"e7038732-f115-47e2-9c14-1937b15ca3e2\") " pod="openstack/nova-api-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.648436 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7038732-f115-47e2-9c14-1937b15ca3e2-logs\") pod \"nova-api-0\" (UID: \"e7038732-f115-47e2-9c14-1937b15ca3e2\") " pod="openstack/nova-api-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.648489 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7038732-f115-47e2-9c14-1937b15ca3e2-config-data\") pod \"nova-api-0\" (UID: \"e7038732-f115-47e2-9c14-1937b15ca3e2\") " pod="openstack/nova-api-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.648530 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7038732-f115-47e2-9c14-1937b15ca3e2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e7038732-f115-47e2-9c14-1937b15ca3e2\") " pod="openstack/nova-api-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.649450 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7038732-f115-47e2-9c14-1937b15ca3e2-logs\") pod \"nova-api-0\" (UID: \"e7038732-f115-47e2-9c14-1937b15ca3e2\") " pod="openstack/nova-api-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.652428 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7038732-f115-47e2-9c14-1937b15ca3e2-config-data\") pod \"nova-api-0\" (UID: \"e7038732-f115-47e2-9c14-1937b15ca3e2\") " pod="openstack/nova-api-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.652780 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7038732-f115-47e2-9c14-1937b15ca3e2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e7038732-f115-47e2-9c14-1937b15ca3e2\") " pod="openstack/nova-api-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.668346 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zl5bh\" (UniqueName: \"kubernetes.io/projected/e7038732-f115-47e2-9c14-1937b15ca3e2-kube-api-access-zl5bh\") pod \"nova-api-0\" (UID: \"e7038732-f115-47e2-9c14-1937b15ca3e2\") " pod="openstack/nova-api-0" Jan 22 13:34:15 crc kubenswrapper[4773]: I0122 13:34:15.680612 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 22 13:34:16 crc kubenswrapper[4773]: I0122 13:34:16.093179 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 13:34:16 crc kubenswrapper[4773]: W0122 13:34:16.181101 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7038732_f115_47e2_9c14_1937b15ca3e2.slice/crio-a4b2fe1ae437c373a9553578162a698bfbd57514d872ed775ff7a94933ff78c9 WatchSource:0}: Error finding container a4b2fe1ae437c373a9553578162a698bfbd57514d872ed775ff7a94933ff78c9: Status 404 returned error can't find the container with id a4b2fe1ae437c373a9553578162a698bfbd57514d872ed775ff7a94933ff78c9 Jan 22 13:34:16 crc kubenswrapper[4773]: I0122 13:34:16.181317 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 13:34:16 crc kubenswrapper[4773]: I0122 13:34:16.238156 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4","Type":"ContainerStarted","Data":"ded56854eb7751cde77319ca7bb63fae013a51846a11d10fff705d8011ac1636"} Jan 22 13:34:16 crc kubenswrapper[4773]: I0122 13:34:16.238563 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4","Type":"ContainerStarted","Data":"cded10f49185053a7f631250a53f8627712af4c29f9f90eac233aa4ef9752e07"} Jan 22 13:34:16 crc kubenswrapper[4773]: I0122 13:34:16.239946 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e7038732-f115-47e2-9c14-1937b15ca3e2","Type":"ContainerStarted","Data":"a4b2fe1ae437c373a9553578162a698bfbd57514d872ed775ff7a94933ff78c9"} Jan 22 13:34:16 crc kubenswrapper[4773]: I0122 13:34:16.669307 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73610b81-bd66-4f9a-8810-8176c084f6a4" path="/var/lib/kubelet/pods/73610b81-bd66-4f9a-8810-8176c084f6a4/volumes" Jan 22 13:34:16 crc kubenswrapper[4773]: I0122 13:34:16.669964 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b64008fd-61b4-43f0-9827-b6b25f2bc4be" path="/var/lib/kubelet/pods/b64008fd-61b4-43f0-9827-b6b25f2bc4be/volumes" Jan 22 13:34:17 crc kubenswrapper[4773]: I0122 13:34:17.255111 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4","Type":"ContainerStarted","Data":"7e6cffadfd31ecd8a29c5841627e778405c758aad9efd58bc7ddb226c3160778"} Jan 22 13:34:17 crc kubenswrapper[4773]: I0122 13:34:17.259404 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e7038732-f115-47e2-9c14-1937b15ca3e2","Type":"ContainerStarted","Data":"01aca507d770fde21a60857423dd2e119e795b6ca45c44c5663a27727ef56d3d"} Jan 22 13:34:17 crc kubenswrapper[4773]: I0122 13:34:17.259439 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e7038732-f115-47e2-9c14-1937b15ca3e2","Type":"ContainerStarted","Data":"0ecb02d5d6e9b4557621dbabda4a26c1bcf0c2b102522246325dad9d7daf6706"} Jan 22 13:34:17 crc kubenswrapper[4773]: I0122 13:34:17.287771 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.287746893 podStartE2EDuration="2.287746893s" podCreationTimestamp="2026-01-22 13:34:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:34:17.27341727 +0000 UTC m=+5964.851533125" watchObservedRunningTime="2026-01-22 13:34:17.287746893 +0000 UTC m=+5964.865862718" Jan 22 13:34:17 crc kubenswrapper[4773]: I0122 13:34:17.300444 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.30042745 podStartE2EDuration="2.30042745s" podCreationTimestamp="2026-01-22 13:34:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:34:17.293829414 +0000 UTC m=+5964.871945279" watchObservedRunningTime="2026-01-22 13:34:17.30042745 +0000 UTC m=+5964.878543275" Jan 22 13:34:19 crc kubenswrapper[4773]: I0122 13:34:19.659637 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:34:19 crc kubenswrapper[4773]: E0122 13:34:19.660495 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:34:20 crc kubenswrapper[4773]: I0122 13:34:20.622747 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 13:34:20 crc kubenswrapper[4773]: I0122 13:34:20.622829 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 13:34:25 crc kubenswrapper[4773]: I0122 13:34:25.352210 4773 generic.go:334] "Generic (PLEG): container finished" podID="80dfc6ef-7650-4890-b457-9e221da5fc24" containerID="95191aa98448678d853ea30ef9e272b61203d87a0acacceeb8f937995f4ea5a5" exitCode=137 Jan 22 13:34:25 crc kubenswrapper[4773]: I0122 13:34:25.352698 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"80dfc6ef-7650-4890-b457-9e221da5fc24","Type":"ContainerDied","Data":"95191aa98448678d853ea30ef9e272b61203d87a0acacceeb8f937995f4ea5a5"} Jan 22 13:34:25 crc kubenswrapper[4773]: I0122 13:34:25.543003 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 13:34:25 crc kubenswrapper[4773]: I0122 13:34:25.623075 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 22 13:34:25 crc kubenswrapper[4773]: I0122 13:34:25.623130 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 22 13:34:25 crc kubenswrapper[4773]: I0122 13:34:25.653075 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80dfc6ef-7650-4890-b457-9e221da5fc24-config-data\") pod \"80dfc6ef-7650-4890-b457-9e221da5fc24\" (UID: \"80dfc6ef-7650-4890-b457-9e221da5fc24\") " Jan 22 13:34:25 crc kubenswrapper[4773]: I0122 13:34:25.653268 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80dfc6ef-7650-4890-b457-9e221da5fc24-combined-ca-bundle\") pod \"80dfc6ef-7650-4890-b457-9e221da5fc24\" (UID: \"80dfc6ef-7650-4890-b457-9e221da5fc24\") " Jan 22 13:34:25 crc kubenswrapper[4773]: I0122 13:34:25.653329 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4wdq\" (UniqueName: \"kubernetes.io/projected/80dfc6ef-7650-4890-b457-9e221da5fc24-kube-api-access-g4wdq\") pod \"80dfc6ef-7650-4890-b457-9e221da5fc24\" (UID: \"80dfc6ef-7650-4890-b457-9e221da5fc24\") " Jan 22 13:34:25 crc kubenswrapper[4773]: I0122 13:34:25.660049 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80dfc6ef-7650-4890-b457-9e221da5fc24-kube-api-access-g4wdq" (OuterVolumeSpecName: "kube-api-access-g4wdq") pod "80dfc6ef-7650-4890-b457-9e221da5fc24" (UID: "80dfc6ef-7650-4890-b457-9e221da5fc24"). InnerVolumeSpecName "kube-api-access-g4wdq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:34:25 crc kubenswrapper[4773]: I0122 13:34:25.681556 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 13:34:25 crc kubenswrapper[4773]: I0122 13:34:25.681639 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 13:34:25 crc kubenswrapper[4773]: I0122 13:34:25.682598 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80dfc6ef-7650-4890-b457-9e221da5fc24-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "80dfc6ef-7650-4890-b457-9e221da5fc24" (UID: "80dfc6ef-7650-4890-b457-9e221da5fc24"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:34:25 crc kubenswrapper[4773]: I0122 13:34:25.684032 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80dfc6ef-7650-4890-b457-9e221da5fc24-config-data" (OuterVolumeSpecName: "config-data") pod "80dfc6ef-7650-4890-b457-9e221da5fc24" (UID: "80dfc6ef-7650-4890-b457-9e221da5fc24"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:34:25 crc kubenswrapper[4773]: I0122 13:34:25.756421 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80dfc6ef-7650-4890-b457-9e221da5fc24-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:25 crc kubenswrapper[4773]: I0122 13:34:25.756466 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4wdq\" (UniqueName: \"kubernetes.io/projected/80dfc6ef-7650-4890-b457-9e221da5fc24-kube-api-access-g4wdq\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:25 crc kubenswrapper[4773]: I0122 13:34:25.756612 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80dfc6ef-7650-4890-b457-9e221da5fc24-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.363142 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"80dfc6ef-7650-4890-b457-9e221da5fc24","Type":"ContainerDied","Data":"3afd79ca5b9c231d8a1826ecb147d44af4efca813a8f9c33a26566f3254f2621"} Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.363495 4773 scope.go:117] "RemoveContainer" containerID="95191aa98448678d853ea30ef9e272b61203d87a0acacceeb8f937995f4ea5a5" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.363185 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.395297 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.407088 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.418823 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 13:34:26 crc kubenswrapper[4773]: E0122 13:34:26.419259 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80dfc6ef-7650-4890-b457-9e221da5fc24" containerName="nova-scheduler-scheduler" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.419277 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="80dfc6ef-7650-4890-b457-9e221da5fc24" containerName="nova-scheduler-scheduler" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.419471 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="80dfc6ef-7650-4890-b457-9e221da5fc24" containerName="nova-scheduler-scheduler" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.420308 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.423927 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.436254 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.574110 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5scj\" (UniqueName: \"kubernetes.io/projected/5ac70a4f-4177-4f6b-a676-3e8c635c766a-kube-api-access-j5scj\") pod \"nova-scheduler-0\" (UID: \"5ac70a4f-4177-4f6b-a676-3e8c635c766a\") " pod="openstack/nova-scheduler-0" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.574202 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ac70a4f-4177-4f6b-a676-3e8c635c766a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5ac70a4f-4177-4f6b-a676-3e8c635c766a\") " pod="openstack/nova-scheduler-0" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.574276 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ac70a4f-4177-4f6b-a676-3e8c635c766a-config-data\") pod \"nova-scheduler-0\" (UID: \"5ac70a4f-4177-4f6b-a676-3e8c635c766a\") " pod="openstack/nova-scheduler-0" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.638654 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.96:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.638945 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.96:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.668077 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80dfc6ef-7650-4890-b457-9e221da5fc24" path="/var/lib/kubelet/pods/80dfc6ef-7650-4890-b457-9e221da5fc24/volumes" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.675694 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5scj\" (UniqueName: \"kubernetes.io/projected/5ac70a4f-4177-4f6b-a676-3e8c635c766a-kube-api-access-j5scj\") pod \"nova-scheduler-0\" (UID: \"5ac70a4f-4177-4f6b-a676-3e8c635c766a\") " pod="openstack/nova-scheduler-0" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.675762 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ac70a4f-4177-4f6b-a676-3e8c635c766a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5ac70a4f-4177-4f6b-a676-3e8c635c766a\") " pod="openstack/nova-scheduler-0" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.675825 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ac70a4f-4177-4f6b-a676-3e8c635c766a-config-data\") pod 
\"nova-scheduler-0\" (UID: \"5ac70a4f-4177-4f6b-a676-3e8c635c766a\") " pod="openstack/nova-scheduler-0" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.682049 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ac70a4f-4177-4f6b-a676-3e8c635c766a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5ac70a4f-4177-4f6b-a676-3e8c635c766a\") " pod="openstack/nova-scheduler-0" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.694566 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ac70a4f-4177-4f6b-a676-3e8c635c766a-config-data\") pod \"nova-scheduler-0\" (UID: \"5ac70a4f-4177-4f6b-a676-3e8c635c766a\") " pod="openstack/nova-scheduler-0" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.699662 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5scj\" (UniqueName: \"kubernetes.io/projected/5ac70a4f-4177-4f6b-a676-3e8c635c766a-kube-api-access-j5scj\") pod \"nova-scheduler-0\" (UID: \"5ac70a4f-4177-4f6b-a676-3e8c635c766a\") " pod="openstack/nova-scheduler-0" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.738365 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.767480 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e7038732-f115-47e2-9c14-1937b15ca3e2" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.97:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 13:34:26 crc kubenswrapper[4773]: I0122 13:34:26.767480 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e7038732-f115-47e2-9c14-1937b15ca3e2" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.97:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 22 13:34:27 crc kubenswrapper[4773]: I0122 13:34:27.246734 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 13:34:27 crc kubenswrapper[4773]: I0122 13:34:27.379530 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5ac70a4f-4177-4f6b-a676-3e8c635c766a","Type":"ContainerStarted","Data":"e667ff4aff6f87772e778bfc2b2b2b44597796903cd6a3e00d650273e884ab87"} Jan 22 13:34:28 crc kubenswrapper[4773]: I0122 13:34:28.392666 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5ac70a4f-4177-4f6b-a676-3e8c635c766a","Type":"ContainerStarted","Data":"364fd2ebe4297cca51026cfe8622c72c708d9df3285c62b117b332acf30a1fdd"} Jan 22 13:34:28 crc kubenswrapper[4773]: I0122 13:34:28.419427 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.419245589 podStartE2EDuration="2.419245589s" podCreationTimestamp="2026-01-22 13:34:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:34:28.408327752 +0000 UTC m=+5975.986443597" watchObservedRunningTime="2026-01-22 13:34:28.419245589 +0000 UTC m=+5975.997361434" Jan 22 13:34:30 crc kubenswrapper[4773]: I0122 13:34:30.658585 4773 scope.go:117] "RemoveContainer" 
containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:34:30 crc kubenswrapper[4773]: E0122 13:34:30.659113 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:34:31 crc kubenswrapper[4773]: I0122 13:34:31.739134 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 22 13:34:35 crc kubenswrapper[4773]: I0122 13:34:35.630822 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 22 13:34:35 crc kubenswrapper[4773]: I0122 13:34:35.632137 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 22 13:34:35 crc kubenswrapper[4773]: I0122 13:34:35.643605 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 22 13:34:35 crc kubenswrapper[4773]: I0122 13:34:35.686485 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 22 13:34:35 crc kubenswrapper[4773]: I0122 13:34:35.686623 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 22 13:34:35 crc kubenswrapper[4773]: I0122 13:34:35.687051 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 22 13:34:35 crc kubenswrapper[4773]: I0122 13:34:35.687093 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 22 13:34:35 crc kubenswrapper[4773]: I0122 13:34:35.692549 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 22 13:34:35 crc kubenswrapper[4773]: I0122 13:34:35.694333 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 22 13:34:35 crc kubenswrapper[4773]: I0122 13:34:35.936350 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84dd694dd5-rdt6v"] Jan 22 13:34:35 crc kubenswrapper[4773]: I0122 13:34:35.941045 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:34:35 crc kubenswrapper[4773]: I0122 13:34:35.967566 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84dd694dd5-rdt6v"] Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.086060 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-config\") pod \"dnsmasq-dns-84dd694dd5-rdt6v\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.086124 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-dns-svc\") pod \"dnsmasq-dns-84dd694dd5-rdt6v\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.086184 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-ovsdbserver-sb\") pod \"dnsmasq-dns-84dd694dd5-rdt6v\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.086269 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5mff\" (UniqueName: \"kubernetes.io/projected/9ea3757f-a094-4d6b-bacf-fb311010aa70-kube-api-access-w5mff\") pod \"dnsmasq-dns-84dd694dd5-rdt6v\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.086389 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-ovsdbserver-nb\") pod \"dnsmasq-dns-84dd694dd5-rdt6v\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.188919 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5mff\" (UniqueName: \"kubernetes.io/projected/9ea3757f-a094-4d6b-bacf-fb311010aa70-kube-api-access-w5mff\") pod \"dnsmasq-dns-84dd694dd5-rdt6v\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.189016 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-ovsdbserver-nb\") pod \"dnsmasq-dns-84dd694dd5-rdt6v\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.189078 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-config\") pod \"dnsmasq-dns-84dd694dd5-rdt6v\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.189119 4773 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-dns-svc\") pod \"dnsmasq-dns-84dd694dd5-rdt6v\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.189161 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-ovsdbserver-sb\") pod \"dnsmasq-dns-84dd694dd5-rdt6v\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.190034 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-ovsdbserver-nb\") pod \"dnsmasq-dns-84dd694dd5-rdt6v\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.190066 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-ovsdbserver-sb\") pod \"dnsmasq-dns-84dd694dd5-rdt6v\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.190273 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-dns-svc\") pod \"dnsmasq-dns-84dd694dd5-rdt6v\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.190945 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-config\") pod \"dnsmasq-dns-84dd694dd5-rdt6v\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.216329 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5mff\" (UniqueName: \"kubernetes.io/projected/9ea3757f-a094-4d6b-bacf-fb311010aa70-kube-api-access-w5mff\") pod \"dnsmasq-dns-84dd694dd5-rdt6v\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.293337 4773 util.go:30] "No sandbox for pod can be found. 
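The reconciler_common.go entries above trace the volume manager's desired-vs-actual reconciliation, per volume: VerifyControllerAttachedVolume, then MountVolume started, then MountVolume.SetUp succeeded. A sketch of that reconcile-loop shape follows; the types and printed strings are hypothetical stand-ins, not the kubelet implementation.

```go
package main

import "fmt"

// volume is a hypothetical stand-in for a pod volume in desired state.
type volume struct{ name, plugin string }

// reconcile walks the desired state and mounts anything missing from the
// actual state, the same pattern the reconciler_common.go entries trace.
func reconcile(desired []volume, mounted map[string]bool) {
	for _, v := range desired {
		if mounted[v.name] {
			continue // actual state already matches desired state
		}
		fmt.Printf("MountVolume started for %q (%s)\n", v.name, v.plugin)
		mounted[v.name] = true // stands in for MountVolume.SetUp succeeding
		fmt.Printf("MountVolume.SetUp succeeded for %q\n", v.name)
	}
}

func main() {
	desired := []volume{
		{"config", "kubernetes.io/configmap"},
		{"kube-api-access-w5mff", "kubernetes.io/projected"},
	}
	reconcile(desired, map[string]bool{})
}
```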
Need to start a new one" pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.602013 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.741561 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.779075 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 22 13:34:36 crc kubenswrapper[4773]: I0122 13:34:36.795307 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84dd694dd5-rdt6v"] Jan 22 13:34:37 crc kubenswrapper[4773]: I0122 13:34:37.499160 4773 generic.go:334] "Generic (PLEG): container finished" podID="9ea3757f-a094-4d6b-bacf-fb311010aa70" containerID="676844cb2c1bd7e6b94965b73fdf95066f05be93de81298d3c210f5edbcab3a1" exitCode=0 Jan 22 13:34:37 crc kubenswrapper[4773]: I0122 13:34:37.499231 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" event={"ID":"9ea3757f-a094-4d6b-bacf-fb311010aa70","Type":"ContainerDied","Data":"676844cb2c1bd7e6b94965b73fdf95066f05be93de81298d3c210f5edbcab3a1"} Jan 22 13:34:37 crc kubenswrapper[4773]: I0122 13:34:37.499660 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" event={"ID":"9ea3757f-a094-4d6b-bacf-fb311010aa70","Type":"ContainerStarted","Data":"933c103807f5ed90978bd9592c7e70da1bfdbd41ff710ab3d3e12b9c9fad240f"} Jan 22 13:34:37 crc kubenswrapper[4773]: I0122 13:34:37.542366 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 22 13:34:38 crc kubenswrapper[4773]: I0122 13:34:38.516520 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" event={"ID":"9ea3757f-a094-4d6b-bacf-fb311010aa70","Type":"ContainerStarted","Data":"5ebb7cb37f0a3a7ee83cf0d45c687d11533039b4a8a029f5232308bafb1e6a5b"} Jan 22 13:34:38 crc kubenswrapper[4773]: I0122 13:34:38.516900 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:34:38 crc kubenswrapper[4773]: I0122 13:34:38.535434 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" podStartSLOduration=3.535417702 podStartE2EDuration="3.535417702s" podCreationTimestamp="2026-01-22 13:34:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:34:38.533778226 +0000 UTC m=+5986.111894051" watchObservedRunningTime="2026-01-22 13:34:38.535417702 +0000 UTC m=+5986.113533527" Jan 22 13:34:38 crc kubenswrapper[4773]: I0122 13:34:38.750666 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 13:34:38 crc kubenswrapper[4773]: I0122 13:34:38.751153 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e7038732-f115-47e2-9c14-1937b15ca3e2" containerName="nova-api-api" containerID="cri-o://01aca507d770fde21a60857423dd2e119e795b6ca45c44c5663a27727ef56d3d" gracePeriod=30 Jan 22 13:34:38 crc kubenswrapper[4773]: I0122 13:34:38.751541 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" 
podUID="e7038732-f115-47e2-9c14-1937b15ca3e2" containerName="nova-api-log" containerID="cri-o://0ecb02d5d6e9b4557621dbabda4a26c1bcf0c2b102522246325dad9d7daf6706" gracePeriod=30 Jan 22 13:34:39 crc kubenswrapper[4773]: I0122 13:34:39.523983 4773 generic.go:334] "Generic (PLEG): container finished" podID="e7038732-f115-47e2-9c14-1937b15ca3e2" containerID="0ecb02d5d6e9b4557621dbabda4a26c1bcf0c2b102522246325dad9d7daf6706" exitCode=143 Jan 22 13:34:39 crc kubenswrapper[4773]: I0122 13:34:39.524073 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e7038732-f115-47e2-9c14-1937b15ca3e2","Type":"ContainerDied","Data":"0ecb02d5d6e9b4557621dbabda4a26c1bcf0c2b102522246325dad9d7daf6706"} Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.416268 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.549703 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7038732-f115-47e2-9c14-1937b15ca3e2-combined-ca-bundle\") pod \"e7038732-f115-47e2-9c14-1937b15ca3e2\" (UID: \"e7038732-f115-47e2-9c14-1937b15ca3e2\") " Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.550121 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7038732-f115-47e2-9c14-1937b15ca3e2-config-data\") pod \"e7038732-f115-47e2-9c14-1937b15ca3e2\" (UID: \"e7038732-f115-47e2-9c14-1937b15ca3e2\") " Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.550210 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zl5bh\" (UniqueName: \"kubernetes.io/projected/e7038732-f115-47e2-9c14-1937b15ca3e2-kube-api-access-zl5bh\") pod \"e7038732-f115-47e2-9c14-1937b15ca3e2\" (UID: \"e7038732-f115-47e2-9c14-1937b15ca3e2\") " Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.550315 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7038732-f115-47e2-9c14-1937b15ca3e2-logs\") pod \"e7038732-f115-47e2-9c14-1937b15ca3e2\" (UID: \"e7038732-f115-47e2-9c14-1937b15ca3e2\") " Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.551218 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7038732-f115-47e2-9c14-1937b15ca3e2-logs" (OuterVolumeSpecName: "logs") pod "e7038732-f115-47e2-9c14-1937b15ca3e2" (UID: "e7038732-f115-47e2-9c14-1937b15ca3e2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.573340 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7038732-f115-47e2-9c14-1937b15ca3e2-kube-api-access-zl5bh" (OuterVolumeSpecName: "kube-api-access-zl5bh") pod "e7038732-f115-47e2-9c14-1937b15ca3e2" (UID: "e7038732-f115-47e2-9c14-1937b15ca3e2"). InnerVolumeSpecName "kube-api-access-zl5bh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.576622 4773 generic.go:334] "Generic (PLEG): container finished" podID="e7038732-f115-47e2-9c14-1937b15ca3e2" containerID="01aca507d770fde21a60857423dd2e119e795b6ca45c44c5663a27727ef56d3d" exitCode=0 Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.576675 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e7038732-f115-47e2-9c14-1937b15ca3e2","Type":"ContainerDied","Data":"01aca507d770fde21a60857423dd2e119e795b6ca45c44c5663a27727ef56d3d"} Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.576711 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e7038732-f115-47e2-9c14-1937b15ca3e2","Type":"ContainerDied","Data":"a4b2fe1ae437c373a9553578162a698bfbd57514d872ed775ff7a94933ff78c9"} Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.576737 4773 scope.go:117] "RemoveContainer" containerID="01aca507d770fde21a60857423dd2e119e795b6ca45c44c5663a27727ef56d3d" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.576921 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.580439 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7038732-f115-47e2-9c14-1937b15ca3e2-config-data" (OuterVolumeSpecName: "config-data") pod "e7038732-f115-47e2-9c14-1937b15ca3e2" (UID: "e7038732-f115-47e2-9c14-1937b15ca3e2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.588986 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7038732-f115-47e2-9c14-1937b15ca3e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e7038732-f115-47e2-9c14-1937b15ca3e2" (UID: "e7038732-f115-47e2-9c14-1937b15ca3e2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.652989 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7038732-f115-47e2-9c14-1937b15ca3e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.653024 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7038732-f115-47e2-9c14-1937b15ca3e2-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.653034 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zl5bh\" (UniqueName: \"kubernetes.io/projected/e7038732-f115-47e2-9c14-1937b15ca3e2-kube-api-access-zl5bh\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.653044 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7038732-f115-47e2-9c14-1937b15ca3e2-logs\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.677570 4773 scope.go:117] "RemoveContainer" containerID="0ecb02d5d6e9b4557621dbabda4a26c1bcf0c2b102522246325dad9d7daf6706" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.710226 4773 scope.go:117] "RemoveContainer" containerID="01aca507d770fde21a60857423dd2e119e795b6ca45c44c5663a27727ef56d3d" Jan 22 13:34:42 crc kubenswrapper[4773]: E0122 13:34:42.711165 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01aca507d770fde21a60857423dd2e119e795b6ca45c44c5663a27727ef56d3d\": container with ID starting with 01aca507d770fde21a60857423dd2e119e795b6ca45c44c5663a27727ef56d3d not found: ID does not exist" containerID="01aca507d770fde21a60857423dd2e119e795b6ca45c44c5663a27727ef56d3d" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.711234 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01aca507d770fde21a60857423dd2e119e795b6ca45c44c5663a27727ef56d3d"} err="failed to get container status \"01aca507d770fde21a60857423dd2e119e795b6ca45c44c5663a27727ef56d3d\": rpc error: code = NotFound desc = could not find container \"01aca507d770fde21a60857423dd2e119e795b6ca45c44c5663a27727ef56d3d\": container with ID starting with 01aca507d770fde21a60857423dd2e119e795b6ca45c44c5663a27727ef56d3d not found: ID does not exist" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.711265 4773 scope.go:117] "RemoveContainer" containerID="0ecb02d5d6e9b4557621dbabda4a26c1bcf0c2b102522246325dad9d7daf6706" Jan 22 13:34:42 crc kubenswrapper[4773]: E0122 13:34:42.712162 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ecb02d5d6e9b4557621dbabda4a26c1bcf0c2b102522246325dad9d7daf6706\": container with ID starting with 0ecb02d5d6e9b4557621dbabda4a26c1bcf0c2b102522246325dad9d7daf6706 not found: ID does not exist" containerID="0ecb02d5d6e9b4557621dbabda4a26c1bcf0c2b102522246325dad9d7daf6706" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.712456 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ecb02d5d6e9b4557621dbabda4a26c1bcf0c2b102522246325dad9d7daf6706"} err="failed to get container status \"0ecb02d5d6e9b4557621dbabda4a26c1bcf0c2b102522246325dad9d7daf6706\": rpc error: code = NotFound desc = could 
not find container \"0ecb02d5d6e9b4557621dbabda4a26c1bcf0c2b102522246325dad9d7daf6706\": container with ID starting with 0ecb02d5d6e9b4557621dbabda4a26c1bcf0c2b102522246325dad9d7daf6706 not found: ID does not exist" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.905447 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.919013 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.941824 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 22 13:34:42 crc kubenswrapper[4773]: E0122 13:34:42.942332 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7038732-f115-47e2-9c14-1937b15ca3e2" containerName="nova-api-log" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.942354 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7038732-f115-47e2-9c14-1937b15ca3e2" containerName="nova-api-log" Jan 22 13:34:42 crc kubenswrapper[4773]: E0122 13:34:42.942376 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7038732-f115-47e2-9c14-1937b15ca3e2" containerName="nova-api-api" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.942384 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7038732-f115-47e2-9c14-1937b15ca3e2" containerName="nova-api-api" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.946787 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7038732-f115-47e2-9c14-1937b15ca3e2" containerName="nova-api-log" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.946851 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7038732-f115-47e2-9c14-1937b15ca3e2" containerName="nova-api-api" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.948268 4773 util.go:30] "No sandbox for pod can be found. 
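The "ContainerStatus from runtime service failed ... NotFound" errors above are benign: by the time the deletor re-checks the two nova-api containers, the runtime has already removed them along with the sandbox, so cleanup treats NotFound as success and moves on. A sketch of that idempotent-delete pattern follows; deleteFromRuntime is a hypothetical stand-in for the CRI call, not a real API.

```go
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// deleteFromRuntime stands in for the runtime's RemoveContainer call; here
// it always reports NotFound, as in the log above where the container was
// already gone.
func deleteFromRuntime(id string) error {
	return status.Error(codes.NotFound,
		fmt.Sprintf("could not find container %q", id))
}

// removeContainer treats NotFound as success so cleanup stays idempotent:
// a second pass over an already-deleted container is a no-op.
func removeContainer(id string) error {
	if err := deleteFromRuntime(id); status.Code(err) != codes.NotFound {
		return err
	}
	return nil
}

func main() {
	fmt.Println("cleanup error:", removeContainer("01aca507d770"))
}
```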
Need to start a new one" pod="openstack/nova-api-0" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.951775 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.951881 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.962943 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Jan 22 13:34:42 crc kubenswrapper[4773]: I0122 13:34:42.972366 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.061158 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c19119e-6281-4d1b-ac5e-599d1fb52c95-logs\") pod \"nova-api-0\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " pod="openstack/nova-api-0" Jan 22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.061281 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sncrb\" (UniqueName: \"kubernetes.io/projected/9c19119e-6281-4d1b-ac5e-599d1fb52c95-kube-api-access-sncrb\") pod \"nova-api-0\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " pod="openstack/nova-api-0" Jan 22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.061346 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " pod="openstack/nova-api-0" Jan 22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.061364 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " pod="openstack/nova-api-0" Jan 22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.061427 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-public-tls-certs\") pod \"nova-api-0\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " pod="openstack/nova-api-0" Jan 22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.061464 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-config-data\") pod \"nova-api-0\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " pod="openstack/nova-api-0" Jan 22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.163411 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c19119e-6281-4d1b-ac5e-599d1fb52c95-logs\") pod \"nova-api-0\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " pod="openstack/nova-api-0" Jan 22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.163563 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sncrb\" (UniqueName: \"kubernetes.io/projected/9c19119e-6281-4d1b-ac5e-599d1fb52c95-kube-api-access-sncrb\") pod \"nova-api-0\" (UID: 
\"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " pod="openstack/nova-api-0" Jan 22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.163604 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " pod="openstack/nova-api-0" Jan 22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.163624 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " pod="openstack/nova-api-0" Jan 22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.163670 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-public-tls-certs\") pod \"nova-api-0\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " pod="openstack/nova-api-0" Jan 22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.163710 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-config-data\") pod \"nova-api-0\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " pod="openstack/nova-api-0" Jan 22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.163873 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c19119e-6281-4d1b-ac5e-599d1fb52c95-logs\") pod \"nova-api-0\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " pod="openstack/nova-api-0" Jan 22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.169275 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " pod="openstack/nova-api-0" Jan 22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.170500 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-config-data\") pod \"nova-api-0\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " pod="openstack/nova-api-0" Jan 22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.175893 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-public-tls-certs\") pod \"nova-api-0\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " pod="openstack/nova-api-0" Jan 22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.181153 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " pod="openstack/nova-api-0" Jan 22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.188330 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sncrb\" (UniqueName: \"kubernetes.io/projected/9c19119e-6281-4d1b-ac5e-599d1fb52c95-kube-api-access-sncrb\") pod \"nova-api-0\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " pod="openstack/nova-api-0" Jan 
22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.264735 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 13:34:43 crc kubenswrapper[4773]: I0122 13:34:43.704693 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 13:34:44 crc kubenswrapper[4773]: I0122 13:34:44.604006 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9c19119e-6281-4d1b-ac5e-599d1fb52c95","Type":"ContainerStarted","Data":"09b19ac9cd4cb5d11572c1d77637629883fee5390c4613ceff7dc60aeb60034b"} Jan 22 13:34:44 crc kubenswrapper[4773]: I0122 13:34:44.604271 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9c19119e-6281-4d1b-ac5e-599d1fb52c95","Type":"ContainerStarted","Data":"249f13e2ffe294237aadae46ca90d1274b32f82a5df5bd44c2bce85e7524f918"} Jan 22 13:34:44 crc kubenswrapper[4773]: I0122 13:34:44.604294 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9c19119e-6281-4d1b-ac5e-599d1fb52c95","Type":"ContainerStarted","Data":"b9418d4e0e2e5f37379e288479a7edc7ffdc1b3cab59da0ad2d6b0fab964c5dc"} Jan 22 13:34:44 crc kubenswrapper[4773]: I0122 13:34:44.635975 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.635945232 podStartE2EDuration="2.635945232s" podCreationTimestamp="2026-01-22 13:34:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:34:44.632706571 +0000 UTC m=+5992.210822396" watchObservedRunningTime="2026-01-22 13:34:44.635945232 +0000 UTC m=+5992.214061067" Jan 22 13:34:44 crc kubenswrapper[4773]: I0122 13:34:44.659624 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:34:44 crc kubenswrapper[4773]: E0122 13:34:44.659892 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:34:44 crc kubenswrapper[4773]: I0122 13:34:44.667857 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7038732-f115-47e2-9c14-1937b15ca3e2" path="/var/lib/kubelet/pods/e7038732-f115-47e2-9c14-1937b15ca3e2/volumes" Jan 22 13:34:46 crc kubenswrapper[4773]: I0122 13:34:46.295477 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:34:46 crc kubenswrapper[4773]: I0122 13:34:46.377412 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-594b9845b9-7sx28"] Jan 22 13:34:46 crc kubenswrapper[4773]: I0122 13:34:46.377697 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-594b9845b9-7sx28" podUID="56f9dd78-8e18-4b35-bacb-7159d1aefb47" containerName="dnsmasq-dns" containerID="cri-o://655eacce59359ca3c618cff463d46066770147c9365189c7b284255618a30bec" gracePeriod=10 Jan 22 13:34:46 crc kubenswrapper[4773]: I0122 13:34:46.487088 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-594b9845b9-7sx28" 
podUID="56f9dd78-8e18-4b35-bacb-7159d1aefb47" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.88:5353: connect: connection refused" Jan 22 13:34:46 crc kubenswrapper[4773]: I0122 13:34:46.627653 4773 generic.go:334] "Generic (PLEG): container finished" podID="56f9dd78-8e18-4b35-bacb-7159d1aefb47" containerID="655eacce59359ca3c618cff463d46066770147c9365189c7b284255618a30bec" exitCode=0 Jan 22 13:34:46 crc kubenswrapper[4773]: I0122 13:34:46.627704 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-594b9845b9-7sx28" event={"ID":"56f9dd78-8e18-4b35-bacb-7159d1aefb47","Type":"ContainerDied","Data":"655eacce59359ca3c618cff463d46066770147c9365189c7b284255618a30bec"} Jan 22 13:34:46 crc kubenswrapper[4773]: I0122 13:34:46.913160 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.036266 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-ovsdbserver-nb\") pod \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.036343 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dsjtq\" (UniqueName: \"kubernetes.io/projected/56f9dd78-8e18-4b35-bacb-7159d1aefb47-kube-api-access-dsjtq\") pod \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.036417 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-dns-svc\") pod \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.036555 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-config\") pod \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.036580 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-ovsdbserver-sb\") pod \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\" (UID: \"56f9dd78-8e18-4b35-bacb-7159d1aefb47\") " Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.046119 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56f9dd78-8e18-4b35-bacb-7159d1aefb47-kube-api-access-dsjtq" (OuterVolumeSpecName: "kube-api-access-dsjtq") pod "56f9dd78-8e18-4b35-bacb-7159d1aefb47" (UID: "56f9dd78-8e18-4b35-bacb-7159d1aefb47"). InnerVolumeSpecName "kube-api-access-dsjtq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.138481 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dsjtq\" (UniqueName: \"kubernetes.io/projected/56f9dd78-8e18-4b35-bacb-7159d1aefb47-kube-api-access-dsjtq\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.183364 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "56f9dd78-8e18-4b35-bacb-7159d1aefb47" (UID: "56f9dd78-8e18-4b35-bacb-7159d1aefb47"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.190990 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "56f9dd78-8e18-4b35-bacb-7159d1aefb47" (UID: "56f9dd78-8e18-4b35-bacb-7159d1aefb47"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.196649 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-config" (OuterVolumeSpecName: "config") pod "56f9dd78-8e18-4b35-bacb-7159d1aefb47" (UID: "56f9dd78-8e18-4b35-bacb-7159d1aefb47"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.198587 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "56f9dd78-8e18-4b35-bacb-7159d1aefb47" (UID: "56f9dd78-8e18-4b35-bacb-7159d1aefb47"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.240464 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.240498 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.240513 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.240527 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56f9dd78-8e18-4b35-bacb-7159d1aefb47-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.638123 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-594b9845b9-7sx28" event={"ID":"56f9dd78-8e18-4b35-bacb-7159d1aefb47","Type":"ContainerDied","Data":"723e14c2d1ea0f67a2a1de743d36f578b31a003cc9f7ef5c74030fbb260b58aa"} Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.638174 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-594b9845b9-7sx28" Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.638258 4773 scope.go:117] "RemoveContainer" containerID="655eacce59359ca3c618cff463d46066770147c9365189c7b284255618a30bec" Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.664004 4773 scope.go:117] "RemoveContainer" containerID="909c7a95447e8dbf869b3653a7d6600c03acbe0dc5c0042c5482eb142a67ae0c" Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.676851 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-594b9845b9-7sx28"] Jan 22 13:34:47 crc kubenswrapper[4773]: I0122 13:34:47.686444 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-594b9845b9-7sx28"] Jan 22 13:34:48 crc kubenswrapper[4773]: I0122 13:34:48.669598 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56f9dd78-8e18-4b35-bacb-7159d1aefb47" path="/var/lib/kubelet/pods/56f9dd78-8e18-4b35-bacb-7159d1aefb47/volumes" Jan 22 13:34:53 crc kubenswrapper[4773]: I0122 13:34:53.265341 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 13:34:53 crc kubenswrapper[4773]: I0122 13:34:53.266120 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 13:34:54 crc kubenswrapper[4773]: I0122 13:34:54.288527 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9c19119e-6281-4d1b-ac5e-599d1fb52c95" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.1.100:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 13:34:54 crc kubenswrapper[4773]: I0122 13:34:54.288614 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9c19119e-6281-4d1b-ac5e-599d1fb52c95" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.1.100:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 13:34:56 crc kubenswrapper[4773]: I0122 13:34:56.657994 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:34:56 crc kubenswrapper[4773]: E0122 13:34:56.658578 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:35:03 crc kubenswrapper[4773]: I0122 13:35:03.273187 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 22 13:35:03 crc kubenswrapper[4773]: I0122 13:35:03.275206 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 22 13:35:03 crc kubenswrapper[4773]: I0122 13:35:03.279611 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 22 13:35:03 crc kubenswrapper[4773]: I0122 13:35:03.281510 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 22 13:35:03 crc kubenswrapper[4773]: I0122 13:35:03.814748 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 22 
13:35:03 crc kubenswrapper[4773]: I0122 13:35:03.822023 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 22 13:35:05 crc kubenswrapper[4773]: I0122 13:35:05.404328 4773 scope.go:117] "RemoveContainer" containerID="9fc64e39289e259d8a2b9160916a03bedc174d8e61615a18ec7881231bb6e63e" Jan 22 13:35:05 crc kubenswrapper[4773]: I0122 13:35:05.429050 4773 scope.go:117] "RemoveContainer" containerID="2b2c87b3fdf746d3f2dec3fe7ba7a8d9353f7e371fd2acca35efd04d91d02264" Jan 22 13:35:11 crc kubenswrapper[4773]: I0122 13:35:11.661599 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:35:12 crc kubenswrapper[4773]: I0122 13:35:12.931460 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"cd11c4ae3e84c7a53f963cff7bf88747afac3c498d4204f8269ad676fea8c32b"} Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.718841 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-v6vjx"] Jan 22 13:35:32 crc kubenswrapper[4773]: E0122 13:35:32.719895 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56f9dd78-8e18-4b35-bacb-7159d1aefb47" containerName="dnsmasq-dns" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.719966 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="56f9dd78-8e18-4b35-bacb-7159d1aefb47" containerName="dnsmasq-dns" Jan 22 13:35:32 crc kubenswrapper[4773]: E0122 13:35:32.719981 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56f9dd78-8e18-4b35-bacb-7159d1aefb47" containerName="init" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.719989 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="56f9dd78-8e18-4b35-bacb-7159d1aefb47" containerName="init" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.720272 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="56f9dd78-8e18-4b35-bacb-7159d1aefb47" containerName="dnsmasq-dns" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.721186 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.724595 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-fh8rh" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.724668 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.724682 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.754318 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-v6vjx"] Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.770311 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-5m9h5"] Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.779393 4773 util.go:30] "No sandbox for pod can be found. 
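The machine-config-daemon retry just above finally proceeds at 13:35:11 and the container starts at 13:35:12, after the RemoveContainer attempts at 13:34:44 and 13:34:56 were still rejected: the 5m0s backoff window had not yet elapsed. A sketch of that time-gated restart check follows; the lastFinished timestamp below is an assumption chosen to be consistent with the log (a window ending between 13:34:56 and 13:35:11), not a value taken from it.

```go
package main

import (
	"fmt"
	"time"
)

// shouldRestart allows a restart only once the backoff window measured from
// the container's last exit has elapsed.
func shouldRestart(lastFinished time.Time, backoff time.Duration, now time.Time) bool {
	return now.Sub(lastFinished) >= backoff
}

func main() {
	// Assumed last exit time; the log only brackets it.
	lastFinished := time.Date(2026, 1, 22, 13, 30, 6, 0, time.UTC)
	backoff := 5 * time.Minute
	for _, ts := range []time.Time{
		time.Date(2026, 1, 22, 13, 34, 44, 0, time.UTC),
		time.Date(2026, 1, 22, 13, 34, 56, 0, time.UTC),
		time.Date(2026, 1, 22, 13, 35, 11, 0, time.UTC),
	} {
		fmt.Printf("%s restart allowed: %v\n",
			ts.Format("15:04:05"), shouldRestart(lastFinished, backoff, ts))
	}
}
```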
Need to start a new one" pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.813424 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-5m9h5"] Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.892388 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/3e9cd77c-960b-41ac-b305-9f79452beb10-etc-ovs\") pod \"ovn-controller-ovs-5m9h5\" (UID: \"3e9cd77c-960b-41ac-b305-9f79452beb10\") " pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.892438 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e9cd77c-960b-41ac-b305-9f79452beb10-scripts\") pod \"ovn-controller-ovs-5m9h5\" (UID: \"3e9cd77c-960b-41ac-b305-9f79452beb10\") " pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.892477 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/3e9cd77c-960b-41ac-b305-9f79452beb10-var-lib\") pod \"ovn-controller-ovs-5m9h5\" (UID: \"3e9cd77c-960b-41ac-b305-9f79452beb10\") " pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.892504 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-var-log-ovn\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.892570 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-combined-ca-bundle\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.892613 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-scripts\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.892641 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3e9cd77c-960b-41ac-b305-9f79452beb10-var-run\") pod \"ovn-controller-ovs-5m9h5\" (UID: \"3e9cd77c-960b-41ac-b305-9f79452beb10\") " pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.892671 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-var-run\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.892734 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-ovn-controller-tls-certs\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.892773 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/3e9cd77c-960b-41ac-b305-9f79452beb10-var-log\") pod \"ovn-controller-ovs-5m9h5\" (UID: \"3e9cd77c-960b-41ac-b305-9f79452beb10\") " pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.892801 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-var-run-ovn\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.892872 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pt8jj\" (UniqueName: \"kubernetes.io/projected/3e9cd77c-960b-41ac-b305-9f79452beb10-kube-api-access-pt8jj\") pod \"ovn-controller-ovs-5m9h5\" (UID: \"3e9cd77c-960b-41ac-b305-9f79452beb10\") " pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:32 crc kubenswrapper[4773]: I0122 13:35:32.892898 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbmz6\" (UniqueName: \"kubernetes.io/projected/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-kube-api-access-gbmz6\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.151584 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-scripts\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.151708 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3e9cd77c-960b-41ac-b305-9f79452beb10-var-run\") pod \"ovn-controller-ovs-5m9h5\" (UID: \"3e9cd77c-960b-41ac-b305-9f79452beb10\") " pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.151763 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-var-run\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.151819 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-ovn-controller-tls-certs\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.151867 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/3e9cd77c-960b-41ac-b305-9f79452beb10-var-log\") pod \"ovn-controller-ovs-5m9h5\" (UID: 
\"3e9cd77c-960b-41ac-b305-9f79452beb10\") " pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.151911 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-var-run-ovn\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.151966 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pt8jj\" (UniqueName: \"kubernetes.io/projected/3e9cd77c-960b-41ac-b305-9f79452beb10-kube-api-access-pt8jj\") pod \"ovn-controller-ovs-5m9h5\" (UID: \"3e9cd77c-960b-41ac-b305-9f79452beb10\") " pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.152007 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbmz6\" (UniqueName: \"kubernetes.io/projected/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-kube-api-access-gbmz6\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.152106 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/3e9cd77c-960b-41ac-b305-9f79452beb10-etc-ovs\") pod \"ovn-controller-ovs-5m9h5\" (UID: \"3e9cd77c-960b-41ac-b305-9f79452beb10\") " pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.152160 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e9cd77c-960b-41ac-b305-9f79452beb10-scripts\") pod \"ovn-controller-ovs-5m9h5\" (UID: \"3e9cd77c-960b-41ac-b305-9f79452beb10\") " pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.152198 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/3e9cd77c-960b-41ac-b305-9f79452beb10-var-lib\") pod \"ovn-controller-ovs-5m9h5\" (UID: \"3e9cd77c-960b-41ac-b305-9f79452beb10\") " pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.152240 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-var-log-ovn\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.152277 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-combined-ca-bundle\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.156337 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-var-run-ovn\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.158015 4773 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-var-run\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.158140 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/3e9cd77c-960b-41ac-b305-9f79452beb10-etc-ovs\") pod \"ovn-controller-ovs-5m9h5\" (UID: \"3e9cd77c-960b-41ac-b305-9f79452beb10\") " pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.158252 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/3e9cd77c-960b-41ac-b305-9f79452beb10-var-log\") pod \"ovn-controller-ovs-5m9h5\" (UID: \"3e9cd77c-960b-41ac-b305-9f79452beb10\") " pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.160006 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-scripts\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.160051 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3e9cd77c-960b-41ac-b305-9f79452beb10-var-run\") pod \"ovn-controller-ovs-5m9h5\" (UID: \"3e9cd77c-960b-41ac-b305-9f79452beb10\") " pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.160089 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-var-log-ovn\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.160493 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3e9cd77c-960b-41ac-b305-9f79452beb10-scripts\") pod \"ovn-controller-ovs-5m9h5\" (UID: \"3e9cd77c-960b-41ac-b305-9f79452beb10\") " pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.162456 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-combined-ca-bundle\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.170983 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-ovn-controller-tls-certs\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.172447 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/3e9cd77c-960b-41ac-b305-9f79452beb10-var-lib\") pod \"ovn-controller-ovs-5m9h5\" (UID: \"3e9cd77c-960b-41ac-b305-9f79452beb10\") " pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 
13:35:33.185271 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbmz6\" (UniqueName: \"kubernetes.io/projected/ed191d8f-9eb0-4bc6-aa42-62ea1d99657a-kube-api-access-gbmz6\") pod \"ovn-controller-v6vjx\" (UID: \"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a\") " pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.199799 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pt8jj\" (UniqueName: \"kubernetes.io/projected/3e9cd77c-960b-41ac-b305-9f79452beb10-kube-api-access-pt8jj\") pod \"ovn-controller-ovs-5m9h5\" (UID: \"3e9cd77c-960b-41ac-b305-9f79452beb10\") " pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.349557 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.416615 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:33 crc kubenswrapper[4773]: I0122 13:35:33.845381 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-v6vjx"] Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.325359 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-5m9h5"] Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.384511 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v6vjx" event={"ID":"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a","Type":"ContainerStarted","Data":"8abd1861ca3968374effdba8e2b107b21ab4293b6e687218213d99adecabc12b"} Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.384937 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-v6vjx" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.386729 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5m9h5" event={"ID":"3e9cd77c-960b-41ac-b305-9f79452beb10","Type":"ContainerStarted","Data":"2d78f5f17ae32674452c0d469d52ea2a93c440cceacfe6ff17906d3e080898c5"} Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.415395 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-v6vjx" podStartSLOduration=2.41535649 podStartE2EDuration="2.41535649s" podCreationTimestamp="2026-01-22 13:35:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:35:34.409172117 +0000 UTC m=+6041.987287952" watchObservedRunningTime="2026-01-22 13:35:34.41535649 +0000 UTC m=+6041.993472335" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.485014 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-vxlqf"] Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.487068 4773 util.go:30] "No sandbox for pod can be found. 
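The pod_startup_latency_tracker entries above report podStartSLOduration as the time the pod was observed running minus its creation timestamp; the zero firstStartedPulling/lastFinishedPulling values mean no image pull contributed to it. The snippet below recomputes the ovn-controller-v6vjx figure from the two timestamps printed in the log entry just above.

```go
package main

import (
	"fmt"
	"time"
)

// Recomputes podStartSLOduration for ovn-controller-v6vjx from the log's
// own timestamps: observed-running time minus pod creation time.
func main() {
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, err := time.Parse(layout, "2026-01-22 13:35:32 +0000 UTC")
	if err != nil {
		panic(err)
	}
	observed, err := time.Parse(layout, "2026-01-22 13:35:34.41535649 +0000 UTC")
	if err != nil {
		panic(err)
	}
	fmt.Println(observed.Sub(created)) // 2.41535649s, as logged
}
```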
Need to start a new one" pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.490712 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.525609 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-vxlqf"] Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.557676 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0-combined-ca-bundle\") pod \"ovn-controller-metrics-vxlqf\" (UID: \"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0\") " pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.557777 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rb2l\" (UniqueName: \"kubernetes.io/projected/50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0-kube-api-access-2rb2l\") pod \"ovn-controller-metrics-vxlqf\" (UID: \"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0\") " pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.557871 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0-ovn-rundir\") pod \"ovn-controller-metrics-vxlqf\" (UID: \"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0\") " pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.558218 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-vxlqf\" (UID: \"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0\") " pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.558421 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0-ovs-rundir\") pod \"ovn-controller-metrics-vxlqf\" (UID: \"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0\") " pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.558541 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0-config\") pod \"ovn-controller-metrics-vxlqf\" (UID: \"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0\") " pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.661368 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rb2l\" (UniqueName: \"kubernetes.io/projected/50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0-kube-api-access-2rb2l\") pod \"ovn-controller-metrics-vxlqf\" (UID: \"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0\") " pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.661857 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0-ovn-rundir\") pod \"ovn-controller-metrics-vxlqf\" (UID: 
\"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0\") " pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.662045 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-vxlqf\" (UID: \"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0\") " pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.662214 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0-ovs-rundir\") pod \"ovn-controller-metrics-vxlqf\" (UID: \"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0\") " pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.662591 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0-config\") pod \"ovn-controller-metrics-vxlqf\" (UID: \"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0\") " pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.662936 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0-combined-ca-bundle\") pod \"ovn-controller-metrics-vxlqf\" (UID: \"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0\") " pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.664598 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0-ovn-rundir\") pod \"ovn-controller-metrics-vxlqf\" (UID: \"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0\") " pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.664714 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0-ovs-rundir\") pod \"ovn-controller-metrics-vxlqf\" (UID: \"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0\") " pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.664724 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0-config\") pod \"ovn-controller-metrics-vxlqf\" (UID: \"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0\") " pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.669518 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-vxlqf\" (UID: \"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0\") " pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.671687 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0-combined-ca-bundle\") pod \"ovn-controller-metrics-vxlqf\" (UID: \"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0\") " pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:34 crc 
kubenswrapper[4773]: I0122 13:35:34.682443 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rb2l\" (UniqueName: \"kubernetes.io/projected/50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0-kube-api-access-2rb2l\") pod \"ovn-controller-metrics-vxlqf\" (UID: \"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0\") " pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:34 crc kubenswrapper[4773]: I0122 13:35:34.831504 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-vxlqf" Jan 22 13:35:35 crc kubenswrapper[4773]: I0122 13:35:35.065741 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-c30b-account-create-update-gsngt"] Jan 22 13:35:35 crc kubenswrapper[4773]: I0122 13:35:35.087204 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-9psgh"] Jan 22 13:35:35 crc kubenswrapper[4773]: I0122 13:35:35.098240 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-9psgh"] Jan 22 13:35:35 crc kubenswrapper[4773]: I0122 13:35:35.106249 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-c30b-account-create-update-gsngt"] Jan 22 13:35:35 crc kubenswrapper[4773]: I0122 13:35:35.253836 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-create-skl29"] Jan 22 13:35:35 crc kubenswrapper[4773]: I0122 13:35:35.255385 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-skl29" Jan 22 13:35:35 crc kubenswrapper[4773]: I0122 13:35:35.264411 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-skl29"] Jan 22 13:35:35 crc kubenswrapper[4773]: I0122 13:35:35.350017 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-vxlqf"] Jan 22 13:35:35 crc kubenswrapper[4773]: I0122 13:35:35.388332 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szlgp\" (UniqueName: \"kubernetes.io/projected/78084244-a181-41ba-9993-87a0d3014a91-kube-api-access-szlgp\") pod \"octavia-db-create-skl29\" (UID: \"78084244-a181-41ba-9993-87a0d3014a91\") " pod="openstack/octavia-db-create-skl29" Jan 22 13:35:35 crc kubenswrapper[4773]: I0122 13:35:35.388421 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78084244-a181-41ba-9993-87a0d3014a91-operator-scripts\") pod \"octavia-db-create-skl29\" (UID: \"78084244-a181-41ba-9993-87a0d3014a91\") " pod="openstack/octavia-db-create-skl29" Jan 22 13:35:35 crc kubenswrapper[4773]: I0122 13:35:35.397028 4773 generic.go:334] "Generic (PLEG): container finished" podID="3e9cd77c-960b-41ac-b305-9f79452beb10" containerID="0f0211ad47cc58152904af93fe1c3766711e85c48e44309be22ce577f86dacd4" exitCode=0 Jan 22 13:35:35 crc kubenswrapper[4773]: I0122 13:35:35.398018 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5m9h5" event={"ID":"3e9cd77c-960b-41ac-b305-9f79452beb10","Type":"ContainerDied","Data":"0f0211ad47cc58152904af93fe1c3766711e85c48e44309be22ce577f86dacd4"} Jan 22 13:35:35 crc kubenswrapper[4773]: I0122 13:35:35.400122 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-vxlqf" 
event={"ID":"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0","Type":"ContainerStarted","Data":"860ebe94b2825b1cae89d746dd336b5c145f9d36cef14ca576d285b7e912917b"} Jan 22 13:35:35 crc kubenswrapper[4773]: I0122 13:35:35.402333 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v6vjx" event={"ID":"ed191d8f-9eb0-4bc6-aa42-62ea1d99657a","Type":"ContainerStarted","Data":"766665707ae7e437ffc21569b7f69e7008a468e4b03c1135d282604c3b58b83f"} Jan 22 13:35:35 crc kubenswrapper[4773]: I0122 13:35:35.490163 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szlgp\" (UniqueName: \"kubernetes.io/projected/78084244-a181-41ba-9993-87a0d3014a91-kube-api-access-szlgp\") pod \"octavia-db-create-skl29\" (UID: \"78084244-a181-41ba-9993-87a0d3014a91\") " pod="openstack/octavia-db-create-skl29" Jan 22 13:35:35 crc kubenswrapper[4773]: I0122 13:35:35.490240 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78084244-a181-41ba-9993-87a0d3014a91-operator-scripts\") pod \"octavia-db-create-skl29\" (UID: \"78084244-a181-41ba-9993-87a0d3014a91\") " pod="openstack/octavia-db-create-skl29" Jan 22 13:35:35 crc kubenswrapper[4773]: I0122 13:35:35.491220 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78084244-a181-41ba-9993-87a0d3014a91-operator-scripts\") pod \"octavia-db-create-skl29\" (UID: \"78084244-a181-41ba-9993-87a0d3014a91\") " pod="openstack/octavia-db-create-skl29" Jan 22 13:35:35 crc kubenswrapper[4773]: I0122 13:35:35.512115 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szlgp\" (UniqueName: \"kubernetes.io/projected/78084244-a181-41ba-9993-87a0d3014a91-kube-api-access-szlgp\") pod \"octavia-db-create-skl29\" (UID: \"78084244-a181-41ba-9993-87a0d3014a91\") " pod="openstack/octavia-db-create-skl29" Jan 22 13:35:35 crc kubenswrapper[4773]: I0122 13:35:35.579197 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-skl29" Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.078861 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-skl29"] Jan 22 13:35:36 crc kubenswrapper[4773]: W0122 13:35:36.085106 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod78084244_a181_41ba_9993_87a0d3014a91.slice/crio-7d2cac81723dd52cf18975f38167be87c356a46c70b2f272637310808a4af6ac WatchSource:0}: Error finding container 7d2cac81723dd52cf18975f38167be87c356a46c70b2f272637310808a4af6ac: Status 404 returned error can't find the container with id 7d2cac81723dd52cf18975f38167be87c356a46c70b2f272637310808a4af6ac Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.263604 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-1767-account-create-update-h75t5"] Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.268251 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-1767-account-create-update-h75t5" Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.271183 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-db-secret" Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.288914 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-1767-account-create-update-h75t5"] Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.319897 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8d6481c-aa92-4c21-8d99-7154c471951b-operator-scripts\") pod \"octavia-1767-account-create-update-h75t5\" (UID: \"d8d6481c-aa92-4c21-8d99-7154c471951b\") " pod="openstack/octavia-1767-account-create-update-h75t5" Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.319946 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qb64\" (UniqueName: \"kubernetes.io/projected/d8d6481c-aa92-4c21-8d99-7154c471951b-kube-api-access-4qb64\") pod \"octavia-1767-account-create-update-h75t5\" (UID: \"d8d6481c-aa92-4c21-8d99-7154c471951b\") " pod="openstack/octavia-1767-account-create-update-h75t5" Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.412174 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-vxlqf" event={"ID":"50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0","Type":"ContainerStarted","Data":"fdc7825bfc1ad7296a93a8f32f44e3bc5bfe1342297f478eeeaff1f891df136d"} Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.414839 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-skl29" event={"ID":"78084244-a181-41ba-9993-87a0d3014a91","Type":"ContainerStarted","Data":"7d2cac81723dd52cf18975f38167be87c356a46c70b2f272637310808a4af6ac"} Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.422871 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8d6481c-aa92-4c21-8d99-7154c471951b-operator-scripts\") pod \"octavia-1767-account-create-update-h75t5\" (UID: \"d8d6481c-aa92-4c21-8d99-7154c471951b\") " pod="openstack/octavia-1767-account-create-update-h75t5" Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.422950 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qb64\" (UniqueName: \"kubernetes.io/projected/d8d6481c-aa92-4c21-8d99-7154c471951b-kube-api-access-4qb64\") pod \"octavia-1767-account-create-update-h75t5\" (UID: \"d8d6481c-aa92-4c21-8d99-7154c471951b\") " pod="openstack/octavia-1767-account-create-update-h75t5" Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.424183 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8d6481c-aa92-4c21-8d99-7154c471951b-operator-scripts\") pod \"octavia-1767-account-create-update-h75t5\" (UID: \"d8d6481c-aa92-4c21-8d99-7154c471951b\") " pod="openstack/octavia-1767-account-create-update-h75t5" Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.436797 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5m9h5" event={"ID":"3e9cd77c-960b-41ac-b305-9f79452beb10","Type":"ContainerStarted","Data":"e8cc03714a9dd93d553bfd28080d64b19baf7cb489e1a8252db30896fc6d7930"} Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.436876 
4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.436895 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5m9h5" event={"ID":"3e9cd77c-960b-41ac-b305-9f79452beb10","Type":"ContainerStarted","Data":"24c42be51fc1dff9dea79192488d9d9c405e7e44cf9e20707f24a95fd0e98824"} Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.437720 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.445923 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qb64\" (UniqueName: \"kubernetes.io/projected/d8d6481c-aa92-4c21-8d99-7154c471951b-kube-api-access-4qb64\") pod \"octavia-1767-account-create-update-h75t5\" (UID: \"d8d6481c-aa92-4c21-8d99-7154c471951b\") " pod="openstack/octavia-1767-account-create-update-h75t5" Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.450926 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-vxlqf" podStartSLOduration=2.450897341 podStartE2EDuration="2.450897341s" podCreationTimestamp="2026-01-22 13:35:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:35:36.438510473 +0000 UTC m=+6044.016626308" watchObservedRunningTime="2026-01-22 13:35:36.450897341 +0000 UTC m=+6044.029013176" Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.478565 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-5m9h5" podStartSLOduration=4.478534267 podStartE2EDuration="4.478534267s" podCreationTimestamp="2026-01-22 13:35:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:35:36.475662037 +0000 UTC m=+6044.053777872" watchObservedRunningTime="2026-01-22 13:35:36.478534267 +0000 UTC m=+6044.056650092" Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.622370 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-1767-account-create-update-h75t5" Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.670386 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96667553-15cb-4ca6-88e4-2a1cd4da88df" path="/var/lib/kubelet/pods/96667553-15cb-4ca6-88e4-2a1cd4da88df/volumes" Jan 22 13:35:36 crc kubenswrapper[4773]: I0122 13:35:36.671210 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e821c273-29fb-417c-a66c-e216a2b67666" path="/var/lib/kubelet/pods/e821c273-29fb-417c-a66c-e216a2b67666/volumes" Jan 22 13:35:37 crc kubenswrapper[4773]: I0122 13:35:37.092560 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-1767-account-create-update-h75t5"] Jan 22 13:35:37 crc kubenswrapper[4773]: W0122 13:35:37.095902 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd8d6481c_aa92_4c21_8d99_7154c471951b.slice/crio-a023fb54e79bfdf25709f5fac46b61ace644c3649705b738d23f22dc0d1b65a7 WatchSource:0}: Error finding container a023fb54e79bfdf25709f5fac46b61ace644c3649705b738d23f22dc0d1b65a7: Status 404 returned error can't find the container with id a023fb54e79bfdf25709f5fac46b61ace644c3649705b738d23f22dc0d1b65a7 Jan 22 13:35:37 crc kubenswrapper[4773]: I0122 13:35:37.445844 4773 generic.go:334] "Generic (PLEG): container finished" podID="78084244-a181-41ba-9993-87a0d3014a91" containerID="00d3da605324b27d5aceb91feea0f3cf173f8d9a75f4683662d6bed642c24fc1" exitCode=0 Jan 22 13:35:37 crc kubenswrapper[4773]: I0122 13:35:37.446110 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-skl29" event={"ID":"78084244-a181-41ba-9993-87a0d3014a91","Type":"ContainerDied","Data":"00d3da605324b27d5aceb91feea0f3cf173f8d9a75f4683662d6bed642c24fc1"} Jan 22 13:35:37 crc kubenswrapper[4773]: I0122 13:35:37.450153 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-1767-account-create-update-h75t5" event={"ID":"d8d6481c-aa92-4c21-8d99-7154c471951b","Type":"ContainerStarted","Data":"66ebdeec548397d984c050c848e7c04e51a645ec48b00c8bf99c916589f2c4f7"} Jan 22 13:35:37 crc kubenswrapper[4773]: I0122 13:35:37.450186 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-1767-account-create-update-h75t5" event={"ID":"d8d6481c-aa92-4c21-8d99-7154c471951b","Type":"ContainerStarted","Data":"a023fb54e79bfdf25709f5fac46b61ace644c3649705b738d23f22dc0d1b65a7"} Jan 22 13:35:38 crc kubenswrapper[4773]: I0122 13:35:38.458461 4773 generic.go:334] "Generic (PLEG): container finished" podID="d8d6481c-aa92-4c21-8d99-7154c471951b" containerID="66ebdeec548397d984c050c848e7c04e51a645ec48b00c8bf99c916589f2c4f7" exitCode=0 Jan 22 13:35:38 crc kubenswrapper[4773]: I0122 13:35:38.458518 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-1767-account-create-update-h75t5" event={"ID":"d8d6481c-aa92-4c21-8d99-7154c471951b","Type":"ContainerDied","Data":"66ebdeec548397d984c050c848e7c04e51a645ec48b00c8bf99c916589f2c4f7"} Jan 22 13:35:38 crc kubenswrapper[4773]: I0122 13:35:38.839995 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-create-skl29" Jan 22 13:35:38 crc kubenswrapper[4773]: I0122 13:35:38.880666 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szlgp\" (UniqueName: \"kubernetes.io/projected/78084244-a181-41ba-9993-87a0d3014a91-kube-api-access-szlgp\") pod \"78084244-a181-41ba-9993-87a0d3014a91\" (UID: \"78084244-a181-41ba-9993-87a0d3014a91\") " Jan 22 13:35:38 crc kubenswrapper[4773]: I0122 13:35:38.893795 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78084244-a181-41ba-9993-87a0d3014a91-kube-api-access-szlgp" (OuterVolumeSpecName: "kube-api-access-szlgp") pod "78084244-a181-41ba-9993-87a0d3014a91" (UID: "78084244-a181-41ba-9993-87a0d3014a91"). InnerVolumeSpecName "kube-api-access-szlgp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:35:38 crc kubenswrapper[4773]: I0122 13:35:38.982156 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78084244-a181-41ba-9993-87a0d3014a91-operator-scripts\") pod \"78084244-a181-41ba-9993-87a0d3014a91\" (UID: \"78084244-a181-41ba-9993-87a0d3014a91\") " Jan 22 13:35:38 crc kubenswrapper[4773]: I0122 13:35:38.982609 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szlgp\" (UniqueName: \"kubernetes.io/projected/78084244-a181-41ba-9993-87a0d3014a91-kube-api-access-szlgp\") on node \"crc\" DevicePath \"\"" Jan 22 13:35:38 crc kubenswrapper[4773]: I0122 13:35:38.982924 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78084244-a181-41ba-9993-87a0d3014a91-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "78084244-a181-41ba-9993-87a0d3014a91" (UID: "78084244-a181-41ba-9993-87a0d3014a91"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:35:39 crc kubenswrapper[4773]: I0122 13:35:39.085173 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78084244-a181-41ba-9993-87a0d3014a91-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:35:39 crc kubenswrapper[4773]: I0122 13:35:39.474677 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-skl29" Jan 22 13:35:39 crc kubenswrapper[4773]: I0122 13:35:39.474673 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-skl29" event={"ID":"78084244-a181-41ba-9993-87a0d3014a91","Type":"ContainerDied","Data":"7d2cac81723dd52cf18975f38167be87c356a46c70b2f272637310808a4af6ac"} Jan 22 13:35:39 crc kubenswrapper[4773]: I0122 13:35:39.474802 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d2cac81723dd52cf18975f38167be87c356a46c70b2f272637310808a4af6ac" Jan 22 13:35:39 crc kubenswrapper[4773]: I0122 13:35:39.810903 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-1767-account-create-update-h75t5" Jan 22 13:35:39 crc kubenswrapper[4773]: I0122 13:35:39.999315 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qb64\" (UniqueName: \"kubernetes.io/projected/d8d6481c-aa92-4c21-8d99-7154c471951b-kube-api-access-4qb64\") pod \"d8d6481c-aa92-4c21-8d99-7154c471951b\" (UID: \"d8d6481c-aa92-4c21-8d99-7154c471951b\") " Jan 22 13:35:39 crc kubenswrapper[4773]: I0122 13:35:39.999504 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8d6481c-aa92-4c21-8d99-7154c471951b-operator-scripts\") pod \"d8d6481c-aa92-4c21-8d99-7154c471951b\" (UID: \"d8d6481c-aa92-4c21-8d99-7154c471951b\") " Jan 22 13:35:40 crc kubenswrapper[4773]: I0122 13:35:40.000481 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8d6481c-aa92-4c21-8d99-7154c471951b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d8d6481c-aa92-4c21-8d99-7154c471951b" (UID: "d8d6481c-aa92-4c21-8d99-7154c471951b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:35:40 crc kubenswrapper[4773]: I0122 13:35:40.005460 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8d6481c-aa92-4c21-8d99-7154c471951b-kube-api-access-4qb64" (OuterVolumeSpecName: "kube-api-access-4qb64") pod "d8d6481c-aa92-4c21-8d99-7154c471951b" (UID: "d8d6481c-aa92-4c21-8d99-7154c471951b"). InnerVolumeSpecName "kube-api-access-4qb64". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:35:40 crc kubenswrapper[4773]: I0122 13:35:40.102963 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qb64\" (UniqueName: \"kubernetes.io/projected/d8d6481c-aa92-4c21-8d99-7154c471951b-kube-api-access-4qb64\") on node \"crc\" DevicePath \"\"" Jan 22 13:35:40 crc kubenswrapper[4773]: I0122 13:35:40.103027 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8d6481c-aa92-4c21-8d99-7154c471951b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:35:40 crc kubenswrapper[4773]: I0122 13:35:40.499028 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-1767-account-create-update-h75t5" event={"ID":"d8d6481c-aa92-4c21-8d99-7154c471951b","Type":"ContainerDied","Data":"a023fb54e79bfdf25709f5fac46b61ace644c3649705b738d23f22dc0d1b65a7"} Jan 22 13:35:40 crc kubenswrapper[4773]: I0122 13:35:40.499081 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a023fb54e79bfdf25709f5fac46b61ace644c3649705b738d23f22dc0d1b65a7" Jan 22 13:35:40 crc kubenswrapper[4773]: I0122 13:35:40.499112 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-1767-account-create-update-h75t5" Jan 22 13:35:41 crc kubenswrapper[4773]: I0122 13:35:41.041454 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-825kp"] Jan 22 13:35:41 crc kubenswrapper[4773]: I0122 13:35:41.047956 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-825kp"] Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.320900 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-persistence-db-create-tkbc8"] Jan 22 13:35:42 crc kubenswrapper[4773]: E0122 13:35:42.323616 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78084244-a181-41ba-9993-87a0d3014a91" containerName="mariadb-database-create" Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.323649 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="78084244-a181-41ba-9993-87a0d3014a91" containerName="mariadb-database-create" Jan 22 13:35:42 crc kubenswrapper[4773]: E0122 13:35:42.323729 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8d6481c-aa92-4c21-8d99-7154c471951b" containerName="mariadb-account-create-update" Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.323739 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8d6481c-aa92-4c21-8d99-7154c471951b" containerName="mariadb-account-create-update" Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.324353 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="78084244-a181-41ba-9993-87a0d3014a91" containerName="mariadb-database-create" Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.324399 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8d6481c-aa92-4c21-8d99-7154c471951b" containerName="mariadb-account-create-update" Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.325705 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-persistence-db-create-tkbc8" Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.340695 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-tkbc8"] Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.390180 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76d59e78-a628-42f0-aa02-eb2a5d96dac2-operator-scripts\") pod \"octavia-persistence-db-create-tkbc8\" (UID: \"76d59e78-a628-42f0-aa02-eb2a5d96dac2\") " pod="openstack/octavia-persistence-db-create-tkbc8" Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.390269 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvxss\" (UniqueName: \"kubernetes.io/projected/76d59e78-a628-42f0-aa02-eb2a5d96dac2-kube-api-access-mvxss\") pod \"octavia-persistence-db-create-tkbc8\" (UID: \"76d59e78-a628-42f0-aa02-eb2a5d96dac2\") " pod="openstack/octavia-persistence-db-create-tkbc8" Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.492515 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76d59e78-a628-42f0-aa02-eb2a5d96dac2-operator-scripts\") pod \"octavia-persistence-db-create-tkbc8\" (UID: \"76d59e78-a628-42f0-aa02-eb2a5d96dac2\") " pod="openstack/octavia-persistence-db-create-tkbc8" Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.492699 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvxss\" (UniqueName: \"kubernetes.io/projected/76d59e78-a628-42f0-aa02-eb2a5d96dac2-kube-api-access-mvxss\") pod \"octavia-persistence-db-create-tkbc8\" (UID: \"76d59e78-a628-42f0-aa02-eb2a5d96dac2\") " pod="openstack/octavia-persistence-db-create-tkbc8" Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.494067 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76d59e78-a628-42f0-aa02-eb2a5d96dac2-operator-scripts\") pod \"octavia-persistence-db-create-tkbc8\" (UID: \"76d59e78-a628-42f0-aa02-eb2a5d96dac2\") " pod="openstack/octavia-persistence-db-create-tkbc8" Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.512772 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvxss\" (UniqueName: \"kubernetes.io/projected/76d59e78-a628-42f0-aa02-eb2a5d96dac2-kube-api-access-mvxss\") pod \"octavia-persistence-db-create-tkbc8\" (UID: \"76d59e78-a628-42f0-aa02-eb2a5d96dac2\") " pod="openstack/octavia-persistence-db-create-tkbc8" Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.669779 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a19a55a-727e-4c77-8be7-1bd9c960c1a3" path="/var/lib/kubelet/pods/8a19a55a-727e-4c77-8be7-1bd9c960c1a3/volumes" Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.703563 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-tkbc8" Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.881372 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-3cf3-account-create-update-tgt4t"] Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.883838 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-3cf3-account-create-update-tgt4t" Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.885772 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-persistence-db-secret" Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.895909 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-3cf3-account-create-update-tgt4t"] Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.900621 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64009709-be80-4240-9fa6-15565662fd2e-operator-scripts\") pod \"octavia-3cf3-account-create-update-tgt4t\" (UID: \"64009709-be80-4240-9fa6-15565662fd2e\") " pod="openstack/octavia-3cf3-account-create-update-tgt4t" Jan 22 13:35:42 crc kubenswrapper[4773]: I0122 13:35:42.900686 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5svkq\" (UniqueName: \"kubernetes.io/projected/64009709-be80-4240-9fa6-15565662fd2e-kube-api-access-5svkq\") pod \"octavia-3cf3-account-create-update-tgt4t\" (UID: \"64009709-be80-4240-9fa6-15565662fd2e\") " pod="openstack/octavia-3cf3-account-create-update-tgt4t" Jan 22 13:35:43 crc kubenswrapper[4773]: I0122 13:35:43.002700 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64009709-be80-4240-9fa6-15565662fd2e-operator-scripts\") pod \"octavia-3cf3-account-create-update-tgt4t\" (UID: \"64009709-be80-4240-9fa6-15565662fd2e\") " pod="openstack/octavia-3cf3-account-create-update-tgt4t" Jan 22 13:35:43 crc kubenswrapper[4773]: I0122 13:35:43.002777 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5svkq\" (UniqueName: \"kubernetes.io/projected/64009709-be80-4240-9fa6-15565662fd2e-kube-api-access-5svkq\") pod \"octavia-3cf3-account-create-update-tgt4t\" (UID: \"64009709-be80-4240-9fa6-15565662fd2e\") " pod="openstack/octavia-3cf3-account-create-update-tgt4t" Jan 22 13:35:43 crc kubenswrapper[4773]: I0122 13:35:43.003922 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64009709-be80-4240-9fa6-15565662fd2e-operator-scripts\") pod \"octavia-3cf3-account-create-update-tgt4t\" (UID: \"64009709-be80-4240-9fa6-15565662fd2e\") " pod="openstack/octavia-3cf3-account-create-update-tgt4t" Jan 22 13:35:43 crc kubenswrapper[4773]: I0122 13:35:43.019017 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5svkq\" (UniqueName: \"kubernetes.io/projected/64009709-be80-4240-9fa6-15565662fd2e-kube-api-access-5svkq\") pod \"octavia-3cf3-account-create-update-tgt4t\" (UID: \"64009709-be80-4240-9fa6-15565662fd2e\") " pod="openstack/octavia-3cf3-account-create-update-tgt4t" Jan 22 13:35:43 crc kubenswrapper[4773]: I0122 13:35:43.210524 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-3cf3-account-create-update-tgt4t" Jan 22 13:35:43 crc kubenswrapper[4773]: I0122 13:35:43.216069 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-tkbc8"] Jan 22 13:35:43 crc kubenswrapper[4773]: I0122 13:35:43.532209 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-tkbc8" event={"ID":"76d59e78-a628-42f0-aa02-eb2a5d96dac2","Type":"ContainerStarted","Data":"f33dd1bb673722298edfa0578a13ea0bec6c4fd19dd0a53d92c07ba84bc2f579"} Jan 22 13:35:43 crc kubenswrapper[4773]: I0122 13:35:43.532546 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-tkbc8" event={"ID":"76d59e78-a628-42f0-aa02-eb2a5d96dac2","Type":"ContainerStarted","Data":"3eceb2f27f7e6c7c5311a8270c350468784a721e1bd760c13fcd5e43c8964b04"} Jan 22 13:35:43 crc kubenswrapper[4773]: I0122 13:35:43.547189 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-persistence-db-create-tkbc8" podStartSLOduration=1.547166287 podStartE2EDuration="1.547166287s" podCreationTimestamp="2026-01-22 13:35:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:35:43.544988606 +0000 UTC m=+6051.123104431" watchObservedRunningTime="2026-01-22 13:35:43.547166287 +0000 UTC m=+6051.125282112" Jan 22 13:35:43 crc kubenswrapper[4773]: I0122 13:35:43.658214 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-3cf3-account-create-update-tgt4t"] Jan 22 13:35:43 crc kubenswrapper[4773]: W0122 13:35:43.660201 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64009709_be80_4240_9fa6_15565662fd2e.slice/crio-5a3381100d5869c4b55da54066739af9fd860a8d01096b4f7682426b7e29130b WatchSource:0}: Error finding container 5a3381100d5869c4b55da54066739af9fd860a8d01096b4f7682426b7e29130b: Status 404 returned error can't find the container with id 5a3381100d5869c4b55da54066739af9fd860a8d01096b4f7682426b7e29130b Jan 22 13:35:44 crc kubenswrapper[4773]: I0122 13:35:44.542435 4773 generic.go:334] "Generic (PLEG): container finished" podID="76d59e78-a628-42f0-aa02-eb2a5d96dac2" containerID="f33dd1bb673722298edfa0578a13ea0bec6c4fd19dd0a53d92c07ba84bc2f579" exitCode=0 Jan 22 13:35:44 crc kubenswrapper[4773]: I0122 13:35:44.542514 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-tkbc8" event={"ID":"76d59e78-a628-42f0-aa02-eb2a5d96dac2","Type":"ContainerDied","Data":"f33dd1bb673722298edfa0578a13ea0bec6c4fd19dd0a53d92c07ba84bc2f579"} Jan 22 13:35:44 crc kubenswrapper[4773]: I0122 13:35:44.544843 4773 generic.go:334] "Generic (PLEG): container finished" podID="64009709-be80-4240-9fa6-15565662fd2e" containerID="41fb6d9f0a8ec944d628ac02af22b171d27c0b1b1eb264b243c3476727176991" exitCode=0 Jan 22 13:35:44 crc kubenswrapper[4773]: I0122 13:35:44.544875 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-3cf3-account-create-update-tgt4t" event={"ID":"64009709-be80-4240-9fa6-15565662fd2e","Type":"ContainerDied","Data":"41fb6d9f0a8ec944d628ac02af22b171d27c0b1b1eb264b243c3476727176991"} Jan 22 13:35:44 crc kubenswrapper[4773]: I0122 13:35:44.544895 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-3cf3-account-create-update-tgt4t" 
event={"ID":"64009709-be80-4240-9fa6-15565662fd2e","Type":"ContainerStarted","Data":"5a3381100d5869c4b55da54066739af9fd860a8d01096b4f7682426b7e29130b"} Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.016211 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-tkbc8" Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.023706 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-3cf3-account-create-update-tgt4t" Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.194076 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvxss\" (UniqueName: \"kubernetes.io/projected/76d59e78-a628-42f0-aa02-eb2a5d96dac2-kube-api-access-mvxss\") pod \"76d59e78-a628-42f0-aa02-eb2a5d96dac2\" (UID: \"76d59e78-a628-42f0-aa02-eb2a5d96dac2\") " Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.194222 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5svkq\" (UniqueName: \"kubernetes.io/projected/64009709-be80-4240-9fa6-15565662fd2e-kube-api-access-5svkq\") pod \"64009709-be80-4240-9fa6-15565662fd2e\" (UID: \"64009709-be80-4240-9fa6-15565662fd2e\") " Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.194277 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64009709-be80-4240-9fa6-15565662fd2e-operator-scripts\") pod \"64009709-be80-4240-9fa6-15565662fd2e\" (UID: \"64009709-be80-4240-9fa6-15565662fd2e\") " Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.194390 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76d59e78-a628-42f0-aa02-eb2a5d96dac2-operator-scripts\") pod \"76d59e78-a628-42f0-aa02-eb2a5d96dac2\" (UID: \"76d59e78-a628-42f0-aa02-eb2a5d96dac2\") " Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.195025 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64009709-be80-4240-9fa6-15565662fd2e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "64009709-be80-4240-9fa6-15565662fd2e" (UID: "64009709-be80-4240-9fa6-15565662fd2e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.195217 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76d59e78-a628-42f0-aa02-eb2a5d96dac2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "76d59e78-a628-42f0-aa02-eb2a5d96dac2" (UID: "76d59e78-a628-42f0-aa02-eb2a5d96dac2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.199707 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76d59e78-a628-42f0-aa02-eb2a5d96dac2-kube-api-access-mvxss" (OuterVolumeSpecName: "kube-api-access-mvxss") pod "76d59e78-a628-42f0-aa02-eb2a5d96dac2" (UID: "76d59e78-a628-42f0-aa02-eb2a5d96dac2"). InnerVolumeSpecName "kube-api-access-mvxss". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.199919 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64009709-be80-4240-9fa6-15565662fd2e-kube-api-access-5svkq" (OuterVolumeSpecName: "kube-api-access-5svkq") pod "64009709-be80-4240-9fa6-15565662fd2e" (UID: "64009709-be80-4240-9fa6-15565662fd2e"). InnerVolumeSpecName "kube-api-access-5svkq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.297321 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvxss\" (UniqueName: \"kubernetes.io/projected/76d59e78-a628-42f0-aa02-eb2a5d96dac2-kube-api-access-mvxss\") on node \"crc\" DevicePath \"\"" Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.297396 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5svkq\" (UniqueName: \"kubernetes.io/projected/64009709-be80-4240-9fa6-15565662fd2e-kube-api-access-5svkq\") on node \"crc\" DevicePath \"\"" Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.297425 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64009709-be80-4240-9fa6-15565662fd2e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.297441 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76d59e78-a628-42f0-aa02-eb2a5d96dac2-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.563822 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-tkbc8" event={"ID":"76d59e78-a628-42f0-aa02-eb2a5d96dac2","Type":"ContainerDied","Data":"3eceb2f27f7e6c7c5311a8270c350468784a721e1bd760c13fcd5e43c8964b04"} Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.563850 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-tkbc8" Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.563862 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3eceb2f27f7e6c7c5311a8270c350468784a721e1bd760c13fcd5e43c8964b04" Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.565607 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-3cf3-account-create-update-tgt4t" event={"ID":"64009709-be80-4240-9fa6-15565662fd2e","Type":"ContainerDied","Data":"5a3381100d5869c4b55da54066739af9fd860a8d01096b4f7682426b7e29130b"} Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.565631 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a3381100d5869c4b55da54066739af9fd860a8d01096b4f7682426b7e29130b" Jan 22 13:35:46 crc kubenswrapper[4773]: I0122 13:35:46.565653 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-3cf3-account-create-update-tgt4t" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.462995 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-api-74488454c7-sdkm9"] Jan 22 13:35:48 crc kubenswrapper[4773]: E0122 13:35:48.463792 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76d59e78-a628-42f0-aa02-eb2a5d96dac2" containerName="mariadb-database-create" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.463809 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="76d59e78-a628-42f0-aa02-eb2a5d96dac2" containerName="mariadb-database-create" Jan 22 13:35:48 crc kubenswrapper[4773]: E0122 13:35:48.463834 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64009709-be80-4240-9fa6-15565662fd2e" containerName="mariadb-account-create-update" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.463841 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="64009709-be80-4240-9fa6-15565662fd2e" containerName="mariadb-account-create-update" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.464040 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="76d59e78-a628-42f0-aa02-eb2a5d96dac2" containerName="mariadb-database-create" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.464065 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="64009709-be80-4240-9fa6-15565662fd2e" containerName="mariadb-account-create-update" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.468098 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.471964 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-scripts" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.472086 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-octavia-ovndbs" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.472201 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-config-data" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.472421 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-octavia-dockercfg-bv7lg" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.480369 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-74488454c7-sdkm9"] Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.647820 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-ovndb-tls-certs\") pod \"octavia-api-74488454c7-sdkm9\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.647894 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-config-data-merged\") pod \"octavia-api-74488454c7-sdkm9\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.647936 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"octavia-run\" (UniqueName: 
\"kubernetes.io/empty-dir/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-octavia-run\") pod \"octavia-api-74488454c7-sdkm9\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.648010 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-combined-ca-bundle\") pod \"octavia-api-74488454c7-sdkm9\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.648040 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-scripts\") pod \"octavia-api-74488454c7-sdkm9\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.648075 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-config-data\") pod \"octavia-api-74488454c7-sdkm9\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.750919 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-config-data-merged\") pod \"octavia-api-74488454c7-sdkm9\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.751214 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-octavia-run\") pod \"octavia-api-74488454c7-sdkm9\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.751331 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-combined-ca-bundle\") pod \"octavia-api-74488454c7-sdkm9\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.751366 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-scripts\") pod \"octavia-api-74488454c7-sdkm9\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.751403 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-config-data\") pod \"octavia-api-74488454c7-sdkm9\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.751458 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-ovndb-tls-certs\") pod \"octavia-api-74488454c7-sdkm9\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.752793 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-octavia-run\") pod \"octavia-api-74488454c7-sdkm9\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.753079 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-config-data-merged\") pod \"octavia-api-74488454c7-sdkm9\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.758506 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-config-data\") pod \"octavia-api-74488454c7-sdkm9\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.759277 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-combined-ca-bundle\") pod \"octavia-api-74488454c7-sdkm9\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.760228 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-ovndb-tls-certs\") pod \"octavia-api-74488454c7-sdkm9\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.764077 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-scripts\") pod \"octavia-api-74488454c7-sdkm9\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:48 crc kubenswrapper[4773]: I0122 13:35:48.794514 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:35:49 crc kubenswrapper[4773]: I0122 13:35:49.297517 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-74488454c7-sdkm9"] Jan 22 13:35:49 crc kubenswrapper[4773]: I0122 13:35:49.629371 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-74488454c7-sdkm9" event={"ID":"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95","Type":"ContainerStarted","Data":"380430e513d9041ba30b684cbbbc624a6ee20bd9322a2a4f4337f749b53436e8"} Jan 22 13:35:58 crc kubenswrapper[4773]: I0122 13:35:58.061552 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-2hpvb"] Jan 22 13:35:58 crc kubenswrapper[4773]: I0122 13:35:58.070811 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-2hpvb"] Jan 22 13:35:58 crc kubenswrapper[4773]: I0122 13:35:58.668583 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="381a6d42-86f2-4444-ae9b-af0f9918d115" path="/var/lib/kubelet/pods/381a6d42-86f2-4444-ae9b-af0f9918d115/volumes" Jan 22 13:35:58 crc kubenswrapper[4773]: I0122 13:35:58.730938 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-74488454c7-sdkm9" event={"ID":"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95","Type":"ContainerStarted","Data":"8e54d180288b9fcc8c34931642594f97e77e3d20ecd555ed0e6bc3c1e50309a2"} Jan 22 13:35:59 crc kubenswrapper[4773]: I0122 13:35:59.754730 4773 generic.go:334] "Generic (PLEG): container finished" podID="00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" containerID="8e54d180288b9fcc8c34931642594f97e77e3d20ecd555ed0e6bc3c1e50309a2" exitCode=0 Jan 22 13:35:59 crc kubenswrapper[4773]: I0122 13:35:59.754940 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-74488454c7-sdkm9" event={"ID":"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95","Type":"ContainerDied","Data":"8e54d180288b9fcc8c34931642594f97e77e3d20ecd555ed0e6bc3c1e50309a2"} Jan 22 13:36:00 crc kubenswrapper[4773]: I0122 13:36:00.768841 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-74488454c7-sdkm9" event={"ID":"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95","Type":"ContainerStarted","Data":"dadce33fddcefb0721ba0770213c04a775d253fcbea75e1b78dc7328b2b2daf0"} Jan 22 13:36:00 crc kubenswrapper[4773]: I0122 13:36:00.769018 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-74488454c7-sdkm9" event={"ID":"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95","Type":"ContainerStarted","Data":"e8a799e6fc3bf1f1549214561c6b824e54ea16e6885d81440547b01defaa27db"} Jan 22 13:36:00 crc kubenswrapper[4773]: I0122 13:36:00.770524 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:36:00 crc kubenswrapper[4773]: I0122 13:36:00.770550 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:36:00 crc kubenswrapper[4773]: I0122 13:36:00.804917 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-api-74488454c7-sdkm9" podStartSLOduration=3.666952408 podStartE2EDuration="12.804892958s" podCreationTimestamp="2026-01-22 13:35:48 +0000 UTC" firstStartedPulling="2026-01-22 13:35:49.307445603 +0000 UTC m=+6056.885561428" lastFinishedPulling="2026-01-22 13:35:58.445386143 +0000 UTC m=+6066.023501978" observedRunningTime="2026-01-22 13:36:00.800832674 +0000 UTC m=+6068.378948499" 
watchObservedRunningTime="2026-01-22 13:36:00.804892958 +0000 UTC m=+6068.383008783" Jan 22 13:36:05 crc kubenswrapper[4773]: I0122 13:36:05.626526 4773 scope.go:117] "RemoveContainer" containerID="4493a71c8473fcb4b98525c32baf48effc78d0d20fa9df9423f3a6011b050479" Jan 22 13:36:05 crc kubenswrapper[4773]: I0122 13:36:05.666067 4773 scope.go:117] "RemoveContainer" containerID="97ea39e12b895cfbfb3057514322954bdb50697f9b2cf264a89a8fc97b197890" Jan 22 13:36:05 crc kubenswrapper[4773]: I0122 13:36:05.724133 4773 scope.go:117] "RemoveContainer" containerID="81bcf52f424f152c063a670f1d4cf1b5e0fd41058217f011784e661ffdc406a2" Jan 22 13:36:05 crc kubenswrapper[4773]: I0122 13:36:05.748774 4773 scope.go:117] "RemoveContainer" containerID="bd3e120d71e493df75912d5e613d25ab17046e440593bc7508487d70eb308c05" Jan 22 13:36:05 crc kubenswrapper[4773]: I0122 13:36:05.799947 4773 scope.go:117] "RemoveContainer" containerID="1e89ebca9eceddf72d3cd58fc9b10b78bbf00c1141580acf8cc7b7c10d22ac76" Jan 22 13:36:05 crc kubenswrapper[4773]: I0122 13:36:05.847770 4773 scope.go:117] "RemoveContainer" containerID="8cbd0324163e50da599ad7216870f0f9e4a9f11145302132234de25c57346386" Jan 22 13:36:05 crc kubenswrapper[4773]: I0122 13:36:05.923604 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mhm89"] Jan 22 13:36:05 crc kubenswrapper[4773]: I0122 13:36:05.927101 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mhm89" Jan 22 13:36:05 crc kubenswrapper[4773]: I0122 13:36:05.943473 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mhm89"] Jan 22 13:36:06 crc kubenswrapper[4773]: I0122 13:36:06.021809 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcbdf434-2048-47ad-935a-c7f4aea97f72-utilities\") pod \"certified-operators-mhm89\" (UID: \"bcbdf434-2048-47ad-935a-c7f4aea97f72\") " pod="openshift-marketplace/certified-operators-mhm89" Jan 22 13:36:06 crc kubenswrapper[4773]: I0122 13:36:06.021866 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcbdf434-2048-47ad-935a-c7f4aea97f72-catalog-content\") pod \"certified-operators-mhm89\" (UID: \"bcbdf434-2048-47ad-935a-c7f4aea97f72\") " pod="openshift-marketplace/certified-operators-mhm89" Jan 22 13:36:06 crc kubenswrapper[4773]: I0122 13:36:06.021909 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7dlf\" (UniqueName: \"kubernetes.io/projected/bcbdf434-2048-47ad-935a-c7f4aea97f72-kube-api-access-j7dlf\") pod \"certified-operators-mhm89\" (UID: \"bcbdf434-2048-47ad-935a-c7f4aea97f72\") " pod="openshift-marketplace/certified-operators-mhm89" Jan 22 13:36:06 crc kubenswrapper[4773]: I0122 13:36:06.123522 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcbdf434-2048-47ad-935a-c7f4aea97f72-utilities\") pod \"certified-operators-mhm89\" (UID: \"bcbdf434-2048-47ad-935a-c7f4aea97f72\") " pod="openshift-marketplace/certified-operators-mhm89" Jan 22 13:36:06 crc kubenswrapper[4773]: I0122 13:36:06.123578 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/bcbdf434-2048-47ad-935a-c7f4aea97f72-catalog-content\") pod \"certified-operators-mhm89\" (UID: \"bcbdf434-2048-47ad-935a-c7f4aea97f72\") " pod="openshift-marketplace/certified-operators-mhm89" Jan 22 13:36:06 crc kubenswrapper[4773]: I0122 13:36:06.123605 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7dlf\" (UniqueName: \"kubernetes.io/projected/bcbdf434-2048-47ad-935a-c7f4aea97f72-kube-api-access-j7dlf\") pod \"certified-operators-mhm89\" (UID: \"bcbdf434-2048-47ad-935a-c7f4aea97f72\") " pod="openshift-marketplace/certified-operators-mhm89" Jan 22 13:36:06 crc kubenswrapper[4773]: I0122 13:36:06.124092 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcbdf434-2048-47ad-935a-c7f4aea97f72-catalog-content\") pod \"certified-operators-mhm89\" (UID: \"bcbdf434-2048-47ad-935a-c7f4aea97f72\") " pod="openshift-marketplace/certified-operators-mhm89" Jan 22 13:36:06 crc kubenswrapper[4773]: I0122 13:36:06.124362 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcbdf434-2048-47ad-935a-c7f4aea97f72-utilities\") pod \"certified-operators-mhm89\" (UID: \"bcbdf434-2048-47ad-935a-c7f4aea97f72\") " pod="openshift-marketplace/certified-operators-mhm89" Jan 22 13:36:06 crc kubenswrapper[4773]: I0122 13:36:06.144337 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7dlf\" (UniqueName: \"kubernetes.io/projected/bcbdf434-2048-47ad-935a-c7f4aea97f72-kube-api-access-j7dlf\") pod \"certified-operators-mhm89\" (UID: \"bcbdf434-2048-47ad-935a-c7f4aea97f72\") " pod="openshift-marketplace/certified-operators-mhm89" Jan 22 13:36:06 crc kubenswrapper[4773]: I0122 13:36:06.270408 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mhm89" Jan 22 13:36:06 crc kubenswrapper[4773]: I0122 13:36:06.800951 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mhm89"] Jan 22 13:36:06 crc kubenswrapper[4773]: I0122 13:36:06.836467 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mhm89" event={"ID":"bcbdf434-2048-47ad-935a-c7f4aea97f72","Type":"ContainerStarted","Data":"072007fbc854fce7a4a8972baf1cc967312518f1361fc8c65311cb113711abcf"} Jan 22 13:36:07 crc kubenswrapper[4773]: I0122 13:36:07.849184 4773 generic.go:334] "Generic (PLEG): container finished" podID="bcbdf434-2048-47ad-935a-c7f4aea97f72" containerID="ea438151a539c61f22762cad63a59b35a1944363ef55a1538c7a355ac608c126" exitCode=0 Jan 22 13:36:07 crc kubenswrapper[4773]: I0122 13:36:07.849256 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mhm89" event={"ID":"bcbdf434-2048-47ad-935a-c7f4aea97f72","Type":"ContainerDied","Data":"ea438151a539c61f22762cad63a59b35a1944363ef55a1538c7a355ac608c126"} Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.415251 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-v6vjx" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.481392 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.513768 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-5m9h5" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.669218 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-v6vjx-config-rcqwq"] Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.670774 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.677813 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-v6vjx-config-rcqwq"] Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.681588 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.693768 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8be312cf-a7cb-4371-81cd-8632bdd4cee9-var-run\") pod \"ovn-controller-v6vjx-config-rcqwq\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.693830 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csd7l\" (UniqueName: \"kubernetes.io/projected/8be312cf-a7cb-4371-81cd-8632bdd4cee9-kube-api-access-csd7l\") pod \"ovn-controller-v6vjx-config-rcqwq\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.693863 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8be312cf-a7cb-4371-81cd-8632bdd4cee9-var-log-ovn\") pod \"ovn-controller-v6vjx-config-rcqwq\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.694044 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8be312cf-a7cb-4371-81cd-8632bdd4cee9-var-run-ovn\") pod \"ovn-controller-v6vjx-config-rcqwq\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.694165 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8be312cf-a7cb-4371-81cd-8632bdd4cee9-scripts\") pod \"ovn-controller-v6vjx-config-rcqwq\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.694220 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8be312cf-a7cb-4371-81cd-8632bdd4cee9-additional-scripts\") pod \"ovn-controller-v6vjx-config-rcqwq\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.797635 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8be312cf-a7cb-4371-81cd-8632bdd4cee9-scripts\") pod \"ovn-controller-v6vjx-config-rcqwq\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.797705 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: 
\"kubernetes.io/configmap/8be312cf-a7cb-4371-81cd-8632bdd4cee9-additional-scripts\") pod \"ovn-controller-v6vjx-config-rcqwq\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.797757 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8be312cf-a7cb-4371-81cd-8632bdd4cee9-var-run\") pod \"ovn-controller-v6vjx-config-rcqwq\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.797784 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csd7l\" (UniqueName: \"kubernetes.io/projected/8be312cf-a7cb-4371-81cd-8632bdd4cee9-kube-api-access-csd7l\") pod \"ovn-controller-v6vjx-config-rcqwq\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.797809 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8be312cf-a7cb-4371-81cd-8632bdd4cee9-var-log-ovn\") pod \"ovn-controller-v6vjx-config-rcqwq\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.797894 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8be312cf-a7cb-4371-81cd-8632bdd4cee9-var-run-ovn\") pod \"ovn-controller-v6vjx-config-rcqwq\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.798261 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8be312cf-a7cb-4371-81cd-8632bdd4cee9-var-run-ovn\") pod \"ovn-controller-v6vjx-config-rcqwq\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.798443 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8be312cf-a7cb-4371-81cd-8632bdd4cee9-var-run\") pod \"ovn-controller-v6vjx-config-rcqwq\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.799202 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8be312cf-a7cb-4371-81cd-8632bdd4cee9-additional-scripts\") pod \"ovn-controller-v6vjx-config-rcqwq\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.799656 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8be312cf-a7cb-4371-81cd-8632bdd4cee9-var-log-ovn\") pod \"ovn-controller-v6vjx-config-rcqwq\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.800086 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/8be312cf-a7cb-4371-81cd-8632bdd4cee9-scripts\") pod \"ovn-controller-v6vjx-config-rcqwq\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.818989 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csd7l\" (UniqueName: \"kubernetes.io/projected/8be312cf-a7cb-4371-81cd-8632bdd4cee9-kube-api-access-csd7l\") pod \"ovn-controller-v6vjx-config-rcqwq\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.863405 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mhm89" event={"ID":"bcbdf434-2048-47ad-935a-c7f4aea97f72","Type":"ContainerStarted","Data":"b56b44e8d048d16059f1360402019d5475e4ac29f89c86328c17b75cd157b1ff"} Jan 22 13:36:08 crc kubenswrapper[4773]: I0122 13:36:08.993644 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:09 crc kubenswrapper[4773]: I0122 13:36:09.460658 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-v6vjx-config-rcqwq"] Jan 22 13:36:09 crc kubenswrapper[4773]: W0122 13:36:09.478937 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8be312cf_a7cb_4371_81cd_8632bdd4cee9.slice/crio-291a24672e045367cd0cd74ba77360b53e42e71eecce04fd6faa06f8a6e6db44 WatchSource:0}: Error finding container 291a24672e045367cd0cd74ba77360b53e42e71eecce04fd6faa06f8a6e6db44: Status 404 returned error can't find the container with id 291a24672e045367cd0cd74ba77360b53e42e71eecce04fd6faa06f8a6e6db44 Jan 22 13:36:09 crc kubenswrapper[4773]: I0122 13:36:09.874822 4773 generic.go:334] "Generic (PLEG): container finished" podID="bcbdf434-2048-47ad-935a-c7f4aea97f72" containerID="b56b44e8d048d16059f1360402019d5475e4ac29f89c86328c17b75cd157b1ff" exitCode=0 Jan 22 13:36:09 crc kubenswrapper[4773]: I0122 13:36:09.875192 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mhm89" event={"ID":"bcbdf434-2048-47ad-935a-c7f4aea97f72","Type":"ContainerDied","Data":"b56b44e8d048d16059f1360402019d5475e4ac29f89c86328c17b75cd157b1ff"} Jan 22 13:36:09 crc kubenswrapper[4773]: I0122 13:36:09.879239 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v6vjx-config-rcqwq" event={"ID":"8be312cf-a7cb-4371-81cd-8632bdd4cee9","Type":"ContainerStarted","Data":"834a5a5e9606d2c9fa65118e06ee2eb3185115cf3698022c460cccb1d05a0a16"} Jan 22 13:36:09 crc kubenswrapper[4773]: I0122 13:36:09.879302 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v6vjx-config-rcqwq" event={"ID":"8be312cf-a7cb-4371-81cd-8632bdd4cee9","Type":"ContainerStarted","Data":"291a24672e045367cd0cd74ba77360b53e42e71eecce04fd6faa06f8a6e6db44"} Jan 22 13:36:10 crc kubenswrapper[4773]: I0122 13:36:10.895741 4773 generic.go:334] "Generic (PLEG): container finished" podID="8be312cf-a7cb-4371-81cd-8632bdd4cee9" containerID="834a5a5e9606d2c9fa65118e06ee2eb3185115cf3698022c460cccb1d05a0a16" exitCode=0 Jan 22 13:36:10 crc kubenswrapper[4773]: I0122 13:36:10.895837 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v6vjx-config-rcqwq" 
event={"ID":"8be312cf-a7cb-4371-81cd-8632bdd4cee9","Type":"ContainerDied","Data":"834a5a5e9606d2c9fa65118e06ee2eb3185115cf3698022c460cccb1d05a0a16"} Jan 22 13:36:10 crc kubenswrapper[4773]: I0122 13:36:10.905378 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mhm89" event={"ID":"bcbdf434-2048-47ad-935a-c7f4aea97f72","Type":"ContainerStarted","Data":"35c674aa52aae1bacd2dfccb0916d5113b2326330883dc5a604491992a877d02"} Jan 22 13:36:10 crc kubenswrapper[4773]: I0122 13:36:10.943135 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mhm89" podStartSLOduration=3.241907441 podStartE2EDuration="5.943109451s" podCreationTimestamp="2026-01-22 13:36:05 +0000 UTC" firstStartedPulling="2026-01-22 13:36:07.854017429 +0000 UTC m=+6075.432133254" lastFinishedPulling="2026-01-22 13:36:10.555219409 +0000 UTC m=+6078.133335264" observedRunningTime="2026-01-22 13:36:10.939032536 +0000 UTC m=+6078.517148391" watchObservedRunningTime="2026-01-22 13:36:10.943109451 +0000 UTC m=+6078.521225296" Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.365198 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.477391 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8be312cf-a7cb-4371-81cd-8632bdd4cee9-var-run\") pod \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.477466 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8be312cf-a7cb-4371-81cd-8632bdd4cee9-scripts\") pod \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.477497 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8be312cf-a7cb-4371-81cd-8632bdd4cee9-var-run" (OuterVolumeSpecName: "var-run") pod "8be312cf-a7cb-4371-81cd-8632bdd4cee9" (UID: "8be312cf-a7cb-4371-81cd-8632bdd4cee9"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.477588 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-csd7l\" (UniqueName: \"kubernetes.io/projected/8be312cf-a7cb-4371-81cd-8632bdd4cee9-kube-api-access-csd7l\") pod \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.477710 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8be312cf-a7cb-4371-81cd-8632bdd4cee9-var-log-ovn\") pod \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.477738 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8be312cf-a7cb-4371-81cd-8632bdd4cee9-var-run-ovn\") pod \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.477803 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8be312cf-a7cb-4371-81cd-8632bdd4cee9-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "8be312cf-a7cb-4371-81cd-8632bdd4cee9" (UID: "8be312cf-a7cb-4371-81cd-8632bdd4cee9"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.477828 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8be312cf-a7cb-4371-81cd-8632bdd4cee9-additional-scripts\") pod \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\" (UID: \"8be312cf-a7cb-4371-81cd-8632bdd4cee9\") " Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.477847 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8be312cf-a7cb-4371-81cd-8632bdd4cee9-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "8be312cf-a7cb-4371-81cd-8632bdd4cee9" (UID: "8be312cf-a7cb-4371-81cd-8632bdd4cee9"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.478334 4773 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8be312cf-a7cb-4371-81cd-8632bdd4cee9-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.478358 4773 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8be312cf-a7cb-4371-81cd-8632bdd4cee9-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.478368 4773 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8be312cf-a7cb-4371-81cd-8632bdd4cee9-var-run\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.478480 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8be312cf-a7cb-4371-81cd-8632bdd4cee9-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "8be312cf-a7cb-4371-81cd-8632bdd4cee9" (UID: "8be312cf-a7cb-4371-81cd-8632bdd4cee9"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.478952 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8be312cf-a7cb-4371-81cd-8632bdd4cee9-scripts" (OuterVolumeSpecName: "scripts") pod "8be312cf-a7cb-4371-81cd-8632bdd4cee9" (UID: "8be312cf-a7cb-4371-81cd-8632bdd4cee9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.484164 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8be312cf-a7cb-4371-81cd-8632bdd4cee9-kube-api-access-csd7l" (OuterVolumeSpecName: "kube-api-access-csd7l") pod "8be312cf-a7cb-4371-81cd-8632bdd4cee9" (UID: "8be312cf-a7cb-4371-81cd-8632bdd4cee9"). InnerVolumeSpecName "kube-api-access-csd7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.580639 4773 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8be312cf-a7cb-4371-81cd-8632bdd4cee9-additional-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.580672 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8be312cf-a7cb-4371-81cd-8632bdd4cee9-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.580683 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-csd7l\" (UniqueName: \"kubernetes.io/projected/8be312cf-a7cb-4371-81cd-8632bdd4cee9-kube-api-access-csd7l\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.928539 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v6vjx-config-rcqwq" event={"ID":"8be312cf-a7cb-4371-81cd-8632bdd4cee9","Type":"ContainerDied","Data":"291a24672e045367cd0cd74ba77360b53e42e71eecce04fd6faa06f8a6e6db44"} Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.928585 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="291a24672e045367cd0cd74ba77360b53e42e71eecce04fd6faa06f8a6e6db44" Jan 22 13:36:12 crc kubenswrapper[4773]: I0122 13:36:12.928690 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-v6vjx-config-rcqwq" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.455561 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-v6vjx-config-rcqwq"] Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.463937 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-v6vjx-config-rcqwq"] Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.625012 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-v6vjx-config-7pp2n"] Jan 22 13:36:13 crc kubenswrapper[4773]: E0122 13:36:13.625607 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8be312cf-a7cb-4371-81cd-8632bdd4cee9" containerName="ovn-config" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.625631 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="8be312cf-a7cb-4371-81cd-8632bdd4cee9" containerName="ovn-config" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.625894 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="8be312cf-a7cb-4371-81cd-8632bdd4cee9" containerName="ovn-config" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.626737 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.629264 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.635137 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-v6vjx-config-7pp2n"] Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.700689 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/af056c90-c895-4fca-a4d3-72b81de41d45-var-run-ovn\") pod \"ovn-controller-v6vjx-config-7pp2n\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.700766 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtpvz\" (UniqueName: \"kubernetes.io/projected/af056c90-c895-4fca-a4d3-72b81de41d45-kube-api-access-mtpvz\") pod \"ovn-controller-v6vjx-config-7pp2n\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.700799 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/af056c90-c895-4fca-a4d3-72b81de41d45-var-run\") pod \"ovn-controller-v6vjx-config-7pp2n\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.700919 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/af056c90-c895-4fca-a4d3-72b81de41d45-additional-scripts\") pod \"ovn-controller-v6vjx-config-7pp2n\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.701007 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/configmap/af056c90-c895-4fca-a4d3-72b81de41d45-scripts\") pod \"ovn-controller-v6vjx-config-7pp2n\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.701112 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/af056c90-c895-4fca-a4d3-72b81de41d45-var-log-ovn\") pod \"ovn-controller-v6vjx-config-7pp2n\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.803441 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/af056c90-c895-4fca-a4d3-72b81de41d45-var-log-ovn\") pod \"ovn-controller-v6vjx-config-7pp2n\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.803518 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/af056c90-c895-4fca-a4d3-72b81de41d45-var-run-ovn\") pod \"ovn-controller-v6vjx-config-7pp2n\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.803557 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtpvz\" (UniqueName: \"kubernetes.io/projected/af056c90-c895-4fca-a4d3-72b81de41d45-kube-api-access-mtpvz\") pod \"ovn-controller-v6vjx-config-7pp2n\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.803582 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/af056c90-c895-4fca-a4d3-72b81de41d45-var-run\") pod \"ovn-controller-v6vjx-config-7pp2n\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.803873 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/af056c90-c895-4fca-a4d3-72b81de41d45-var-log-ovn\") pod \"ovn-controller-v6vjx-config-7pp2n\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.803909 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/af056c90-c895-4fca-a4d3-72b81de41d45-var-run\") pod \"ovn-controller-v6vjx-config-7pp2n\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.804015 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/af056c90-c895-4fca-a4d3-72b81de41d45-var-run-ovn\") pod \"ovn-controller-v6vjx-config-7pp2n\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.804027 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: 
\"kubernetes.io/configmap/af056c90-c895-4fca-a4d3-72b81de41d45-additional-scripts\") pod \"ovn-controller-v6vjx-config-7pp2n\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.804078 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/af056c90-c895-4fca-a4d3-72b81de41d45-scripts\") pod \"ovn-controller-v6vjx-config-7pp2n\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.804893 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/af056c90-c895-4fca-a4d3-72b81de41d45-additional-scripts\") pod \"ovn-controller-v6vjx-config-7pp2n\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.806104 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/af056c90-c895-4fca-a4d3-72b81de41d45-scripts\") pod \"ovn-controller-v6vjx-config-7pp2n\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.824031 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtpvz\" (UniqueName: \"kubernetes.io/projected/af056c90-c895-4fca-a4d3-72b81de41d45-kube-api-access-mtpvz\") pod \"ovn-controller-v6vjx-config-7pp2n\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:13 crc kubenswrapper[4773]: I0122 13:36:13.951721 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:14 crc kubenswrapper[4773]: I0122 13:36:14.467981 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-v6vjx-config-7pp2n"] Jan 22 13:36:14 crc kubenswrapper[4773]: I0122 13:36:14.673570 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8be312cf-a7cb-4371-81cd-8632bdd4cee9" path="/var/lib/kubelet/pods/8be312cf-a7cb-4371-81cd-8632bdd4cee9/volumes" Jan 22 13:36:14 crc kubenswrapper[4773]: I0122 13:36:14.948962 4773 generic.go:334] "Generic (PLEG): container finished" podID="af056c90-c895-4fca-a4d3-72b81de41d45" containerID="1192aad70877cc6f305e6f5eaab1aaef6d7d514ac3e5c98a96c80ac4978a8be3" exitCode=0 Jan 22 13:36:14 crc kubenswrapper[4773]: I0122 13:36:14.949055 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v6vjx-config-7pp2n" event={"ID":"af056c90-c895-4fca-a4d3-72b81de41d45","Type":"ContainerDied","Data":"1192aad70877cc6f305e6f5eaab1aaef6d7d514ac3e5c98a96c80ac4978a8be3"} Jan 22 13:36:14 crc kubenswrapper[4773]: I0122 13:36:14.949283 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v6vjx-config-7pp2n" event={"ID":"af056c90-c895-4fca-a4d3-72b81de41d45","Type":"ContainerStarted","Data":"29cd0ed6146c54ce576c6ad09f7196ececb5a3d3b77f0e434b9cc1c047fa8ad0"} Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.270624 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mhm89" Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.270938 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mhm89" Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.327563 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mhm89" Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.389677 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.582772 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/af056c90-c895-4fca-a4d3-72b81de41d45-var-run-ovn\") pod \"af056c90-c895-4fca-a4d3-72b81de41d45\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.582815 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/af056c90-c895-4fca-a4d3-72b81de41d45-var-run\") pod \"af056c90-c895-4fca-a4d3-72b81de41d45\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.582860 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/af056c90-c895-4fca-a4d3-72b81de41d45-var-log-ovn\") pod \"af056c90-c895-4fca-a4d3-72b81de41d45\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.582896 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af056c90-c895-4fca-a4d3-72b81de41d45-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "af056c90-c895-4fca-a4d3-72b81de41d45" (UID: "af056c90-c895-4fca-a4d3-72b81de41d45"). 
InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.582948 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtpvz\" (UniqueName: \"kubernetes.io/projected/af056c90-c895-4fca-a4d3-72b81de41d45-kube-api-access-mtpvz\") pod \"af056c90-c895-4fca-a4d3-72b81de41d45\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.582954 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af056c90-c895-4fca-a4d3-72b81de41d45-var-run" (OuterVolumeSpecName: "var-run") pod "af056c90-c895-4fca-a4d3-72b81de41d45" (UID: "af056c90-c895-4fca-a4d3-72b81de41d45"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.582980 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af056c90-c895-4fca-a4d3-72b81de41d45-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "af056c90-c895-4fca-a4d3-72b81de41d45" (UID: "af056c90-c895-4fca-a4d3-72b81de41d45"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.583013 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/af056c90-c895-4fca-a4d3-72b81de41d45-additional-scripts\") pod \"af056c90-c895-4fca-a4d3-72b81de41d45\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.583083 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/af056c90-c895-4fca-a4d3-72b81de41d45-scripts\") pod \"af056c90-c895-4fca-a4d3-72b81de41d45\" (UID: \"af056c90-c895-4fca-a4d3-72b81de41d45\") " Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.583572 4773 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/af056c90-c895-4fca-a4d3-72b81de41d45-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.583599 4773 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/af056c90-c895-4fca-a4d3-72b81de41d45-var-run\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.583611 4773 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/af056c90-c895-4fca-a4d3-72b81de41d45-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.583896 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af056c90-c895-4fca-a4d3-72b81de41d45-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "af056c90-c895-4fca-a4d3-72b81de41d45" (UID: "af056c90-c895-4fca-a4d3-72b81de41d45"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.584182 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af056c90-c895-4fca-a4d3-72b81de41d45-scripts" (OuterVolumeSpecName: "scripts") pod "af056c90-c895-4fca-a4d3-72b81de41d45" (UID: "af056c90-c895-4fca-a4d3-72b81de41d45"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.600464 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af056c90-c895-4fca-a4d3-72b81de41d45-kube-api-access-mtpvz" (OuterVolumeSpecName: "kube-api-access-mtpvz") pod "af056c90-c895-4fca-a4d3-72b81de41d45" (UID: "af056c90-c895-4fca-a4d3-72b81de41d45"). InnerVolumeSpecName "kube-api-access-mtpvz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.685343 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtpvz\" (UniqueName: \"kubernetes.io/projected/af056c90-c895-4fca-a4d3-72b81de41d45-kube-api-access-mtpvz\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.685385 4773 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/af056c90-c895-4fca-a4d3-72b81de41d45-additional-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.685404 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/af056c90-c895-4fca-a4d3-72b81de41d45-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.976787 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-v6vjx-config-7pp2n" event={"ID":"af056c90-c895-4fca-a4d3-72b81de41d45","Type":"ContainerDied","Data":"29cd0ed6146c54ce576c6ad09f7196ececb5a3d3b77f0e434b9cc1c047fa8ad0"} Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.977235 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29cd0ed6146c54ce576c6ad09f7196ececb5a3d3b77f0e434b9cc1c047fa8ad0" Jan 22 13:36:16 crc kubenswrapper[4773]: I0122 13:36:16.976821 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-v6vjx-config-7pp2n" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.032201 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mhm89" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.111307 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-rsyslog-b2pfg"] Jan 22 13:36:17 crc kubenswrapper[4773]: E0122 13:36:17.111781 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af056c90-c895-4fca-a4d3-72b81de41d45" containerName="ovn-config" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.111798 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="af056c90-c895-4fca-a4d3-72b81de41d45" containerName="ovn-config" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.112057 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="af056c90-c895-4fca-a4d3-72b81de41d45" containerName="ovn-config" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.113080 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-rsyslog-b2pfg" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.121330 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-config-data" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.121334 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"octavia-hmport-map" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.121722 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-scripts" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.149883 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mhm89"] Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.161460 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-b2pfg"] Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.299859 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef162fcd-6b27-4406-ac32-b6efa877ed0f-scripts\") pod \"octavia-rsyslog-b2pfg\" (UID: \"ef162fcd-6b27-4406-ac32-b6efa877ed0f\") " pod="openstack/octavia-rsyslog-b2pfg" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.299926 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef162fcd-6b27-4406-ac32-b6efa877ed0f-config-data\") pod \"octavia-rsyslog-b2pfg\" (UID: \"ef162fcd-6b27-4406-ac32-b6efa877ed0f\") " pod="openstack/octavia-rsyslog-b2pfg" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.300037 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/ef162fcd-6b27-4406-ac32-b6efa877ed0f-config-data-merged\") pod \"octavia-rsyslog-b2pfg\" (UID: \"ef162fcd-6b27-4406-ac32-b6efa877ed0f\") " pod="openstack/octavia-rsyslog-b2pfg" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.300084 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/ef162fcd-6b27-4406-ac32-b6efa877ed0f-hm-ports\") pod \"octavia-rsyslog-b2pfg\" (UID: \"ef162fcd-6b27-4406-ac32-b6efa877ed0f\") " pod="openstack/octavia-rsyslog-b2pfg" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.402405 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/ef162fcd-6b27-4406-ac32-b6efa877ed0f-hm-ports\") pod \"octavia-rsyslog-b2pfg\" (UID: \"ef162fcd-6b27-4406-ac32-b6efa877ed0f\") " pod="openstack/octavia-rsyslog-b2pfg" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.402511 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef162fcd-6b27-4406-ac32-b6efa877ed0f-scripts\") pod \"octavia-rsyslog-b2pfg\" (UID: \"ef162fcd-6b27-4406-ac32-b6efa877ed0f\") " pod="openstack/octavia-rsyslog-b2pfg" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.402569 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef162fcd-6b27-4406-ac32-b6efa877ed0f-config-data\") pod \"octavia-rsyslog-b2pfg\" (UID: \"ef162fcd-6b27-4406-ac32-b6efa877ed0f\") " pod="openstack/octavia-rsyslog-b2pfg" Jan 22 13:36:17 crc 
kubenswrapper[4773]: I0122 13:36:17.402685 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/ef162fcd-6b27-4406-ac32-b6efa877ed0f-config-data-merged\") pod \"octavia-rsyslog-b2pfg\" (UID: \"ef162fcd-6b27-4406-ac32-b6efa877ed0f\") " pod="openstack/octavia-rsyslog-b2pfg" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.403206 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/ef162fcd-6b27-4406-ac32-b6efa877ed0f-config-data-merged\") pod \"octavia-rsyslog-b2pfg\" (UID: \"ef162fcd-6b27-4406-ac32-b6efa877ed0f\") " pod="openstack/octavia-rsyslog-b2pfg" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.404026 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/ef162fcd-6b27-4406-ac32-b6efa877ed0f-hm-ports\") pod \"octavia-rsyslog-b2pfg\" (UID: \"ef162fcd-6b27-4406-ac32-b6efa877ed0f\") " pod="openstack/octavia-rsyslog-b2pfg" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.409136 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef162fcd-6b27-4406-ac32-b6efa877ed0f-scripts\") pod \"octavia-rsyslog-b2pfg\" (UID: \"ef162fcd-6b27-4406-ac32-b6efa877ed0f\") " pod="openstack/octavia-rsyslog-b2pfg" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.413751 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef162fcd-6b27-4406-ac32-b6efa877ed0f-config-data\") pod \"octavia-rsyslog-b2pfg\" (UID: \"ef162fcd-6b27-4406-ac32-b6efa877ed0f\") " pod="openstack/octavia-rsyslog-b2pfg" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.451601 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-rsyslog-b2pfg" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.497214 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-v6vjx-config-7pp2n"] Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.516441 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-v6vjx-config-7pp2n"] Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.967810 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-7b97d6bc64-cskrd"] Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.970340 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-7b97d6bc64-cskrd" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.974042 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data" Jan 22 13:36:17 crc kubenswrapper[4773]: I0122 13:36:17.981862 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-7b97d6bc64-cskrd"] Jan 22 13:36:18 crc kubenswrapper[4773]: I0122 13:36:18.118189 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/a56bc2b6-0e88-4554-8363-daaa7da2e303-amphora-image\") pod \"octavia-image-upload-7b97d6bc64-cskrd\" (UID: \"a56bc2b6-0e88-4554-8363-daaa7da2e303\") " pod="openstack/octavia-image-upload-7b97d6bc64-cskrd" Jan 22 13:36:18 crc kubenswrapper[4773]: I0122 13:36:18.118596 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a56bc2b6-0e88-4554-8363-daaa7da2e303-httpd-config\") pod \"octavia-image-upload-7b97d6bc64-cskrd\" (UID: \"a56bc2b6-0e88-4554-8363-daaa7da2e303\") " pod="openstack/octavia-image-upload-7b97d6bc64-cskrd" Jan 22 13:36:18 crc kubenswrapper[4773]: I0122 13:36:18.139567 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-b2pfg"] Jan 22 13:36:18 crc kubenswrapper[4773]: I0122 13:36:18.200129 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-b2pfg"] Jan 22 13:36:18 crc kubenswrapper[4773]: I0122 13:36:18.225856 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/a56bc2b6-0e88-4554-8363-daaa7da2e303-amphora-image\") pod \"octavia-image-upload-7b97d6bc64-cskrd\" (UID: \"a56bc2b6-0e88-4554-8363-daaa7da2e303\") " pod="openstack/octavia-image-upload-7b97d6bc64-cskrd" Jan 22 13:36:18 crc kubenswrapper[4773]: I0122 13:36:18.225996 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a56bc2b6-0e88-4554-8363-daaa7da2e303-httpd-config\") pod \"octavia-image-upload-7b97d6bc64-cskrd\" (UID: \"a56bc2b6-0e88-4554-8363-daaa7da2e303\") " pod="openstack/octavia-image-upload-7b97d6bc64-cskrd" Jan 22 13:36:18 crc kubenswrapper[4773]: I0122 13:36:18.226537 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/a56bc2b6-0e88-4554-8363-daaa7da2e303-amphora-image\") pod \"octavia-image-upload-7b97d6bc64-cskrd\" (UID: \"a56bc2b6-0e88-4554-8363-daaa7da2e303\") " pod="openstack/octavia-image-upload-7b97d6bc64-cskrd" Jan 22 13:36:18 crc kubenswrapper[4773]: I0122 13:36:18.237105 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a56bc2b6-0e88-4554-8363-daaa7da2e303-httpd-config\") pod \"octavia-image-upload-7b97d6bc64-cskrd\" (UID: \"a56bc2b6-0e88-4554-8363-daaa7da2e303\") " pod="openstack/octavia-image-upload-7b97d6bc64-cskrd" Jan 22 13:36:18 crc kubenswrapper[4773]: I0122 13:36:18.317106 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-7b97d6bc64-cskrd" Jan 22 13:36:18 crc kubenswrapper[4773]: I0122 13:36:18.668986 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af056c90-c895-4fca-a4d3-72b81de41d45" path="/var/lib/kubelet/pods/af056c90-c895-4fca-a4d3-72b81de41d45/volumes" Jan 22 13:36:18 crc kubenswrapper[4773]: W0122 13:36:18.856557 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda56bc2b6_0e88_4554_8363_daaa7da2e303.slice/crio-4338f81270540c42c3088f84ce34875c721c0511d14f106045170f71eb080572 WatchSource:0}: Error finding container 4338f81270540c42c3088f84ce34875c721c0511d14f106045170f71eb080572: Status 404 returned error can't find the container with id 4338f81270540c42c3088f84ce34875c721c0511d14f106045170f71eb080572 Jan 22 13:36:18 crc kubenswrapper[4773]: I0122 13:36:18.861004 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-7b97d6bc64-cskrd"] Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.018552 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-b2pfg" event={"ID":"ef162fcd-6b27-4406-ac32-b6efa877ed0f","Type":"ContainerStarted","Data":"9a8b3b1f2cda42bc8ed5bf0d3c3bf8017f3cde1cf925ae453daf67a16cdd17df"} Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.020765 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mhm89" podUID="bcbdf434-2048-47ad-935a-c7f4aea97f72" containerName="registry-server" containerID="cri-o://35c674aa52aae1bacd2dfccb0916d5113b2326330883dc5a604491992a877d02" gracePeriod=2 Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.020854 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-7b97d6bc64-cskrd" event={"ID":"a56bc2b6-0e88-4554-8363-daaa7da2e303","Type":"ContainerStarted","Data":"4338f81270540c42c3088f84ce34875c721c0511d14f106045170f71eb080572"} Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.465542 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-api-76b5ddccbc-vrfvq"] Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.468061 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.472742 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-octavia-internal-svc" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.476019 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-octavia-public-svc" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.488325 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-76b5ddccbc-vrfvq"] Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.568867 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-config-data\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.568938 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-public-tls-certs\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.569007 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-scripts\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.569040 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-ovndb-tls-certs\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.569067 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-combined-ca-bundle\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.569104 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-config-data-merged\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.569147 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-octavia-run\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.569176 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-internal-tls-certs\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.670978 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-scripts\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.671046 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-ovndb-tls-certs\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.671079 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-combined-ca-bundle\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.671119 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-config-data-merged\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.671157 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-octavia-run\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.671189 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-internal-tls-certs\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.671207 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-config-data\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.671251 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-public-tls-certs\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.672164 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"octavia-run\" (UniqueName: 
\"kubernetes.io/empty-dir/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-octavia-run\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.672299 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-config-data-merged\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.678915 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-internal-tls-certs\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.680018 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-combined-ca-bundle\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.684158 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-config-data\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.684759 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-public-tls-certs\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.685989 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-ovndb-tls-certs\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.693977 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/badfc18b-9cbe-4976-b7af-d6c1ec52a1c8-scripts\") pod \"octavia-api-76b5ddccbc-vrfvq\" (UID: \"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8\") " pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:19 crc kubenswrapper[4773]: I0122 13:36:19.788913 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:20 crc kubenswrapper[4773]: I0122 13:36:20.036673 4773 generic.go:334] "Generic (PLEG): container finished" podID="bcbdf434-2048-47ad-935a-c7f4aea97f72" containerID="35c674aa52aae1bacd2dfccb0916d5113b2326330883dc5a604491992a877d02" exitCode=0 Jan 22 13:36:20 crc kubenswrapper[4773]: I0122 13:36:20.036788 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mhm89" event={"ID":"bcbdf434-2048-47ad-935a-c7f4aea97f72","Type":"ContainerDied","Data":"35c674aa52aae1bacd2dfccb0916d5113b2326330883dc5a604491992a877d02"} Jan 22 13:36:20 crc kubenswrapper[4773]: I0122 13:36:20.037008 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mhm89" event={"ID":"bcbdf434-2048-47ad-935a-c7f4aea97f72","Type":"ContainerDied","Data":"072007fbc854fce7a4a8972baf1cc967312518f1361fc8c65311cb113711abcf"} Jan 22 13:36:20 crc kubenswrapper[4773]: I0122 13:36:20.037028 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="072007fbc854fce7a4a8972baf1cc967312518f1361fc8c65311cb113711abcf" Jan 22 13:36:20 crc kubenswrapper[4773]: I0122 13:36:20.104946 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mhm89" Jan 22 13:36:20 crc kubenswrapper[4773]: I0122 13:36:20.284390 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7dlf\" (UniqueName: \"kubernetes.io/projected/bcbdf434-2048-47ad-935a-c7f4aea97f72-kube-api-access-j7dlf\") pod \"bcbdf434-2048-47ad-935a-c7f4aea97f72\" (UID: \"bcbdf434-2048-47ad-935a-c7f4aea97f72\") " Jan 22 13:36:20 crc kubenswrapper[4773]: I0122 13:36:20.284466 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcbdf434-2048-47ad-935a-c7f4aea97f72-utilities\") pod \"bcbdf434-2048-47ad-935a-c7f4aea97f72\" (UID: \"bcbdf434-2048-47ad-935a-c7f4aea97f72\") " Jan 22 13:36:20 crc kubenswrapper[4773]: I0122 13:36:20.284497 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcbdf434-2048-47ad-935a-c7f4aea97f72-catalog-content\") pod \"bcbdf434-2048-47ad-935a-c7f4aea97f72\" (UID: \"bcbdf434-2048-47ad-935a-c7f4aea97f72\") " Jan 22 13:36:20 crc kubenswrapper[4773]: I0122 13:36:20.285882 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bcbdf434-2048-47ad-935a-c7f4aea97f72-utilities" (OuterVolumeSpecName: "utilities") pod "bcbdf434-2048-47ad-935a-c7f4aea97f72" (UID: "bcbdf434-2048-47ad-935a-c7f4aea97f72"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:36:20 crc kubenswrapper[4773]: I0122 13:36:20.289508 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcbdf434-2048-47ad-935a-c7f4aea97f72-kube-api-access-j7dlf" (OuterVolumeSpecName: "kube-api-access-j7dlf") pod "bcbdf434-2048-47ad-935a-c7f4aea97f72" (UID: "bcbdf434-2048-47ad-935a-c7f4aea97f72"). InnerVolumeSpecName "kube-api-access-j7dlf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:36:20 crc kubenswrapper[4773]: I0122 13:36:20.359672 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bcbdf434-2048-47ad-935a-c7f4aea97f72-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bcbdf434-2048-47ad-935a-c7f4aea97f72" (UID: "bcbdf434-2048-47ad-935a-c7f4aea97f72"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:36:20 crc kubenswrapper[4773]: I0122 13:36:20.388664 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7dlf\" (UniqueName: \"kubernetes.io/projected/bcbdf434-2048-47ad-935a-c7f4aea97f72-kube-api-access-j7dlf\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:20 crc kubenswrapper[4773]: I0122 13:36:20.389142 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bcbdf434-2048-47ad-935a-c7f4aea97f72-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:20 crc kubenswrapper[4773]: I0122 13:36:20.389157 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bcbdf434-2048-47ad-935a-c7f4aea97f72-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:20 crc kubenswrapper[4773]: I0122 13:36:20.497622 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-76b5ddccbc-vrfvq"] Jan 22 13:36:20 crc kubenswrapper[4773]: W0122 13:36:20.526087 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbadfc18b_9cbe_4976_b7af_d6c1ec52a1c8.slice/crio-e5cc3a402f67793013f8d4d7f2317f3fda1a7b068780674c6d901b608a648494 WatchSource:0}: Error finding container e5cc3a402f67793013f8d4d7f2317f3fda1a7b068780674c6d901b608a648494: Status 404 returned error can't find the container with id e5cc3a402f67793013f8d4d7f2317f3fda1a7b068780674c6d901b608a648494 Jan 22 13:36:21 crc kubenswrapper[4773]: I0122 13:36:21.045389 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-b2pfg" event={"ID":"ef162fcd-6b27-4406-ac32-b6efa877ed0f","Type":"ContainerStarted","Data":"96ce53358931205dc356ef58868d445530e499a9b99bb93cf1bfeca141d5b774"} Jan 22 13:36:21 crc kubenswrapper[4773]: I0122 13:36:21.048367 4773 generic.go:334] "Generic (PLEG): container finished" podID="badfc18b-9cbe-4976-b7af-d6c1ec52a1c8" containerID="92e15382a0d46cd37f9650e85e741059d9b00185e6fa02bc019422db381e8a8a" exitCode=0 Jan 22 13:36:21 crc kubenswrapper[4773]: I0122 13:36:21.048442 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mhm89" Jan 22 13:36:21 crc kubenswrapper[4773]: I0122 13:36:21.049373 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-76b5ddccbc-vrfvq" event={"ID":"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8","Type":"ContainerDied","Data":"92e15382a0d46cd37f9650e85e741059d9b00185e6fa02bc019422db381e8a8a"} Jan 22 13:36:21 crc kubenswrapper[4773]: I0122 13:36:21.049406 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-76b5ddccbc-vrfvq" event={"ID":"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8","Type":"ContainerStarted","Data":"e5cc3a402f67793013f8d4d7f2317f3fda1a7b068780674c6d901b608a648494"} Jan 22 13:36:21 crc kubenswrapper[4773]: I0122 13:36:21.107351 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mhm89"] Jan 22 13:36:21 crc kubenswrapper[4773]: I0122 13:36:21.123089 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mhm89"] Jan 22 13:36:22 crc kubenswrapper[4773]: I0122 13:36:22.066672 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-76b5ddccbc-vrfvq" event={"ID":"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8","Type":"ContainerStarted","Data":"74e978bb903a56a35dcd1b717822f07e9b0a32fc2f2950caa88ec995371c41c6"} Jan 22 13:36:22 crc kubenswrapper[4773]: I0122 13:36:22.673787 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcbdf434-2048-47ad-935a-c7f4aea97f72" path="/var/lib/kubelet/pods/bcbdf434-2048-47ad-935a-c7f4aea97f72/volumes" Jan 22 13:36:23 crc kubenswrapper[4773]: I0122 13:36:23.077495 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-76b5ddccbc-vrfvq" event={"ID":"badfc18b-9cbe-4976-b7af-d6c1ec52a1c8","Type":"ContainerStarted","Data":"df555fecd91107184d72db6d41831bd63bcbd4f75969b38651d1b8cd65a39658"} Jan 22 13:36:23 crc kubenswrapper[4773]: I0122 13:36:23.077836 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:23 crc kubenswrapper[4773]: I0122 13:36:23.077884 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-76b5ddccbc-vrfvq" Jan 22 13:36:23 crc kubenswrapper[4773]: I0122 13:36:23.115182 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-api-76b5ddccbc-vrfvq" podStartSLOduration=4.115157285 podStartE2EDuration="4.115157285s" podCreationTimestamp="2026-01-22 13:36:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:36:23.10183033 +0000 UTC m=+6090.679946155" watchObservedRunningTime="2026-01-22 13:36:23.115157285 +0000 UTC m=+6090.693273110" Jan 22 13:36:23 crc kubenswrapper[4773]: I0122 13:36:23.498574 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:36:23 crc kubenswrapper[4773]: I0122 13:36:23.776662 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:36:24 crc kubenswrapper[4773]: I0122 13:36:24.116270 4773 generic.go:334] "Generic (PLEG): container finished" podID="ef162fcd-6b27-4406-ac32-b6efa877ed0f" containerID="96ce53358931205dc356ef58868d445530e499a9b99bb93cf1bfeca141d5b774" exitCode=0 Jan 22 13:36:24 crc kubenswrapper[4773]: I0122 13:36:24.117213 4773 
Jan 22 13:36:27 crc kubenswrapper[4773]: I0122 13:36:27.742623 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-sync-b6m44"]
Jan 22 13:36:27 crc kubenswrapper[4773]: E0122 13:36:27.743445 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcbdf434-2048-47ad-935a-c7f4aea97f72" containerName="extract-utilities"
Jan 22 13:36:27 crc kubenswrapper[4773]: I0122 13:36:27.743461 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcbdf434-2048-47ad-935a-c7f4aea97f72" containerName="extract-utilities"
Jan 22 13:36:27 crc kubenswrapper[4773]: E0122 13:36:27.743477 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcbdf434-2048-47ad-935a-c7f4aea97f72" containerName="registry-server"
Jan 22 13:36:27 crc kubenswrapper[4773]: I0122 13:36:27.743485 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcbdf434-2048-47ad-935a-c7f4aea97f72" containerName="registry-server"
Jan 22 13:36:27 crc kubenswrapper[4773]: E0122 13:36:27.743497 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcbdf434-2048-47ad-935a-c7f4aea97f72" containerName="extract-content"
Jan 22 13:36:27 crc kubenswrapper[4773]: I0122 13:36:27.743504 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcbdf434-2048-47ad-935a-c7f4aea97f72" containerName="extract-content"
Jan 22 13:36:27 crc kubenswrapper[4773]: I0122 13:36:27.743696 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcbdf434-2048-47ad-935a-c7f4aea97f72" containerName="registry-server"
Jan 22 13:36:27 crc kubenswrapper[4773]: I0122 13:36:27.746086 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-b6m44"
Jan 22 13:36:27 crc kubenswrapper[4773]: I0122 13:36:27.748179 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-scripts"
Jan 22 13:36:27 crc kubenswrapper[4773]: I0122 13:36:27.771730 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-b6m44"]
Jan 22 13:36:27 crc kubenswrapper[4773]: I0122 13:36:27.890129 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35a5e8ef-a82d-4026-98c0-b57c0a107208-scripts\") pod \"octavia-db-sync-b6m44\" (UID: \"35a5e8ef-a82d-4026-98c0-b57c0a107208\") " pod="openstack/octavia-db-sync-b6m44"
Jan 22 13:36:27 crc kubenswrapper[4773]: I0122 13:36:27.890351 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/35a5e8ef-a82d-4026-98c0-b57c0a107208-config-data-merged\") pod \"octavia-db-sync-b6m44\" (UID: \"35a5e8ef-a82d-4026-98c0-b57c0a107208\") " pod="openstack/octavia-db-sync-b6m44"
Jan 22 13:36:27 crc kubenswrapper[4773]: I0122 13:36:27.890418 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35a5e8ef-a82d-4026-98c0-b57c0a107208-config-data\") pod \"octavia-db-sync-b6m44\" (UID: \"35a5e8ef-a82d-4026-98c0-b57c0a107208\") " pod="openstack/octavia-db-sync-b6m44"
Jan 22 13:36:27 crc kubenswrapper[4773]: I0122 13:36:27.890446 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35a5e8ef-a82d-4026-98c0-b57c0a107208-combined-ca-bundle\") pod \"octavia-db-sync-b6m44\" (UID: \"35a5e8ef-a82d-4026-98c0-b57c0a107208\") " pod="openstack/octavia-db-sync-b6m44"
Jan 22 13:36:27 crc kubenswrapper[4773]: I0122 13:36:27.992531 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35a5e8ef-a82d-4026-98c0-b57c0a107208-scripts\") pod \"octavia-db-sync-b6m44\" (UID: \"35a5e8ef-a82d-4026-98c0-b57c0a107208\") " pod="openstack/octavia-db-sync-b6m44"
Jan 22 13:36:27 crc kubenswrapper[4773]: I0122 13:36:27.992664 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/35a5e8ef-a82d-4026-98c0-b57c0a107208-config-data-merged\") pod \"octavia-db-sync-b6m44\" (UID: \"35a5e8ef-a82d-4026-98c0-b57c0a107208\") " pod="openstack/octavia-db-sync-b6m44"
Jan 22 13:36:27 crc kubenswrapper[4773]: I0122 13:36:27.992719 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35a5e8ef-a82d-4026-98c0-b57c0a107208-config-data\") pod \"octavia-db-sync-b6m44\" (UID: \"35a5e8ef-a82d-4026-98c0-b57c0a107208\") " pod="openstack/octavia-db-sync-b6m44"
Jan 22 13:36:27 crc kubenswrapper[4773]: I0122 13:36:27.992741 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35a5e8ef-a82d-4026-98c0-b57c0a107208-combined-ca-bundle\") pod \"octavia-db-sync-b6m44\" (UID: \"35a5e8ef-a82d-4026-98c0-b57c0a107208\") " pod="openstack/octavia-db-sync-b6m44"
Jan 22 13:36:27 crc kubenswrapper[4773]: I0122 13:36:27.996830 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/35a5e8ef-a82d-4026-98c0-b57c0a107208-config-data-merged\") pod \"octavia-db-sync-b6m44\" (UID: \"35a5e8ef-a82d-4026-98c0-b57c0a107208\") " pod="openstack/octavia-db-sync-b6m44"
Jan 22 13:36:28 crc kubenswrapper[4773]: I0122 13:36:27.999987 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35a5e8ef-a82d-4026-98c0-b57c0a107208-scripts\") pod \"octavia-db-sync-b6m44\" (UID: \"35a5e8ef-a82d-4026-98c0-b57c0a107208\") " pod="openstack/octavia-db-sync-b6m44"
Jan 22 13:36:28 crc kubenswrapper[4773]: I0122 13:36:28.000355 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35a5e8ef-a82d-4026-98c0-b57c0a107208-config-data\") pod \"octavia-db-sync-b6m44\" (UID: \"35a5e8ef-a82d-4026-98c0-b57c0a107208\") " pod="openstack/octavia-db-sync-b6m44"
Jan 22 13:36:28 crc kubenswrapper[4773]: I0122 13:36:28.001177 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35a5e8ef-a82d-4026-98c0-b57c0a107208-combined-ca-bundle\") pod \"octavia-db-sync-b6m44\" (UID: \"35a5e8ef-a82d-4026-98c0-b57c0a107208\") " pod="openstack/octavia-db-sync-b6m44"
Jan 22 13:36:28 crc kubenswrapper[4773]: I0122 13:36:28.073541 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-b6m44"
Jan 22 13:36:34 crc kubenswrapper[4773]: I0122 13:36:34.106420 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-b6m44"]
Jan 22 13:36:34 crc kubenswrapper[4773]: W0122 13:36:34.134347 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35a5e8ef_a82d_4026_98c0_b57c0a107208.slice/crio-5b24134d00b12ffb1521e2eeb2afd24943467791c555a6b9126076b51e156863 WatchSource:0}: Error finding container 5b24134d00b12ffb1521e2eeb2afd24943467791c555a6b9126076b51e156863: Status 404 returned error can't find the container with id 5b24134d00b12ffb1521e2eeb2afd24943467791c555a6b9126076b51e156863
Jan 22 13:36:34 crc kubenswrapper[4773]: I0122 13:36:34.223763 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-b6m44" event={"ID":"35a5e8ef-a82d-4026-98c0-b57c0a107208","Type":"ContainerStarted","Data":"5b24134d00b12ffb1521e2eeb2afd24943467791c555a6b9126076b51e156863"}
Jan 22 13:36:34 crc kubenswrapper[4773]: I0122 13:36:34.226813 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-b2pfg" event={"ID":"ef162fcd-6b27-4406-ac32-b6efa877ed0f","Type":"ContainerStarted","Data":"b528ca21d0770efb12618dbe931357f37d089eeecf8022ffe9109242b15ec06b"}
Jan 22 13:36:34 crc kubenswrapper[4773]: I0122 13:36:34.228455 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-rsyslog-b2pfg"
Jan 22 13:36:34 crc kubenswrapper[4773]: I0122 13:36:34.259514 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-rsyslog-b2pfg" podStartSLOduration=1.777927603 podStartE2EDuration="17.259420043s" podCreationTimestamp="2026-01-22 13:36:17 +0000 UTC" firstStartedPulling="2026-01-22 13:36:18.17900551 +0000 UTC m=+6085.757121335" lastFinishedPulling="2026-01-22 13:36:33.66049795 +0000 UTC m=+6101.238613775" observedRunningTime="2026-01-22 13:36:34.250523764 +0000 UTC m=+6101.828639599" watchObservedRunningTime="2026-01-22 13:36:34.259420043 +0000 UTC m=+6101.837535868"
Jan 22 13:36:35 crc kubenswrapper[4773]: I0122 13:36:35.244862 4773 generic.go:334] "Generic (PLEG): container finished" podID="35a5e8ef-a82d-4026-98c0-b57c0a107208" containerID="07dcdb7979eec8cfc876588c70072ee39491cbc7f126eed9df73aa0625955496" exitCode=0
Jan 22 13:36:35 crc kubenswrapper[4773]: I0122 13:36:35.247993 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-b6m44" event={"ID":"35a5e8ef-a82d-4026-98c0-b57c0a107208","Type":"ContainerDied","Data":"07dcdb7979eec8cfc876588c70072ee39491cbc7f126eed9df73aa0625955496"}
Jan 22 13:36:35 crc kubenswrapper[4773]: I0122 13:36:35.257692 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-7b97d6bc64-cskrd" event={"ID":"a56bc2b6-0e88-4554-8363-daaa7da2e303","Type":"ContainerStarted","Data":"801d03a433d790d75a6bae90e8b1d3e600689bab13bdd6aff4ef14a1894f6ee9"}
Jan 22 13:36:36 crc kubenswrapper[4773]: I0122 13:36:36.267130 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-b6m44" event={"ID":"35a5e8ef-a82d-4026-98c0-b57c0a107208","Type":"ContainerStarted","Data":"adf7f96ca2a96b386216f66a53081d021f5ddd46940334b8997eeb7eefb73342"}
Jan 22 13:36:37 crc kubenswrapper[4773]: I0122 13:36:37.299214 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-db-sync-b6m44" podStartSLOduration=10.299183568 podStartE2EDuration="10.299183568s" podCreationTimestamp="2026-01-22 13:36:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:36:37.296411311 +0000 UTC m=+6104.874527136" watchObservedRunningTime="2026-01-22 13:36:37.299183568 +0000 UTC m=+6104.877299423"
Jan 22 13:36:38 crc kubenswrapper[4773]: I0122 13:36:38.288421 4773 generic.go:334] "Generic (PLEG): container finished" podID="a56bc2b6-0e88-4554-8363-daaa7da2e303" containerID="801d03a433d790d75a6bae90e8b1d3e600689bab13bdd6aff4ef14a1894f6ee9" exitCode=0
Jan 22 13:36:38 crc kubenswrapper[4773]: I0122 13:36:38.288630 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-7b97d6bc64-cskrd" event={"ID":"a56bc2b6-0e88-4554-8363-daaa7da2e303","Type":"ContainerDied","Data":"801d03a433d790d75a6bae90e8b1d3e600689bab13bdd6aff4ef14a1894f6ee9"}
Jan 22 13:36:39 crc kubenswrapper[4773]: I0122 13:36:39.201205 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-76b5ddccbc-vrfvq"
Jan 22 13:36:39 crc kubenswrapper[4773]: I0122 13:36:39.291395 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-76b5ddccbc-vrfvq"
Jan 22 13:36:39 crc kubenswrapper[4773]: I0122 13:36:39.301193 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-7b97d6bc64-cskrd" event={"ID":"a56bc2b6-0e88-4554-8363-daaa7da2e303","Type":"ContainerStarted","Data":"5cf764e06007fe45597ac48a80ddabd1fb2d37aa95ab8a96f6425aefdfe6e041"}
Jan 22 13:36:39 crc kubenswrapper[4773]: I0122 13:36:39.382972 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-api-74488454c7-sdkm9"]
Jan 22 13:36:39 crc kubenswrapper[4773]: I0122 13:36:39.383299 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/octavia-api-74488454c7-sdkm9" podUID="00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" containerName="octavia-api" containerID="cri-o://e8a799e6fc3bf1f1549214561c6b824e54ea16e6885d81440547b01defaa27db" gracePeriod=30
Jan 22 13:36:39 crc kubenswrapper[4773]: I0122 13:36:39.383779 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/octavia-api-74488454c7-sdkm9" podUID="00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" containerName="octavia-api-provider-agent" containerID="cri-o://dadce33fddcefb0721ba0770213c04a775d253fcbea75e1b78dc7328b2b2daf0" gracePeriod=30
Jan 22 13:36:39 crc kubenswrapper[4773]: I0122 13:36:39.393803 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-7b97d6bc64-cskrd" podStartSLOduration=7.3155723120000005 podStartE2EDuration="22.393782738s" podCreationTimestamp="2026-01-22 13:36:17 +0000 UTC" firstStartedPulling="2026-01-22 13:36:18.859158756 +0000 UTC m=+6086.437274581" lastFinishedPulling="2026-01-22 13:36:33.937369182 +0000 UTC m=+6101.515485007" observedRunningTime="2026-01-22 13:36:39.363024884 +0000 UTC m=+6106.941140709" watchObservedRunningTime="2026-01-22 13:36:39.393782738 +0000 UTC m=+6106.971898563"
Jan 22 13:36:40 crc kubenswrapper[4773]: I0122 13:36:40.336779 4773 generic.go:334] "Generic (PLEG): container finished" podID="35a5e8ef-a82d-4026-98c0-b57c0a107208" containerID="adf7f96ca2a96b386216f66a53081d021f5ddd46940334b8997eeb7eefb73342" exitCode=0
Jan 22 13:36:40 crc kubenswrapper[4773]: I0122 13:36:40.336852 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-b6m44" event={"ID":"35a5e8ef-a82d-4026-98c0-b57c0a107208","Type":"ContainerDied","Data":"adf7f96ca2a96b386216f66a53081d021f5ddd46940334b8997eeb7eefb73342"}
Jan 22 13:36:40 crc kubenswrapper[4773]: I0122 13:36:40.344277 4773 generic.go:334] "Generic (PLEG): container finished" podID="00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" containerID="dadce33fddcefb0721ba0770213c04a775d253fcbea75e1b78dc7328b2b2daf0" exitCode=0
Jan 22 13:36:40 crc kubenswrapper[4773]: I0122 13:36:40.344323 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-74488454c7-sdkm9" event={"ID":"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95","Type":"ContainerDied","Data":"dadce33fddcefb0721ba0770213c04a775d253fcbea75e1b78dc7328b2b2daf0"}
Jan 22 13:36:41 crc kubenswrapper[4773]: I0122 13:36:41.818255 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-b6m44"
Need to start a new one" pod="openstack/octavia-db-sync-b6m44" Jan 22 13:36:41 crc kubenswrapper[4773]: I0122 13:36:41.893654 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35a5e8ef-a82d-4026-98c0-b57c0a107208-config-data\") pod \"35a5e8ef-a82d-4026-98c0-b57c0a107208\" (UID: \"35a5e8ef-a82d-4026-98c0-b57c0a107208\") " Jan 22 13:36:41 crc kubenswrapper[4773]: I0122 13:36:41.893994 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35a5e8ef-a82d-4026-98c0-b57c0a107208-combined-ca-bundle\") pod \"35a5e8ef-a82d-4026-98c0-b57c0a107208\" (UID: \"35a5e8ef-a82d-4026-98c0-b57c0a107208\") " Jan 22 13:36:41 crc kubenswrapper[4773]: I0122 13:36:41.894062 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/35a5e8ef-a82d-4026-98c0-b57c0a107208-config-data-merged\") pod \"35a5e8ef-a82d-4026-98c0-b57c0a107208\" (UID: \"35a5e8ef-a82d-4026-98c0-b57c0a107208\") " Jan 22 13:36:41 crc kubenswrapper[4773]: I0122 13:36:41.894126 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35a5e8ef-a82d-4026-98c0-b57c0a107208-scripts\") pod \"35a5e8ef-a82d-4026-98c0-b57c0a107208\" (UID: \"35a5e8ef-a82d-4026-98c0-b57c0a107208\") " Jan 22 13:36:41 crc kubenswrapper[4773]: I0122 13:36:41.912033 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35a5e8ef-a82d-4026-98c0-b57c0a107208-config-data" (OuterVolumeSpecName: "config-data") pod "35a5e8ef-a82d-4026-98c0-b57c0a107208" (UID: "35a5e8ef-a82d-4026-98c0-b57c0a107208"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:36:41 crc kubenswrapper[4773]: I0122 13:36:41.914513 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35a5e8ef-a82d-4026-98c0-b57c0a107208-scripts" (OuterVolumeSpecName: "scripts") pod "35a5e8ef-a82d-4026-98c0-b57c0a107208" (UID: "35a5e8ef-a82d-4026-98c0-b57c0a107208"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:36:41 crc kubenswrapper[4773]: I0122 13:36:41.928131 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35a5e8ef-a82d-4026-98c0-b57c0a107208-config-data-merged" (OuterVolumeSpecName: "config-data-merged") pod "35a5e8ef-a82d-4026-98c0-b57c0a107208" (UID: "35a5e8ef-a82d-4026-98c0-b57c0a107208"). InnerVolumeSpecName "config-data-merged". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:36:41 crc kubenswrapper[4773]: I0122 13:36:41.928980 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35a5e8ef-a82d-4026-98c0-b57c0a107208-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "35a5e8ef-a82d-4026-98c0-b57c0a107208" (UID: "35a5e8ef-a82d-4026-98c0-b57c0a107208"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:36:41 crc kubenswrapper[4773]: I0122 13:36:41.997451 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35a5e8ef-a82d-4026-98c0-b57c0a107208-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:41 crc kubenswrapper[4773]: I0122 13:36:41.997496 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/35a5e8ef-a82d-4026-98c0-b57c0a107208-config-data-merged\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:41 crc kubenswrapper[4773]: I0122 13:36:41.997505 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35a5e8ef-a82d-4026-98c0-b57c0a107208-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:41 crc kubenswrapper[4773]: I0122 13:36:41.997514 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35a5e8ef-a82d-4026-98c0-b57c0a107208-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:42 crc kubenswrapper[4773]: I0122 13:36:42.376604 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-b6m44" event={"ID":"35a5e8ef-a82d-4026-98c0-b57c0a107208","Type":"ContainerDied","Data":"5b24134d00b12ffb1521e2eeb2afd24943467791c555a6b9126076b51e156863"} Jan 22 13:36:42 crc kubenswrapper[4773]: I0122 13:36:42.376653 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b24134d00b12ffb1521e2eeb2afd24943467791c555a6b9126076b51e156863" Jan 22 13:36:42 crc kubenswrapper[4773]: I0122 13:36:42.376788 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-b6m44" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.089242 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.261070 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-scripts\") pod \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.261169 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-combined-ca-bundle\") pod \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.261468 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-config-data-merged\") pod \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.261532 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-config-data\") pod \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.261590 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-octavia-run\") pod \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.261618 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-ovndb-tls-certs\") pod \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\" (UID: \"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95\") " Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.263312 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-octavia-run" (OuterVolumeSpecName: "octavia-run") pod "00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" (UID: "00887776-2ef2-4c91-ac7f-d9ec0a5dbb95"). InnerVolumeSpecName "octavia-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.281402 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-config-data" (OuterVolumeSpecName: "config-data") pod "00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" (UID: "00887776-2ef2-4c91-ac7f-d9ec0a5dbb95"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.281674 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-scripts" (OuterVolumeSpecName: "scripts") pod "00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" (UID: "00887776-2ef2-4c91-ac7f-d9ec0a5dbb95"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.346789 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" (UID: "00887776-2ef2-4c91-ac7f-d9ec0a5dbb95"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.349742 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-config-data-merged" (OuterVolumeSpecName: "config-data-merged") pod "00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" (UID: "00887776-2ef2-4c91-ac7f-d9ec0a5dbb95"). InnerVolumeSpecName "config-data-merged". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.364979 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-config-data-merged\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.365019 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.365031 4773 reconciler_common.go:293] "Volume detached for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-octavia-run\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.365041 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.365053 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.396685 4773 generic.go:334] "Generic (PLEG): container finished" podID="00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" containerID="e8a799e6fc3bf1f1549214561c6b824e54ea16e6885d81440547b01defaa27db" exitCode=0 Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.396731 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-74488454c7-sdkm9" event={"ID":"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95","Type":"ContainerDied","Data":"e8a799e6fc3bf1f1549214561c6b824e54ea16e6885d81440547b01defaa27db"} Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.396762 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-74488454c7-sdkm9" event={"ID":"00887776-2ef2-4c91-ac7f-d9ec0a5dbb95","Type":"ContainerDied","Data":"380430e513d9041ba30b684cbbbc624a6ee20bd9322a2a4f4337f749b53436e8"} Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.396784 4773 scope.go:117] "RemoveContainer" containerID="dadce33fddcefb0721ba0770213c04a775d253fcbea75e1b78dc7328b2b2daf0" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.396943 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-api-74488454c7-sdkm9" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.449842 4773 scope.go:117] "RemoveContainer" containerID="e8a799e6fc3bf1f1549214561c6b824e54ea16e6885d81440547b01defaa27db" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.458135 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" (UID: "00887776-2ef2-4c91-ac7f-d9ec0a5dbb95"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.467842 4773 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.484246 4773 scope.go:117] "RemoveContainer" containerID="8e54d180288b9fcc8c34931642594f97e77e3d20ecd555ed0e6bc3c1e50309a2" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.515248 4773 scope.go:117] "RemoveContainer" containerID="dadce33fddcefb0721ba0770213c04a775d253fcbea75e1b78dc7328b2b2daf0" Jan 22 13:36:43 crc kubenswrapper[4773]: E0122 13:36:43.516100 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dadce33fddcefb0721ba0770213c04a775d253fcbea75e1b78dc7328b2b2daf0\": container with ID starting with dadce33fddcefb0721ba0770213c04a775d253fcbea75e1b78dc7328b2b2daf0 not found: ID does not exist" containerID="dadce33fddcefb0721ba0770213c04a775d253fcbea75e1b78dc7328b2b2daf0" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.516324 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dadce33fddcefb0721ba0770213c04a775d253fcbea75e1b78dc7328b2b2daf0"} err="failed to get container status \"dadce33fddcefb0721ba0770213c04a775d253fcbea75e1b78dc7328b2b2daf0\": rpc error: code = NotFound desc = could not find container \"dadce33fddcefb0721ba0770213c04a775d253fcbea75e1b78dc7328b2b2daf0\": container with ID starting with dadce33fddcefb0721ba0770213c04a775d253fcbea75e1b78dc7328b2b2daf0 not found: ID does not exist" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.516448 4773 scope.go:117] "RemoveContainer" containerID="e8a799e6fc3bf1f1549214561c6b824e54ea16e6885d81440547b01defaa27db" Jan 22 13:36:43 crc kubenswrapper[4773]: E0122 13:36:43.516974 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8a799e6fc3bf1f1549214561c6b824e54ea16e6885d81440547b01defaa27db\": container with ID starting with e8a799e6fc3bf1f1549214561c6b824e54ea16e6885d81440547b01defaa27db not found: ID does not exist" containerID="e8a799e6fc3bf1f1549214561c6b824e54ea16e6885d81440547b01defaa27db" Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.517103 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8a799e6fc3bf1f1549214561c6b824e54ea16e6885d81440547b01defaa27db"} err="failed to get container status \"e8a799e6fc3bf1f1549214561c6b824e54ea16e6885d81440547b01defaa27db\": rpc error: code = NotFound desc = could not find container \"e8a799e6fc3bf1f1549214561c6b824e54ea16e6885d81440547b01defaa27db\": container with ID starting with 
Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.517207 4773 scope.go:117] "RemoveContainer" containerID="8e54d180288b9fcc8c34931642594f97e77e3d20ecd555ed0e6bc3c1e50309a2"
Jan 22 13:36:43 crc kubenswrapper[4773]: E0122 13:36:43.517615 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e54d180288b9fcc8c34931642594f97e77e3d20ecd555ed0e6bc3c1e50309a2\": container with ID starting with 8e54d180288b9fcc8c34931642594f97e77e3d20ecd555ed0e6bc3c1e50309a2 not found: ID does not exist" containerID="8e54d180288b9fcc8c34931642594f97e77e3d20ecd555ed0e6bc3c1e50309a2"
Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.517636 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e54d180288b9fcc8c34931642594f97e77e3d20ecd555ed0e6bc3c1e50309a2"} err="failed to get container status \"8e54d180288b9fcc8c34931642594f97e77e3d20ecd555ed0e6bc3c1e50309a2\": rpc error: code = NotFound desc = could not find container \"8e54d180288b9fcc8c34931642594f97e77e3d20ecd555ed0e6bc3c1e50309a2\": container with ID starting with 8e54d180288b9fcc8c34931642594f97e77e3d20ecd555ed0e6bc3c1e50309a2 not found: ID does not exist"
Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.738949 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-api-74488454c7-sdkm9"]
Jan 22 13:36:43 crc kubenswrapper[4773]: I0122 13:36:43.751941 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-api-74488454c7-sdkm9"]
Jan 22 13:36:43 crc kubenswrapper[4773]: E0122 13:36:43.867296 4773 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00887776_2ef2_4c91_ac7f_d9ec0a5dbb95.slice/crio-380430e513d9041ba30b684cbbbc624a6ee20bd9322a2a4f4337f749b53436e8\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00887776_2ef2_4c91_ac7f_d9ec0a5dbb95.slice\": RecentStats: unable to find data in memory cache]"
Jan 22 13:36:44 crc kubenswrapper[4773]: I0122 13:36:44.670703 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" path="/var/lib/kubelet/pods/00887776-2ef2-4c91-ac7f-d9ec0a5dbb95/volumes"
Jan 22 13:36:47 crc kubenswrapper[4773]: I0122 13:36:47.488598 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-rsyslog-b2pfg"
Jan 22 13:37:02 crc kubenswrapper[4773]: I0122 13:37:02.166786 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-7b97d6bc64-cskrd"]
Jan 22 13:37:02 crc kubenswrapper[4773]: I0122 13:37:02.167757 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/octavia-image-upload-7b97d6bc64-cskrd" podUID="a56bc2b6-0e88-4554-8363-daaa7da2e303" containerName="octavia-amphora-httpd" containerID="cri-o://5cf764e06007fe45597ac48a80ddabd1fb2d37aa95ab8a96f6425aefdfe6e041" gracePeriod=30
Jan 22 13:37:02 crc kubenswrapper[4773]: I0122 13:37:02.576094 4773 generic.go:334] "Generic (PLEG): container finished" podID="a56bc2b6-0e88-4554-8363-daaa7da2e303" containerID="5cf764e06007fe45597ac48a80ddabd1fb2d37aa95ab8a96f6425aefdfe6e041" exitCode=0
Jan 22 13:37:02 crc kubenswrapper[4773]: I0122 13:37:02.576483 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-7b97d6bc64-cskrd" event={"ID":"a56bc2b6-0e88-4554-8363-daaa7da2e303","Type":"ContainerDied","Data":"5cf764e06007fe45597ac48a80ddabd1fb2d37aa95ab8a96f6425aefdfe6e041"}
Jan 22 13:37:02 crc kubenswrapper[4773]: I0122 13:37:02.669191 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-7b97d6bc64-cskrd"
Jan 22 13:37:02 crc kubenswrapper[4773]: I0122 13:37:02.787669 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/a56bc2b6-0e88-4554-8363-daaa7da2e303-amphora-image\") pod \"a56bc2b6-0e88-4554-8363-daaa7da2e303\" (UID: \"a56bc2b6-0e88-4554-8363-daaa7da2e303\") "
Jan 22 13:37:02 crc kubenswrapper[4773]: I0122 13:37:02.787742 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a56bc2b6-0e88-4554-8363-daaa7da2e303-httpd-config\") pod \"a56bc2b6-0e88-4554-8363-daaa7da2e303\" (UID: \"a56bc2b6-0e88-4554-8363-daaa7da2e303\") "
Jan 22 13:37:02 crc kubenswrapper[4773]: I0122 13:37:02.830402 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a56bc2b6-0e88-4554-8363-daaa7da2e303-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "a56bc2b6-0e88-4554-8363-daaa7da2e303" (UID: "a56bc2b6-0e88-4554-8363-daaa7da2e303"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 13:37:02 crc kubenswrapper[4773]: I0122 13:37:02.867531 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a56bc2b6-0e88-4554-8363-daaa7da2e303-amphora-image" (OuterVolumeSpecName: "amphora-image") pod "a56bc2b6-0e88-4554-8363-daaa7da2e303" (UID: "a56bc2b6-0e88-4554-8363-daaa7da2e303"). InnerVolumeSpecName "amphora-image". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 13:37:02 crc kubenswrapper[4773]: I0122 13:37:02.891248 4773 reconciler_common.go:293] "Volume detached for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/a56bc2b6-0e88-4554-8363-daaa7da2e303-amphora-image\") on node \"crc\" DevicePath \"\""
Jan 22 13:37:02 crc kubenswrapper[4773]: I0122 13:37:02.891299 4773 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a56bc2b6-0e88-4554-8363-daaa7da2e303-httpd-config\") on node \"crc\" DevicePath \"\""
Jan 22 13:37:03 crc kubenswrapper[4773]: I0122 13:37:03.607222 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-7b97d6bc64-cskrd" event={"ID":"a56bc2b6-0e88-4554-8363-daaa7da2e303","Type":"ContainerDied","Data":"4338f81270540c42c3088f84ce34875c721c0511d14f106045170f71eb080572"}
Jan 22 13:37:03 crc kubenswrapper[4773]: I0122 13:37:03.607317 4773 scope.go:117] "RemoveContainer" containerID="5cf764e06007fe45597ac48a80ddabd1fb2d37aa95ab8a96f6425aefdfe6e041"
Jan 22 13:37:03 crc kubenswrapper[4773]: I0122 13:37:03.607421 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-7b97d6bc64-cskrd"
Jan 22 13:37:03 crc kubenswrapper[4773]: I0122 13:37:03.644266 4773 scope.go:117] "RemoveContainer" containerID="801d03a433d790d75a6bae90e8b1d3e600689bab13bdd6aff4ef14a1894f6ee9"
Jan 22 13:37:03 crc kubenswrapper[4773]: I0122 13:37:03.644372 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-7b97d6bc64-cskrd"]
Jan 22 13:37:03 crc kubenswrapper[4773]: I0122 13:37:03.653333 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-image-upload-7b97d6bc64-cskrd"]
Jan 22 13:37:04 crc kubenswrapper[4773]: I0122 13:37:04.673484 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a56bc2b6-0e88-4554-8363-daaa7da2e303" path="/var/lib/kubelet/pods/a56bc2b6-0e88-4554-8363-daaa7da2e303/volumes"
Jan 22 13:37:06 crc kubenswrapper[4773]: I0122 13:37:06.068249 4773 scope.go:117] "RemoveContainer" containerID="b7cd7d8cef1d6acbb4108ee3c4bc3462ec68e2908d554f7ddfc9dcf2ae5e9196"
Jan 22 13:37:06 crc kubenswrapper[4773]: I0122 13:37:06.092581 4773 scope.go:117] "RemoveContainer" containerID="9a7665f59fb03a07996049d567d9ec7e30102b98ed27d9f32c5f60d6f423493e"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.129001 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-7b97d6bc64-rnpb2"]
Jan 22 13:37:08 crc kubenswrapper[4773]: E0122 13:37:08.129454 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a56bc2b6-0e88-4554-8363-daaa7da2e303" containerName="octavia-amphora-httpd"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.129467 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a56bc2b6-0e88-4554-8363-daaa7da2e303" containerName="octavia-amphora-httpd"
Jan 22 13:37:08 crc kubenswrapper[4773]: E0122 13:37:08.129480 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" containerName="init"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.129486 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" containerName="init"
Jan 22 13:37:08 crc kubenswrapper[4773]: E0122 13:37:08.129499 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" containerName="octavia-api-provider-agent"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.129505 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" containerName="octavia-api-provider-agent"
Jan 22 13:37:08 crc kubenswrapper[4773]: E0122 13:37:08.129521 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" containerName="octavia-api"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.129527 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" containerName="octavia-api"
Jan 22 13:37:08 crc kubenswrapper[4773]: E0122 13:37:08.129538 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35a5e8ef-a82d-4026-98c0-b57c0a107208" containerName="octavia-db-sync"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.129545 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="35a5e8ef-a82d-4026-98c0-b57c0a107208" containerName="octavia-db-sync"
Jan 22 13:37:08 crc kubenswrapper[4773]: E0122 13:37:08.129558 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a56bc2b6-0e88-4554-8363-daaa7da2e303" containerName="init"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.129564 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a56bc2b6-0e88-4554-8363-daaa7da2e303" containerName="init"
Jan 22 13:37:08 crc kubenswrapper[4773]: E0122 13:37:08.129583 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35a5e8ef-a82d-4026-98c0-b57c0a107208" containerName="init"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.129588 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="35a5e8ef-a82d-4026-98c0-b57c0a107208" containerName="init"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.129778 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="a56bc2b6-0e88-4554-8363-daaa7da2e303" containerName="octavia-amphora-httpd"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.129798 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" containerName="octavia-api-provider-agent"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.129817 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="35a5e8ef-a82d-4026-98c0-b57c0a107208" containerName="octavia-db-sync"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.129834 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="00887776-2ef2-4c91-ac7f-d9ec0a5dbb95" containerName="octavia-api"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.131034 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-7b97d6bc64-rnpb2"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.141953 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.143874 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-7b97d6bc64-rnpb2"]
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.193849 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cec4fc5b-d5a5-4197-8873-e93b2c58985b-httpd-config\") pod \"octavia-image-upload-7b97d6bc64-rnpb2\" (UID: \"cec4fc5b-d5a5-4197-8873-e93b2c58985b\") " pod="openstack/octavia-image-upload-7b97d6bc64-rnpb2"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.194230 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/cec4fc5b-d5a5-4197-8873-e93b2c58985b-amphora-image\") pod \"octavia-image-upload-7b97d6bc64-rnpb2\" (UID: \"cec4fc5b-d5a5-4197-8873-e93b2c58985b\") " pod="openstack/octavia-image-upload-7b97d6bc64-rnpb2"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.298637 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cec4fc5b-d5a5-4197-8873-e93b2c58985b-httpd-config\") pod \"octavia-image-upload-7b97d6bc64-rnpb2\" (UID: \"cec4fc5b-d5a5-4197-8873-e93b2c58985b\") " pod="openstack/octavia-image-upload-7b97d6bc64-rnpb2"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.298781 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/cec4fc5b-d5a5-4197-8873-e93b2c58985b-amphora-image\") pod \"octavia-image-upload-7b97d6bc64-rnpb2\" (UID: \"cec4fc5b-d5a5-4197-8873-e93b2c58985b\") " pod="openstack/octavia-image-upload-7b97d6bc64-rnpb2"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.299483 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/cec4fc5b-d5a5-4197-8873-e93b2c58985b-amphora-image\") pod \"octavia-image-upload-7b97d6bc64-rnpb2\" (UID: \"cec4fc5b-d5a5-4197-8873-e93b2c58985b\") " pod="openstack/octavia-image-upload-7b97d6bc64-rnpb2"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.318027 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cec4fc5b-d5a5-4197-8873-e93b2c58985b-httpd-config\") pod \"octavia-image-upload-7b97d6bc64-rnpb2\" (UID: \"cec4fc5b-d5a5-4197-8873-e93b2c58985b\") " pod="openstack/octavia-image-upload-7b97d6bc64-rnpb2"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.466082 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-7b97d6bc64-rnpb2"
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.965444 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-7b97d6bc64-rnpb2"]
Jan 22 13:37:08 crc kubenswrapper[4773]: I0122 13:37:08.966096 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 22 13:37:09 crc kubenswrapper[4773]: I0122 13:37:09.666988 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-7b97d6bc64-rnpb2" event={"ID":"cec4fc5b-d5a5-4197-8873-e93b2c58985b","Type":"ContainerStarted","Data":"4b6b5a3acc02b85e2a0741063cde151ffa7d85a8dfccf23ca5125d486ec59df1"}
Jan 22 13:37:09 crc kubenswrapper[4773]: I0122 13:37:09.667360 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-7b97d6bc64-rnpb2" event={"ID":"cec4fc5b-d5a5-4197-8873-e93b2c58985b","Type":"ContainerStarted","Data":"5fdfc4697095a608a582083f6bde45408ae348f4f3beaed9c75f141ae0b02933"}
Jan 22 13:37:11 crc kubenswrapper[4773]: I0122 13:37:11.686517 4773 generic.go:334] "Generic (PLEG): container finished" podID="cec4fc5b-d5a5-4197-8873-e93b2c58985b" containerID="4b6b5a3acc02b85e2a0741063cde151ffa7d85a8dfccf23ca5125d486ec59df1" exitCode=0
Jan 22 13:37:11 crc kubenswrapper[4773]: I0122 13:37:11.686587 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-7b97d6bc64-rnpb2" event={"ID":"cec4fc5b-d5a5-4197-8873-e93b2c58985b","Type":"ContainerDied","Data":"4b6b5a3acc02b85e2a0741063cde151ffa7d85a8dfccf23ca5125d486ec59df1"}
Jan 22 13:37:12 crc kubenswrapper[4773]: I0122 13:37:12.706961 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-7b97d6bc64-rnpb2" event={"ID":"cec4fc5b-d5a5-4197-8873-e93b2c58985b","Type":"ContainerStarted","Data":"93d1429f948e74c681f79123687bf3c0a12b020bd9911895de1b0c69fa41d799"}
Jan 22 13:37:12 crc kubenswrapper[4773]: I0122 13:37:12.732798 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-7b97d6bc64-rnpb2" podStartSLOduration=4.3059441419999995 podStartE2EDuration="4.732772508s" podCreationTimestamp="2026-01-22 13:37:08 +0000 UTC" firstStartedPulling="2026-01-22 13:37:08.965838755 +0000 UTC m=+6136.543954580" lastFinishedPulling="2026-01-22 13:37:09.392667121 +0000 UTC m=+6136.970782946" observedRunningTime="2026-01-22 13:37:12.725646467 +0000 UTC m=+6140.303762292" watchObservedRunningTime="2026-01-22 13:37:12.732772508 +0000 UTC 
m=+6140.310888333" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.655718 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-healthmanager-nj8qw"] Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.658743 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.661745 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-scripts" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.661800 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-config-data" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.661745 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-certs-secret" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.672016 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-nj8qw"] Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.792956 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/d8974658-8091-4884-ad44-7fe8331a02c9-config-data-merged\") pod \"octavia-healthmanager-nj8qw\" (UID: \"d8974658-8091-4884-ad44-7fe8331a02c9\") " pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.793225 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/d8974658-8091-4884-ad44-7fe8331a02c9-hm-ports\") pod \"octavia-healthmanager-nj8qw\" (UID: \"d8974658-8091-4884-ad44-7fe8331a02c9\") " pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.793277 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/d8974658-8091-4884-ad44-7fe8331a02c9-amphora-certs\") pod \"octavia-healthmanager-nj8qw\" (UID: \"d8974658-8091-4884-ad44-7fe8331a02c9\") " pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.793536 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8974658-8091-4884-ad44-7fe8331a02c9-combined-ca-bundle\") pod \"octavia-healthmanager-nj8qw\" (UID: \"d8974658-8091-4884-ad44-7fe8331a02c9\") " pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.793660 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8974658-8091-4884-ad44-7fe8331a02c9-scripts\") pod \"octavia-healthmanager-nj8qw\" (UID: \"d8974658-8091-4884-ad44-7fe8331a02c9\") " pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.793777 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8974658-8091-4884-ad44-7fe8331a02c9-config-data\") pod \"octavia-healthmanager-nj8qw\" (UID: \"d8974658-8091-4884-ad44-7fe8331a02c9\") " pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.896124 4773 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8974658-8091-4884-ad44-7fe8331a02c9-combined-ca-bundle\") pod \"octavia-healthmanager-nj8qw\" (UID: \"d8974658-8091-4884-ad44-7fe8331a02c9\") " pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.896241 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8974658-8091-4884-ad44-7fe8331a02c9-scripts\") pod \"octavia-healthmanager-nj8qw\" (UID: \"d8974658-8091-4884-ad44-7fe8331a02c9\") " pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.896576 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8974658-8091-4884-ad44-7fe8331a02c9-config-data\") pod \"octavia-healthmanager-nj8qw\" (UID: \"d8974658-8091-4884-ad44-7fe8331a02c9\") " pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.896676 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/d8974658-8091-4884-ad44-7fe8331a02c9-config-data-merged\") pod \"octavia-healthmanager-nj8qw\" (UID: \"d8974658-8091-4884-ad44-7fe8331a02c9\") " pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.896712 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/d8974658-8091-4884-ad44-7fe8331a02c9-hm-ports\") pod \"octavia-healthmanager-nj8qw\" (UID: \"d8974658-8091-4884-ad44-7fe8331a02c9\") " pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.896750 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/d8974658-8091-4884-ad44-7fe8331a02c9-amphora-certs\") pod \"octavia-healthmanager-nj8qw\" (UID: \"d8974658-8091-4884-ad44-7fe8331a02c9\") " pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.897917 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/d8974658-8091-4884-ad44-7fe8331a02c9-config-data-merged\") pod \"octavia-healthmanager-nj8qw\" (UID: \"d8974658-8091-4884-ad44-7fe8331a02c9\") " pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.900107 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/d8974658-8091-4884-ad44-7fe8331a02c9-hm-ports\") pod \"octavia-healthmanager-nj8qw\" (UID: \"d8974658-8091-4884-ad44-7fe8331a02c9\") " pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.903660 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8974658-8091-4884-ad44-7fe8331a02c9-config-data\") pod \"octavia-healthmanager-nj8qw\" (UID: \"d8974658-8091-4884-ad44-7fe8331a02c9\") " pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.903952 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8974658-8091-4884-ad44-7fe8331a02c9-scripts\") 
pod \"octavia-healthmanager-nj8qw\" (UID: \"d8974658-8091-4884-ad44-7fe8331a02c9\") " pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.904098 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/d8974658-8091-4884-ad44-7fe8331a02c9-amphora-certs\") pod \"octavia-healthmanager-nj8qw\" (UID: \"d8974658-8091-4884-ad44-7fe8331a02c9\") " pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.906398 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8974658-8091-4884-ad44-7fe8331a02c9-combined-ca-bundle\") pod \"octavia-healthmanager-nj8qw\" (UID: \"d8974658-8091-4884-ad44-7fe8331a02c9\") " pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:23 crc kubenswrapper[4773]: I0122 13:37:23.992023 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:24 crc kubenswrapper[4773]: I0122 13:37:24.551839 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-nj8qw"] Jan 22 13:37:24 crc kubenswrapper[4773]: W0122 13:37:24.554226 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd8974658_8091_4884_ad44_7fe8331a02c9.slice/crio-6b970cdcf1aa924f99a08e2cfc36df2fa82ab94ec4646298e2358050f44b34cb WatchSource:0}: Error finding container 6b970cdcf1aa924f99a08e2cfc36df2fa82ab94ec4646298e2358050f44b34cb: Status 404 returned error can't find the container with id 6b970cdcf1aa924f99a08e2cfc36df2fa82ab94ec4646298e2358050f44b34cb Jan 22 13:37:24 crc kubenswrapper[4773]: I0122 13:37:24.840863 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-nj8qw" event={"ID":"d8974658-8091-4884-ad44-7fe8331a02c9","Type":"ContainerStarted","Data":"6b970cdcf1aa924f99a08e2cfc36df2fa82ab94ec4646298e2358050f44b34cb"} Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.679799 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-housekeeping-9nwnm"] Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.682419 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.684703 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-scripts" Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.685463 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-config-data" Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.692037 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-9nwnm"] Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.836133 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/8ffe7dee-c700-48d6-8231-5d67ba8cd887-hm-ports\") pod \"octavia-housekeeping-9nwnm\" (UID: \"8ffe7dee-c700-48d6-8231-5d67ba8cd887\") " pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.836193 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ffe7dee-c700-48d6-8231-5d67ba8cd887-combined-ca-bundle\") pod \"octavia-housekeeping-9nwnm\" (UID: \"8ffe7dee-c700-48d6-8231-5d67ba8cd887\") " pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.836232 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ffe7dee-c700-48d6-8231-5d67ba8cd887-config-data\") pod \"octavia-housekeeping-9nwnm\" (UID: \"8ffe7dee-c700-48d6-8231-5d67ba8cd887\") " pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.836257 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/8ffe7dee-c700-48d6-8231-5d67ba8cd887-amphora-certs\") pod \"octavia-housekeeping-9nwnm\" (UID: \"8ffe7dee-c700-48d6-8231-5d67ba8cd887\") " pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.836342 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ffe7dee-c700-48d6-8231-5d67ba8cd887-scripts\") pod \"octavia-housekeeping-9nwnm\" (UID: \"8ffe7dee-c700-48d6-8231-5d67ba8cd887\") " pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.836441 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/8ffe7dee-c700-48d6-8231-5d67ba8cd887-config-data-merged\") pod \"octavia-housekeeping-9nwnm\" (UID: \"8ffe7dee-c700-48d6-8231-5d67ba8cd887\") " pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.852150 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-nj8qw" event={"ID":"d8974658-8091-4884-ad44-7fe8331a02c9","Type":"ContainerStarted","Data":"95a77138b976696a267353ff786a885d7a006722fc79af258fd8faed495b1e75"} Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.938657 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/8ffe7dee-c700-48d6-8231-5d67ba8cd887-config-data-merged\") 
pod \"octavia-housekeeping-9nwnm\" (UID: \"8ffe7dee-c700-48d6-8231-5d67ba8cd887\") " pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.938890 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/8ffe7dee-c700-48d6-8231-5d67ba8cd887-hm-ports\") pod \"octavia-housekeeping-9nwnm\" (UID: \"8ffe7dee-c700-48d6-8231-5d67ba8cd887\") " pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.938932 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ffe7dee-c700-48d6-8231-5d67ba8cd887-combined-ca-bundle\") pod \"octavia-housekeeping-9nwnm\" (UID: \"8ffe7dee-c700-48d6-8231-5d67ba8cd887\") " pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.938962 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ffe7dee-c700-48d6-8231-5d67ba8cd887-config-data\") pod \"octavia-housekeeping-9nwnm\" (UID: \"8ffe7dee-c700-48d6-8231-5d67ba8cd887\") " pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.939004 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/8ffe7dee-c700-48d6-8231-5d67ba8cd887-amphora-certs\") pod \"octavia-housekeeping-9nwnm\" (UID: \"8ffe7dee-c700-48d6-8231-5d67ba8cd887\") " pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.939071 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ffe7dee-c700-48d6-8231-5d67ba8cd887-scripts\") pod \"octavia-housekeeping-9nwnm\" (UID: \"8ffe7dee-c700-48d6-8231-5d67ba8cd887\") " pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.939546 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/8ffe7dee-c700-48d6-8231-5d67ba8cd887-config-data-merged\") pod \"octavia-housekeeping-9nwnm\" (UID: \"8ffe7dee-c700-48d6-8231-5d67ba8cd887\") " pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.942270 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/8ffe7dee-c700-48d6-8231-5d67ba8cd887-hm-ports\") pod \"octavia-housekeeping-9nwnm\" (UID: \"8ffe7dee-c700-48d6-8231-5d67ba8cd887\") " pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.945925 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ffe7dee-c700-48d6-8231-5d67ba8cd887-combined-ca-bundle\") pod \"octavia-housekeeping-9nwnm\" (UID: \"8ffe7dee-c700-48d6-8231-5d67ba8cd887\") " pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.946529 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ffe7dee-c700-48d6-8231-5d67ba8cd887-config-data\") pod \"octavia-housekeeping-9nwnm\" (UID: \"8ffe7dee-c700-48d6-8231-5d67ba8cd887\") " pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:25 crc 
kubenswrapper[4773]: I0122 13:37:25.947676 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/8ffe7dee-c700-48d6-8231-5d67ba8cd887-amphora-certs\") pod \"octavia-housekeeping-9nwnm\" (UID: \"8ffe7dee-c700-48d6-8231-5d67ba8cd887\") " pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:25 crc kubenswrapper[4773]: I0122 13:37:25.948823 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ffe7dee-c700-48d6-8231-5d67ba8cd887-scripts\") pod \"octavia-housekeeping-9nwnm\" (UID: \"8ffe7dee-c700-48d6-8231-5d67ba8cd887\") " pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.001015 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.626880 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-9nwnm"] Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.738202 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-worker-6vd2t"] Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.742027 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.749637 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-config-data" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.749637 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-scripts" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.750836 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-6vd2t"] Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.862639 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-9nwnm" event={"ID":"8ffe7dee-c700-48d6-8231-5d67ba8cd887","Type":"ContainerStarted","Data":"9649a40033cf613e6318902cebeb9aac2a1d192ea00bb84ceee12a2919275162"} Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.864496 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/d878cddf-50a9-4d56-b6ff-872694625d2e-config-data-merged\") pod \"octavia-worker-6vd2t\" (UID: \"d878cddf-50a9-4d56-b6ff-872694625d2e\") " pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.864551 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/d878cddf-50a9-4d56-b6ff-872694625d2e-hm-ports\") pod \"octavia-worker-6vd2t\" (UID: \"d878cddf-50a9-4d56-b6ff-872694625d2e\") " pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.864637 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d878cddf-50a9-4d56-b6ff-872694625d2e-scripts\") pod \"octavia-worker-6vd2t\" (UID: \"d878cddf-50a9-4d56-b6ff-872694625d2e\") " pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.864977 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d878cddf-50a9-4d56-b6ff-872694625d2e-combined-ca-bundle\") pod \"octavia-worker-6vd2t\" (UID: \"d878cddf-50a9-4d56-b6ff-872694625d2e\") " pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.865071 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d878cddf-50a9-4d56-b6ff-872694625d2e-config-data\") pod \"octavia-worker-6vd2t\" (UID: \"d878cddf-50a9-4d56-b6ff-872694625d2e\") " pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.865132 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/d878cddf-50a9-4d56-b6ff-872694625d2e-amphora-certs\") pod \"octavia-worker-6vd2t\" (UID: \"d878cddf-50a9-4d56-b6ff-872694625d2e\") " pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.967489 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d878cddf-50a9-4d56-b6ff-872694625d2e-config-data\") pod \"octavia-worker-6vd2t\" (UID: \"d878cddf-50a9-4d56-b6ff-872694625d2e\") " pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.967553 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/d878cddf-50a9-4d56-b6ff-872694625d2e-amphora-certs\") pod \"octavia-worker-6vd2t\" (UID: \"d878cddf-50a9-4d56-b6ff-872694625d2e\") " pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.967639 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/d878cddf-50a9-4d56-b6ff-872694625d2e-config-data-merged\") pod \"octavia-worker-6vd2t\" (UID: \"d878cddf-50a9-4d56-b6ff-872694625d2e\") " pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.967661 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/d878cddf-50a9-4d56-b6ff-872694625d2e-hm-ports\") pod \"octavia-worker-6vd2t\" (UID: \"d878cddf-50a9-4d56-b6ff-872694625d2e\") " pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.967706 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d878cddf-50a9-4d56-b6ff-872694625d2e-scripts\") pod \"octavia-worker-6vd2t\" (UID: \"d878cddf-50a9-4d56-b6ff-872694625d2e\") " pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.967817 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d878cddf-50a9-4d56-b6ff-872694625d2e-combined-ca-bundle\") pod \"octavia-worker-6vd2t\" (UID: \"d878cddf-50a9-4d56-b6ff-872694625d2e\") " pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.968188 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/d878cddf-50a9-4d56-b6ff-872694625d2e-config-data-merged\") pod \"octavia-worker-6vd2t\" (UID: \"d878cddf-50a9-4d56-b6ff-872694625d2e\") " pod="openstack/octavia-worker-6vd2t" Jan 
22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.968851 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/d878cddf-50a9-4d56-b6ff-872694625d2e-hm-ports\") pod \"octavia-worker-6vd2t\" (UID: \"d878cddf-50a9-4d56-b6ff-872694625d2e\") " pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.975198 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d878cddf-50a9-4d56-b6ff-872694625d2e-combined-ca-bundle\") pod \"octavia-worker-6vd2t\" (UID: \"d878cddf-50a9-4d56-b6ff-872694625d2e\") " pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.975485 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d878cddf-50a9-4d56-b6ff-872694625d2e-config-data\") pod \"octavia-worker-6vd2t\" (UID: \"d878cddf-50a9-4d56-b6ff-872694625d2e\") " pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.975662 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d878cddf-50a9-4d56-b6ff-872694625d2e-scripts\") pod \"octavia-worker-6vd2t\" (UID: \"d878cddf-50a9-4d56-b6ff-872694625d2e\") " pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:26 crc kubenswrapper[4773]: I0122 13:37:26.978065 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/d878cddf-50a9-4d56-b6ff-872694625d2e-amphora-certs\") pod \"octavia-worker-6vd2t\" (UID: \"d878cddf-50a9-4d56-b6ff-872694625d2e\") " pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:27 crc kubenswrapper[4773]: I0122 13:37:27.077910 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:27 crc kubenswrapper[4773]: I0122 13:37:27.885816 4773 generic.go:334] "Generic (PLEG): container finished" podID="d8974658-8091-4884-ad44-7fe8331a02c9" containerID="95a77138b976696a267353ff786a885d7a006722fc79af258fd8faed495b1e75" exitCode=0 Jan 22 13:37:27 crc kubenswrapper[4773]: I0122 13:37:27.886006 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-nj8qw" event={"ID":"d8974658-8091-4884-ad44-7fe8331a02c9","Type":"ContainerDied","Data":"95a77138b976696a267353ff786a885d7a006722fc79af258fd8faed495b1e75"} Jan 22 13:37:27 crc kubenswrapper[4773]: I0122 13:37:27.952710 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-6vd2t"] Jan 22 13:37:28 crc kubenswrapper[4773]: I0122 13:37:28.727280 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-nj8qw"] Jan 22 13:37:28 crc kubenswrapper[4773]: I0122 13:37:28.903872 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-6vd2t" event={"ID":"d878cddf-50a9-4d56-b6ff-872694625d2e","Type":"ContainerStarted","Data":"e437909960633d523d8d226909f67b7531ee48956b4db884f33142ba458e1014"} Jan 22 13:37:28 crc kubenswrapper[4773]: I0122 13:37:28.908721 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-nj8qw" event={"ID":"d8974658-8091-4884-ad44-7fe8331a02c9","Type":"ContainerStarted","Data":"4925d742788bc2a3993359a620b1e76cb271eb7de109c0cb51f410639ba2bf67"} Jan 22 13:37:28 crc kubenswrapper[4773]: I0122 13:37:28.913585 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:28 crc kubenswrapper[4773]: I0122 13:37:28.935051 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-healthmanager-nj8qw" podStartSLOduration=5.935029445 podStartE2EDuration="5.935029445s" podCreationTimestamp="2026-01-22 13:37:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:37:28.93309698 +0000 UTC m=+6156.511212825" watchObservedRunningTime="2026-01-22 13:37:28.935029445 +0000 UTC m=+6156.513145270" Jan 22 13:37:29 crc kubenswrapper[4773]: I0122 13:37:29.924470 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-9nwnm" event={"ID":"8ffe7dee-c700-48d6-8231-5d67ba8cd887","Type":"ContainerStarted","Data":"af91486b3c1f83e347599d09a8a99e8f39371eb87492b753ab32c4ddbf1a3c3d"} Jan 22 13:37:30 crc kubenswrapper[4773]: I0122 13:37:30.934661 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-6vd2t" event={"ID":"d878cddf-50a9-4d56-b6ff-872694625d2e","Type":"ContainerStarted","Data":"bf5f3b0fafa77e1c54e0d96437c5671d1d61b77576bfcd435be67eec0731c914"} Jan 22 13:37:30 crc kubenswrapper[4773]: I0122 13:37:30.939525 4773 generic.go:334] "Generic (PLEG): container finished" podID="8ffe7dee-c700-48d6-8231-5d67ba8cd887" containerID="af91486b3c1f83e347599d09a8a99e8f39371eb87492b753ab32c4ddbf1a3c3d" exitCode=0 Jan 22 13:37:30 crc kubenswrapper[4773]: I0122 13:37:30.940926 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-9nwnm" event={"ID":"8ffe7dee-c700-48d6-8231-5d67ba8cd887","Type":"ContainerDied","Data":"af91486b3c1f83e347599d09a8a99e8f39371eb87492b753ab32c4ddbf1a3c3d"} Jan 22 13:37:31 crc kubenswrapper[4773]: I0122 
13:37:31.951910 4773 generic.go:334] "Generic (PLEG): container finished" podID="d878cddf-50a9-4d56-b6ff-872694625d2e" containerID="bf5f3b0fafa77e1c54e0d96437c5671d1d61b77576bfcd435be67eec0731c914" exitCode=0 Jan 22 13:37:31 crc kubenswrapper[4773]: I0122 13:37:31.952098 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-6vd2t" event={"ID":"d878cddf-50a9-4d56-b6ff-872694625d2e","Type":"ContainerDied","Data":"bf5f3b0fafa77e1c54e0d96437c5671d1d61b77576bfcd435be67eec0731c914"} Jan 22 13:37:31 crc kubenswrapper[4773]: I0122 13:37:31.954822 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-9nwnm" event={"ID":"8ffe7dee-c700-48d6-8231-5d67ba8cd887","Type":"ContainerStarted","Data":"a7ff0ba75370cf97a1985b3b342d0200a7b2e5f03d1cfc3fbdfc379c10445324"} Jan 22 13:37:31 crc kubenswrapper[4773]: I0122 13:37:31.955133 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:32 crc kubenswrapper[4773]: I0122 13:37:32.017039 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-housekeeping-9nwnm" podStartSLOduration=4.956667778 podStartE2EDuration="7.017012186s" podCreationTimestamp="2026-01-22 13:37:25 +0000 UTC" firstStartedPulling="2026-01-22 13:37:26.639438165 +0000 UTC m=+6154.217553990" lastFinishedPulling="2026-01-22 13:37:28.699782573 +0000 UTC m=+6156.277898398" observedRunningTime="2026-01-22 13:37:32.006764538 +0000 UTC m=+6159.584880363" watchObservedRunningTime="2026-01-22 13:37:32.017012186 +0000 UTC m=+6159.595128011" Jan 22 13:37:32 crc kubenswrapper[4773]: I0122 13:37:32.966784 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-6vd2t" event={"ID":"d878cddf-50a9-4d56-b6ff-872694625d2e","Type":"ContainerStarted","Data":"9dbe058f0b0b6054f506627344a37c156396060273a1abb9047c0ded2e8acd95"} Jan 22 13:37:32 crc kubenswrapper[4773]: I0122 13:37:32.967111 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-worker-6vd2t" Jan 22 13:37:32 crc kubenswrapper[4773]: I0122 13:37:32.999052 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-worker-6vd2t" podStartSLOduration=5.046295832 podStartE2EDuration="6.999028316s" podCreationTimestamp="2026-01-22 13:37:26 +0000 UTC" firstStartedPulling="2026-01-22 13:37:27.984799767 +0000 UTC m=+6155.562915592" lastFinishedPulling="2026-01-22 13:37:29.937532251 +0000 UTC m=+6157.515648076" observedRunningTime="2026-01-22 13:37:32.988868671 +0000 UTC m=+6160.566984526" watchObservedRunningTime="2026-01-22 13:37:32.999028316 +0000 UTC m=+6160.577144141" Jan 22 13:37:34 crc kubenswrapper[4773]: I0122 13:37:34.074197 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:37:34 crc kubenswrapper[4773]: I0122 13:37:34.074768 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:37:39 crc kubenswrapper[4773]: I0122 13:37:39.026238 4773 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-healthmanager-nj8qw" Jan 22 13:37:41 crc kubenswrapper[4773]: I0122 13:37:41.030005 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-housekeeping-9nwnm" Jan 22 13:37:42 crc kubenswrapper[4773]: I0122 13:37:42.114506 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-worker-6vd2t" Jan 22 13:38:04 crc kubenswrapper[4773]: I0122 13:38:04.074406 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:38:04 crc kubenswrapper[4773]: I0122 13:38:04.076237 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:38:15 crc kubenswrapper[4773]: I0122 13:38:15.059235 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-kndjf"] Jan 22 13:38:15 crc kubenswrapper[4773]: I0122 13:38:15.072018 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-kndjf"] Jan 22 13:38:16 crc kubenswrapper[4773]: I0122 13:38:16.031029 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-2754-account-create-update-xqxq8"] Jan 22 13:38:16 crc kubenswrapper[4773]: I0122 13:38:16.042458 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-2754-account-create-update-xqxq8"] Jan 22 13:38:16 crc kubenswrapper[4773]: I0122 13:38:16.677160 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35c73005-43eb-4c3c-ab06-2a5fff27524d" path="/var/lib/kubelet/pods/35c73005-43eb-4c3c-ab06-2a5fff27524d/volumes" Jan 22 13:38:16 crc kubenswrapper[4773]: I0122 13:38:16.679055 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6147fb59-c7e4-4131-b427-9b9e121541a0" path="/var/lib/kubelet/pods/6147fb59-c7e4-4131-b427-9b9e121541a0/volumes" Jan 22 13:38:24 crc kubenswrapper[4773]: I0122 13:38:24.069841 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-5zkb4"] Jan 22 13:38:24 crc kubenswrapper[4773]: I0122 13:38:24.080035 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-5zkb4"] Jan 22 13:38:24 crc kubenswrapper[4773]: I0122 13:38:24.288142 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mjxnw"] Jan 22 13:38:24 crc kubenswrapper[4773]: I0122 13:38:24.291460 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mjxnw" Jan 22 13:38:24 crc kubenswrapper[4773]: I0122 13:38:24.303519 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mjxnw"] Jan 22 13:38:24 crc kubenswrapper[4773]: I0122 13:38:24.444132 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xv66\" (UniqueName: \"kubernetes.io/projected/97e04793-9076-4f7a-8228-d9fabee0b2a7-kube-api-access-2xv66\") pod \"redhat-marketplace-mjxnw\" (UID: \"97e04793-9076-4f7a-8228-d9fabee0b2a7\") " pod="openshift-marketplace/redhat-marketplace-mjxnw" Jan 22 13:38:24 crc kubenswrapper[4773]: I0122 13:38:24.444662 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97e04793-9076-4f7a-8228-d9fabee0b2a7-catalog-content\") pod \"redhat-marketplace-mjxnw\" (UID: \"97e04793-9076-4f7a-8228-d9fabee0b2a7\") " pod="openshift-marketplace/redhat-marketplace-mjxnw" Jan 22 13:38:24 crc kubenswrapper[4773]: I0122 13:38:24.444869 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97e04793-9076-4f7a-8228-d9fabee0b2a7-utilities\") pod \"redhat-marketplace-mjxnw\" (UID: \"97e04793-9076-4f7a-8228-d9fabee0b2a7\") " pod="openshift-marketplace/redhat-marketplace-mjxnw" Jan 22 13:38:24 crc kubenswrapper[4773]: I0122 13:38:24.548348 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97e04793-9076-4f7a-8228-d9fabee0b2a7-utilities\") pod \"redhat-marketplace-mjxnw\" (UID: \"97e04793-9076-4f7a-8228-d9fabee0b2a7\") " pod="openshift-marketplace/redhat-marketplace-mjxnw" Jan 22 13:38:24 crc kubenswrapper[4773]: I0122 13:38:24.548467 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xv66\" (UniqueName: \"kubernetes.io/projected/97e04793-9076-4f7a-8228-d9fabee0b2a7-kube-api-access-2xv66\") pod \"redhat-marketplace-mjxnw\" (UID: \"97e04793-9076-4f7a-8228-d9fabee0b2a7\") " pod="openshift-marketplace/redhat-marketplace-mjxnw" Jan 22 13:38:24 crc kubenswrapper[4773]: I0122 13:38:24.548564 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97e04793-9076-4f7a-8228-d9fabee0b2a7-catalog-content\") pod \"redhat-marketplace-mjxnw\" (UID: \"97e04793-9076-4f7a-8228-d9fabee0b2a7\") " pod="openshift-marketplace/redhat-marketplace-mjxnw" Jan 22 13:38:24 crc kubenswrapper[4773]: I0122 13:38:24.548945 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97e04793-9076-4f7a-8228-d9fabee0b2a7-utilities\") pod \"redhat-marketplace-mjxnw\" (UID: \"97e04793-9076-4f7a-8228-d9fabee0b2a7\") " pod="openshift-marketplace/redhat-marketplace-mjxnw" Jan 22 13:38:24 crc kubenswrapper[4773]: I0122 13:38:24.548963 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97e04793-9076-4f7a-8228-d9fabee0b2a7-catalog-content\") pod \"redhat-marketplace-mjxnw\" (UID: \"97e04793-9076-4f7a-8228-d9fabee0b2a7\") " pod="openshift-marketplace/redhat-marketplace-mjxnw" Jan 22 13:38:24 crc kubenswrapper[4773]: I0122 13:38:24.568085 4773 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-2xv66\" (UniqueName: \"kubernetes.io/projected/97e04793-9076-4f7a-8228-d9fabee0b2a7-kube-api-access-2xv66\") pod \"redhat-marketplace-mjxnw\" (UID: \"97e04793-9076-4f7a-8228-d9fabee0b2a7\") " pod="openshift-marketplace/redhat-marketplace-mjxnw" Jan 22 13:38:24 crc kubenswrapper[4773]: I0122 13:38:24.670411 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mjxnw" Jan 22 13:38:24 crc kubenswrapper[4773]: I0122 13:38:24.677504 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bff5894d-1433-4016-9be9-e42afa5f8061" path="/var/lib/kubelet/pods/bff5894d-1433-4016-9be9-e42afa5f8061/volumes" Jan 22 13:38:25 crc kubenswrapper[4773]: W0122 13:38:25.181332 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod97e04793_9076_4f7a_8228_d9fabee0b2a7.slice/crio-4e92822b3d58969b15f2df94253af65360f27f5725e6853cec8851755479d15d WatchSource:0}: Error finding container 4e92822b3d58969b15f2df94253af65360f27f5725e6853cec8851755479d15d: Status 404 returned error can't find the container with id 4e92822b3d58969b15f2df94253af65360f27f5725e6853cec8851755479d15d Jan 22 13:38:25 crc kubenswrapper[4773]: I0122 13:38:25.185174 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mjxnw"] Jan 22 13:38:25 crc kubenswrapper[4773]: I0122 13:38:25.524250 4773 generic.go:334] "Generic (PLEG): container finished" podID="97e04793-9076-4f7a-8228-d9fabee0b2a7" containerID="22a1792171aa33a197b3fe662493d1a5c91620fd17f290e6fbaf9e98b07a9daf" exitCode=0 Jan 22 13:38:25 crc kubenswrapper[4773]: I0122 13:38:25.524323 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjxnw" event={"ID":"97e04793-9076-4f7a-8228-d9fabee0b2a7","Type":"ContainerDied","Data":"22a1792171aa33a197b3fe662493d1a5c91620fd17f290e6fbaf9e98b07a9daf"} Jan 22 13:38:25 crc kubenswrapper[4773]: I0122 13:38:25.524364 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjxnw" event={"ID":"97e04793-9076-4f7a-8228-d9fabee0b2a7","Type":"ContainerStarted","Data":"4e92822b3d58969b15f2df94253af65360f27f5725e6853cec8851755479d15d"} Jan 22 13:38:27 crc kubenswrapper[4773]: I0122 13:38:27.548305 4773 generic.go:334] "Generic (PLEG): container finished" podID="97e04793-9076-4f7a-8228-d9fabee0b2a7" containerID="c62aaca4eab9778c9c7da18d8c9baa5125cf45f977077d6b4112d9a4a4be369c" exitCode=0 Jan 22 13:38:27 crc kubenswrapper[4773]: I0122 13:38:27.548431 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjxnw" event={"ID":"97e04793-9076-4f7a-8228-d9fabee0b2a7","Type":"ContainerDied","Data":"c62aaca4eab9778c9c7da18d8c9baa5125cf45f977077d6b4112d9a4a4be369c"} Jan 22 13:38:28 crc kubenswrapper[4773]: I0122 13:38:28.564657 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjxnw" event={"ID":"97e04793-9076-4f7a-8228-d9fabee0b2a7","Type":"ContainerStarted","Data":"efc8f7bc9dead4cfa8c5c24b024434ac3b18ae505f17de7cae901a34cde18e34"} Jan 22 13:38:28 crc kubenswrapper[4773]: I0122 13:38:28.593113 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mjxnw" podStartSLOduration=2.178224308 podStartE2EDuration="4.593086941s" podCreationTimestamp="2026-01-22 13:38:24 +0000 UTC" 
firstStartedPulling="2026-01-22 13:38:25.527137299 +0000 UTC m=+6213.105253134" lastFinishedPulling="2026-01-22 13:38:27.941999932 +0000 UTC m=+6215.520115767" observedRunningTime="2026-01-22 13:38:28.586232309 +0000 UTC m=+6216.164348144" watchObservedRunningTime="2026-01-22 13:38:28.593086941 +0000 UTC m=+6216.171202766" Jan 22 13:38:34 crc kubenswrapper[4773]: I0122 13:38:34.074692 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:38:34 crc kubenswrapper[4773]: I0122 13:38:34.075223 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:38:34 crc kubenswrapper[4773]: I0122 13:38:34.075306 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 13:38:34 crc kubenswrapper[4773]: I0122 13:38:34.077245 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cd11c4ae3e84c7a53f963cff7bf88747afac3c498d4204f8269ad676fea8c32b"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 13:38:34 crc kubenswrapper[4773]: I0122 13:38:34.077377 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://cd11c4ae3e84c7a53f963cff7bf88747afac3c498d4204f8269ad676fea8c32b" gracePeriod=600 Jan 22 13:38:34 crc kubenswrapper[4773]: I0122 13:38:34.626351 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="cd11c4ae3e84c7a53f963cff7bf88747afac3c498d4204f8269ad676fea8c32b" exitCode=0 Jan 22 13:38:34 crc kubenswrapper[4773]: I0122 13:38:34.626444 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"cd11c4ae3e84c7a53f963cff7bf88747afac3c498d4204f8269ad676fea8c32b"} Jan 22 13:38:34 crc kubenswrapper[4773]: I0122 13:38:34.626707 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810"} Jan 22 13:38:34 crc kubenswrapper[4773]: I0122 13:38:34.626753 4773 scope.go:117] "RemoveContainer" containerID="e34abb69c4e2bac08a9ab013178750d688245c62a98a7aceee58a77b561e7506" Jan 22 13:38:34 crc kubenswrapper[4773]: I0122 13:38:34.671703 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mjxnw" Jan 22 13:38:34 crc kubenswrapper[4773]: I0122 13:38:34.671789 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-mjxnw" Jan 22 13:38:34 crc kubenswrapper[4773]: I0122 13:38:34.735080 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mjxnw" Jan 22 13:38:35 crc kubenswrapper[4773]: I0122 13:38:35.741505 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mjxnw" Jan 22 13:38:35 crc kubenswrapper[4773]: I0122 13:38:35.823363 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mjxnw"] Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.374814 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7558bc6d45-42mbj"] Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.383413 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.390325 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.390633 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.394910 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-rv69l" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.396768 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.408547 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7558bc6d45-42mbj"] Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.438441 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.438749 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="fa5d8b60-5b94-446a-83df-e780e1d74a73" containerName="glance-log" containerID="cri-o://c970860f98f655be88245c885168058e0391fa019530eda57b32e8f4dc167366" gracePeriod=30 Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.439279 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="fa5d8b60-5b94-446a-83df-e780e1d74a73" containerName="glance-httpd" containerID="cri-o://8eda4a136c8d5dd95e331ab944bd359cdd4640180be8b037255ea110865bd120" gracePeriod=30 Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.468277 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5f369587-17dc-4fa8-9aac-5155d9ae0738-config-data\") pod \"horizon-7558bc6d45-42mbj\" (UID: \"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.468349 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hdh4\" (UniqueName: \"kubernetes.io/projected/5f369587-17dc-4fa8-9aac-5155d9ae0738-kube-api-access-2hdh4\") pod \"horizon-7558bc6d45-42mbj\" (UID: \"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.468394 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5f369587-17dc-4fa8-9aac-5155d9ae0738-horizon-secret-key\") pod \"horizon-7558bc6d45-42mbj\" (UID: \"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.468504 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f369587-17dc-4fa8-9aac-5155d9ae0738-scripts\") pod \"horizon-7558bc6d45-42mbj\" (UID: \"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.468568 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f369587-17dc-4fa8-9aac-5155d9ae0738-logs\") pod \"horizon-7558bc6d45-42mbj\" (UID: \"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.507749 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-56b949bc6f-hxx8c"] Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.509575 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.519886 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.520145 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0440d00c-cf93-434f-b150-00fba0e9b5c3" containerName="glance-log" containerID="cri-o://817efdefeb30cbe4de69662d0cee72869a434168ee1e7164da620008c9ce5008" gracePeriod=30 Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.520329 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0440d00c-cf93-434f-b150-00fba0e9b5c3" containerName="glance-httpd" containerID="cri-o://5754b752babac5a412b9225d49fdd69116f56d67d9b2cb14e51416eeba7ad27d" gracePeriod=30 Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.528463 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-56b949bc6f-hxx8c"] Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.571185 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5f369587-17dc-4fa8-9aac-5155d9ae0738-config-data\") pod \"horizon-7558bc6d45-42mbj\" (UID: \"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.571234 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hdh4\" (UniqueName: \"kubernetes.io/projected/5f369587-17dc-4fa8-9aac-5155d9ae0738-kube-api-access-2hdh4\") pod \"horizon-7558bc6d45-42mbj\" (UID: \"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.571263 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5f369587-17dc-4fa8-9aac-5155d9ae0738-horizon-secret-key\") pod \"horizon-7558bc6d45-42mbj\" (UID: 
\"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.571345 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f369587-17dc-4fa8-9aac-5155d9ae0738-scripts\") pod \"horizon-7558bc6d45-42mbj\" (UID: \"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.571389 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f369587-17dc-4fa8-9aac-5155d9ae0738-logs\") pod \"horizon-7558bc6d45-42mbj\" (UID: \"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.571851 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f369587-17dc-4fa8-9aac-5155d9ae0738-logs\") pod \"horizon-7558bc6d45-42mbj\" (UID: \"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.573381 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f369587-17dc-4fa8-9aac-5155d9ae0738-scripts\") pod \"horizon-7558bc6d45-42mbj\" (UID: \"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.580858 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5f369587-17dc-4fa8-9aac-5155d9ae0738-horizon-secret-key\") pod \"horizon-7558bc6d45-42mbj\" (UID: \"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.582213 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5f369587-17dc-4fa8-9aac-5155d9ae0738-config-data\") pod \"horizon-7558bc6d45-42mbj\" (UID: \"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.596028 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hdh4\" (UniqueName: \"kubernetes.io/projected/5f369587-17dc-4fa8-9aac-5155d9ae0738-kube-api-access-2hdh4\") pod \"horizon-7558bc6d45-42mbj\" (UID: \"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.673690 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvd77\" (UniqueName: \"kubernetes.io/projected/a31568c1-96aa-48db-9a76-0ec13e7cae6a-kube-api-access-kvd77\") pod \"horizon-56b949bc6f-hxx8c\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.673768 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a31568c1-96aa-48db-9a76-0ec13e7cae6a-horizon-secret-key\") pod \"horizon-56b949bc6f-hxx8c\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.673820 4773 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a31568c1-96aa-48db-9a76-0ec13e7cae6a-config-data\") pod \"horizon-56b949bc6f-hxx8c\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.673844 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a31568c1-96aa-48db-9a76-0ec13e7cae6a-logs\") pod \"horizon-56b949bc6f-hxx8c\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.673979 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a31568c1-96aa-48db-9a76-0ec13e7cae6a-scripts\") pod \"horizon-56b949bc6f-hxx8c\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.683605 4773 generic.go:334] "Generic (PLEG): container finished" podID="0440d00c-cf93-434f-b150-00fba0e9b5c3" containerID="817efdefeb30cbe4de69662d0cee72869a434168ee1e7164da620008c9ce5008" exitCode=143 Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.683698 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0440d00c-cf93-434f-b150-00fba0e9b5c3","Type":"ContainerDied","Data":"817efdefeb30cbe4de69662d0cee72869a434168ee1e7164da620008c9ce5008"} Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.686086 4773 generic.go:334] "Generic (PLEG): container finished" podID="fa5d8b60-5b94-446a-83df-e780e1d74a73" containerID="c970860f98f655be88245c885168058e0391fa019530eda57b32e8f4dc167366" exitCode=143 Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.686160 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fa5d8b60-5b94-446a-83df-e780e1d74a73","Type":"ContainerDied","Data":"c970860f98f655be88245c885168058e0391fa019530eda57b32e8f4dc167366"} Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.686370 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mjxnw" podUID="97e04793-9076-4f7a-8228-d9fabee0b2a7" containerName="registry-server" containerID="cri-o://efc8f7bc9dead4cfa8c5c24b024434ac3b18ae505f17de7cae901a34cde18e34" gracePeriod=2 Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.709180 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.775395 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a31568c1-96aa-48db-9a76-0ec13e7cae6a-scripts\") pod \"horizon-56b949bc6f-hxx8c\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.775758 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvd77\" (UniqueName: \"kubernetes.io/projected/a31568c1-96aa-48db-9a76-0ec13e7cae6a-kube-api-access-kvd77\") pod \"horizon-56b949bc6f-hxx8c\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.775789 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a31568c1-96aa-48db-9a76-0ec13e7cae6a-horizon-secret-key\") pod \"horizon-56b949bc6f-hxx8c\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.775824 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a31568c1-96aa-48db-9a76-0ec13e7cae6a-config-data\") pod \"horizon-56b949bc6f-hxx8c\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.775842 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a31568c1-96aa-48db-9a76-0ec13e7cae6a-logs\") pod \"horizon-56b949bc6f-hxx8c\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.777370 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a31568c1-96aa-48db-9a76-0ec13e7cae6a-scripts\") pod \"horizon-56b949bc6f-hxx8c\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.780926 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a31568c1-96aa-48db-9a76-0ec13e7cae6a-config-data\") pod \"horizon-56b949bc6f-hxx8c\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.781634 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a31568c1-96aa-48db-9a76-0ec13e7cae6a-logs\") pod \"horizon-56b949bc6f-hxx8c\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.787667 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a31568c1-96aa-48db-9a76-0ec13e7cae6a-horizon-secret-key\") pod \"horizon-56b949bc6f-hxx8c\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.794639 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-kvd77\" (UniqueName: \"kubernetes.io/projected/a31568c1-96aa-48db-9a76-0ec13e7cae6a-kube-api-access-kvd77\") pod \"horizon-56b949bc6f-hxx8c\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:38:37 crc kubenswrapper[4773]: I0122 13:38:37.837601 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.271713 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7558bc6d45-42mbj"] Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.336498 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mjxnw" Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.500105 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97e04793-9076-4f7a-8228-d9fabee0b2a7-catalog-content\") pod \"97e04793-9076-4f7a-8228-d9fabee0b2a7\" (UID: \"97e04793-9076-4f7a-8228-d9fabee0b2a7\") " Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.509776 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xv66\" (UniqueName: \"kubernetes.io/projected/97e04793-9076-4f7a-8228-d9fabee0b2a7-kube-api-access-2xv66\") pod \"97e04793-9076-4f7a-8228-d9fabee0b2a7\" (UID: \"97e04793-9076-4f7a-8228-d9fabee0b2a7\") " Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.510230 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97e04793-9076-4f7a-8228-d9fabee0b2a7-utilities\") pod \"97e04793-9076-4f7a-8228-d9fabee0b2a7\" (UID: \"97e04793-9076-4f7a-8228-d9fabee0b2a7\") " Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.514092 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97e04793-9076-4f7a-8228-d9fabee0b2a7-utilities" (OuterVolumeSpecName: "utilities") pod "97e04793-9076-4f7a-8228-d9fabee0b2a7" (UID: "97e04793-9076-4f7a-8228-d9fabee0b2a7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.525776 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97e04793-9076-4f7a-8228-d9fabee0b2a7-kube-api-access-2xv66" (OuterVolumeSpecName: "kube-api-access-2xv66") pod "97e04793-9076-4f7a-8228-d9fabee0b2a7" (UID: "97e04793-9076-4f7a-8228-d9fabee0b2a7"). InnerVolumeSpecName "kube-api-access-2xv66". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.548196 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97e04793-9076-4f7a-8228-d9fabee0b2a7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "97e04793-9076-4f7a-8228-d9fabee0b2a7" (UID: "97e04793-9076-4f7a-8228-d9fabee0b2a7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.553534 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-56b949bc6f-hxx8c"] Jan 22 13:38:38 crc kubenswrapper[4773]: W0122 13:38:38.555149 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda31568c1_96aa_48db_9a76_0ec13e7cae6a.slice/crio-d01dfec2b86d620b9a7fed9998f62228ce4db4e2e5f12a40ce66d481e0a671f5 WatchSource:0}: Error finding container d01dfec2b86d620b9a7fed9998f62228ce4db4e2e5f12a40ce66d481e0a671f5: Status 404 returned error can't find the container with id d01dfec2b86d620b9a7fed9998f62228ce4db4e2e5f12a40ce66d481e0a671f5 Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.613658 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97e04793-9076-4f7a-8228-d9fabee0b2a7-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.613920 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97e04793-9076-4f7a-8228-d9fabee0b2a7-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.613933 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xv66\" (UniqueName: \"kubernetes.io/projected/97e04793-9076-4f7a-8228-d9fabee0b2a7-kube-api-access-2xv66\") on node \"crc\" DevicePath \"\"" Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.702903 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56b949bc6f-hxx8c" event={"ID":"a31568c1-96aa-48db-9a76-0ec13e7cae6a","Type":"ContainerStarted","Data":"d01dfec2b86d620b9a7fed9998f62228ce4db4e2e5f12a40ce66d481e0a671f5"} Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.708979 4773 generic.go:334] "Generic (PLEG): container finished" podID="97e04793-9076-4f7a-8228-d9fabee0b2a7" containerID="efc8f7bc9dead4cfa8c5c24b024434ac3b18ae505f17de7cae901a34cde18e34" exitCode=0 Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.709053 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mjxnw" Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.709120 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjxnw" event={"ID":"97e04793-9076-4f7a-8228-d9fabee0b2a7","Type":"ContainerDied","Data":"efc8f7bc9dead4cfa8c5c24b024434ac3b18ae505f17de7cae901a34cde18e34"} Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.709151 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mjxnw" event={"ID":"97e04793-9076-4f7a-8228-d9fabee0b2a7","Type":"ContainerDied","Data":"4e92822b3d58969b15f2df94253af65360f27f5725e6853cec8851755479d15d"} Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.709169 4773 scope.go:117] "RemoveContainer" containerID="efc8f7bc9dead4cfa8c5c24b024434ac3b18ae505f17de7cae901a34cde18e34" Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.712613 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7558bc6d45-42mbj" event={"ID":"5f369587-17dc-4fa8-9aac-5155d9ae0738","Type":"ContainerStarted","Data":"5fab145b673295fb5c724f5debec1dea3e5d877d309960b2ddc2db90354bfc30"} Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.743229 4773 scope.go:117] "RemoveContainer" containerID="c62aaca4eab9778c9c7da18d8c9baa5125cf45f977077d6b4112d9a4a4be369c" Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.751401 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mjxnw"] Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.760432 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mjxnw"] Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.778858 4773 scope.go:117] "RemoveContainer" containerID="22a1792171aa33a197b3fe662493d1a5c91620fd17f290e6fbaf9e98b07a9daf" Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.838836 4773 scope.go:117] "RemoveContainer" containerID="efc8f7bc9dead4cfa8c5c24b024434ac3b18ae505f17de7cae901a34cde18e34" Jan 22 13:38:38 crc kubenswrapper[4773]: E0122 13:38:38.842271 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efc8f7bc9dead4cfa8c5c24b024434ac3b18ae505f17de7cae901a34cde18e34\": container with ID starting with efc8f7bc9dead4cfa8c5c24b024434ac3b18ae505f17de7cae901a34cde18e34 not found: ID does not exist" containerID="efc8f7bc9dead4cfa8c5c24b024434ac3b18ae505f17de7cae901a34cde18e34" Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.842353 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efc8f7bc9dead4cfa8c5c24b024434ac3b18ae505f17de7cae901a34cde18e34"} err="failed to get container status \"efc8f7bc9dead4cfa8c5c24b024434ac3b18ae505f17de7cae901a34cde18e34\": rpc error: code = NotFound desc = could not find container \"efc8f7bc9dead4cfa8c5c24b024434ac3b18ae505f17de7cae901a34cde18e34\": container with ID starting with efc8f7bc9dead4cfa8c5c24b024434ac3b18ae505f17de7cae901a34cde18e34 not found: ID does not exist" Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.842376 4773 scope.go:117] "RemoveContainer" containerID="c62aaca4eab9778c9c7da18d8c9baa5125cf45f977077d6b4112d9a4a4be369c" Jan 22 13:38:38 crc kubenswrapper[4773]: E0122 13:38:38.843392 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"c62aaca4eab9778c9c7da18d8c9baa5125cf45f977077d6b4112d9a4a4be369c\": container with ID starting with c62aaca4eab9778c9c7da18d8c9baa5125cf45f977077d6b4112d9a4a4be369c not found: ID does not exist" containerID="c62aaca4eab9778c9c7da18d8c9baa5125cf45f977077d6b4112d9a4a4be369c" Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.843410 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c62aaca4eab9778c9c7da18d8c9baa5125cf45f977077d6b4112d9a4a4be369c"} err="failed to get container status \"c62aaca4eab9778c9c7da18d8c9baa5125cf45f977077d6b4112d9a4a4be369c\": rpc error: code = NotFound desc = could not find container \"c62aaca4eab9778c9c7da18d8c9baa5125cf45f977077d6b4112d9a4a4be369c\": container with ID starting with c62aaca4eab9778c9c7da18d8c9baa5125cf45f977077d6b4112d9a4a4be369c not found: ID does not exist" Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.843424 4773 scope.go:117] "RemoveContainer" containerID="22a1792171aa33a197b3fe662493d1a5c91620fd17f290e6fbaf9e98b07a9daf" Jan 22 13:38:38 crc kubenswrapper[4773]: E0122 13:38:38.846630 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22a1792171aa33a197b3fe662493d1a5c91620fd17f290e6fbaf9e98b07a9daf\": container with ID starting with 22a1792171aa33a197b3fe662493d1a5c91620fd17f290e6fbaf9e98b07a9daf not found: ID does not exist" containerID="22a1792171aa33a197b3fe662493d1a5c91620fd17f290e6fbaf9e98b07a9daf" Jan 22 13:38:38 crc kubenswrapper[4773]: I0122 13:38:38.846681 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22a1792171aa33a197b3fe662493d1a5c91620fd17f290e6fbaf9e98b07a9daf"} err="failed to get container status \"22a1792171aa33a197b3fe662493d1a5c91620fd17f290e6fbaf9e98b07a9daf\": rpc error: code = NotFound desc = could not find container \"22a1792171aa33a197b3fe662493d1a5c91620fd17f290e6fbaf9e98b07a9daf\": container with ID starting with 22a1792171aa33a197b3fe662493d1a5c91620fd17f290e6fbaf9e98b07a9daf not found: ID does not exist" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.333340 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-56b949bc6f-hxx8c"] Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.375440 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-857f64b994-88lxs"] Jan 22 13:38:40 crc kubenswrapper[4773]: E0122 13:38:40.376065 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97e04793-9076-4f7a-8228-d9fabee0b2a7" containerName="extract-utilities" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.376084 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="97e04793-9076-4f7a-8228-d9fabee0b2a7" containerName="extract-utilities" Jan 22 13:38:40 crc kubenswrapper[4773]: E0122 13:38:40.376115 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97e04793-9076-4f7a-8228-d9fabee0b2a7" containerName="registry-server" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.376124 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="97e04793-9076-4f7a-8228-d9fabee0b2a7" containerName="registry-server" Jan 22 13:38:40 crc kubenswrapper[4773]: E0122 13:38:40.376143 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97e04793-9076-4f7a-8228-d9fabee0b2a7" containerName="extract-content" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.376151 4773 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="97e04793-9076-4f7a-8228-d9fabee0b2a7" containerName="extract-content" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.376397 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="97e04793-9076-4f7a-8228-d9fabee0b2a7" containerName="registry-server" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.378588 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.386672 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.398544 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-857f64b994-88lxs"] Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.450826 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7558bc6d45-42mbj"] Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.487628 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6cdcfc4746-tlv4z"] Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.490127 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.498076 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6cdcfc4746-tlv4z"] Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.573086 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/314dc0ed-793c-4765-9a5e-5ada17679078-combined-ca-bundle\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.573174 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/314dc0ed-793c-4765-9a5e-5ada17679078-horizon-tls-certs\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.573376 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5md6\" (UniqueName: \"kubernetes.io/projected/314dc0ed-793c-4765-9a5e-5ada17679078-kube-api-access-p5md6\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.573491 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/314dc0ed-793c-4765-9a5e-5ada17679078-logs\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.573537 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/314dc0ed-793c-4765-9a5e-5ada17679078-horizon-secret-key\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.573556 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/314dc0ed-793c-4765-9a5e-5ada17679078-scripts\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.573581 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/314dc0ed-793c-4765-9a5e-5ada17679078-config-data\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.675175 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97e04793-9076-4f7a-8228-d9fabee0b2a7" path="/var/lib/kubelet/pods/97e04793-9076-4f7a-8228-d9fabee0b2a7/volumes" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.678076 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/746d3447-d12f-4158-89bc-9bb26d157e47-scripts\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.678118 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/314dc0ed-793c-4765-9a5e-5ada17679078-logs\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.678154 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nq5kf\" (UniqueName: \"kubernetes.io/projected/746d3447-d12f-4158-89bc-9bb26d157e47-kube-api-access-nq5kf\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.678174 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/314dc0ed-793c-4765-9a5e-5ada17679078-horizon-secret-key\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.678194 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/314dc0ed-793c-4765-9a5e-5ada17679078-scripts\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.678210 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/746d3447-d12f-4158-89bc-9bb26d157e47-logs\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.678235 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/314dc0ed-793c-4765-9a5e-5ada17679078-config-data\") pod \"horizon-857f64b994-88lxs\" (UID: 
\"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.678273 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/746d3447-d12f-4158-89bc-9bb26d157e47-horizon-secret-key\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.678371 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/314dc0ed-793c-4765-9a5e-5ada17679078-combined-ca-bundle\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.678414 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/746d3447-d12f-4158-89bc-9bb26d157e47-horizon-tls-certs\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.678475 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/314dc0ed-793c-4765-9a5e-5ada17679078-horizon-tls-certs\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.678518 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/746d3447-d12f-4158-89bc-9bb26d157e47-combined-ca-bundle\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.678545 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5md6\" (UniqueName: \"kubernetes.io/projected/314dc0ed-793c-4765-9a5e-5ada17679078-kube-api-access-p5md6\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.678589 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/746d3447-d12f-4158-89bc-9bb26d157e47-config-data\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.679459 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/314dc0ed-793c-4765-9a5e-5ada17679078-scripts\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.679788 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/314dc0ed-793c-4765-9a5e-5ada17679078-logs\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " 
pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.680406 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/314dc0ed-793c-4765-9a5e-5ada17679078-config-data\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.688594 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/314dc0ed-793c-4765-9a5e-5ada17679078-horizon-tls-certs\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.690634 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/314dc0ed-793c-4765-9a5e-5ada17679078-horizon-secret-key\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.691042 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/314dc0ed-793c-4765-9a5e-5ada17679078-combined-ca-bundle\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.701999 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5md6\" (UniqueName: \"kubernetes.io/projected/314dc0ed-793c-4765-9a5e-5ada17679078-kube-api-access-p5md6\") pod \"horizon-857f64b994-88lxs\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.706731 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.735756 4773 generic.go:334] "Generic (PLEG): container finished" podID="fa5d8b60-5b94-446a-83df-e780e1d74a73" containerID="8eda4a136c8d5dd95e331ab944bd359cdd4640180be8b037255ea110865bd120" exitCode=0 Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.735830 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fa5d8b60-5b94-446a-83df-e780e1d74a73","Type":"ContainerDied","Data":"8eda4a136c8d5dd95e331ab944bd359cdd4640180be8b037255ea110865bd120"} Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.780630 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/746d3447-d12f-4158-89bc-9bb26d157e47-horizon-secret-key\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.780793 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/746d3447-d12f-4158-89bc-9bb26d157e47-horizon-tls-certs\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.780888 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/746d3447-d12f-4158-89bc-9bb26d157e47-combined-ca-bundle\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.780967 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/746d3447-d12f-4158-89bc-9bb26d157e47-config-data\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.781043 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/746d3447-d12f-4158-89bc-9bb26d157e47-scripts\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.781116 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nq5kf\" (UniqueName: \"kubernetes.io/projected/746d3447-d12f-4158-89bc-9bb26d157e47-kube-api-access-nq5kf\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.781153 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/746d3447-d12f-4158-89bc-9bb26d157e47-logs\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.781654 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/746d3447-d12f-4158-89bc-9bb26d157e47-logs\") pod \"horizon-6cdcfc4746-tlv4z\" 
(UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.785043 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/746d3447-d12f-4158-89bc-9bb26d157e47-scripts\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.785997 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/746d3447-d12f-4158-89bc-9bb26d157e47-config-data\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.789438 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/746d3447-d12f-4158-89bc-9bb26d157e47-horizon-tls-certs\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.792149 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/746d3447-d12f-4158-89bc-9bb26d157e47-combined-ca-bundle\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.798669 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/746d3447-d12f-4158-89bc-9bb26d157e47-horizon-secret-key\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.806330 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nq5kf\" (UniqueName: \"kubernetes.io/projected/746d3447-d12f-4158-89bc-9bb26d157e47-kube-api-access-nq5kf\") pod \"horizon-6cdcfc4746-tlv4z\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:40 crc kubenswrapper[4773]: I0122 13:38:40.824708 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.194321 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.294973 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svhx7\" (UniqueName: \"kubernetes.io/projected/fa5d8b60-5b94-446a-83df-e780e1d74a73-kube-api-access-svhx7\") pod \"fa5d8b60-5b94-446a-83df-e780e1d74a73\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.295051 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-config-data\") pod \"fa5d8b60-5b94-446a-83df-e780e1d74a73\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.295129 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-public-tls-certs\") pod \"fa5d8b60-5b94-446a-83df-e780e1d74a73\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.295340 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fa5d8b60-5b94-446a-83df-e780e1d74a73-httpd-run\") pod \"fa5d8b60-5b94-446a-83df-e780e1d74a73\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.295496 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-scripts\") pod \"fa5d8b60-5b94-446a-83df-e780e1d74a73\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.295636 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-combined-ca-bundle\") pod \"fa5d8b60-5b94-446a-83df-e780e1d74a73\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.295689 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa5d8b60-5b94-446a-83df-e780e1d74a73-logs\") pod \"fa5d8b60-5b94-446a-83df-e780e1d74a73\" (UID: \"fa5d8b60-5b94-446a-83df-e780e1d74a73\") " Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.296504 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa5d8b60-5b94-446a-83df-e780e1d74a73-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "fa5d8b60-5b94-446a-83df-e780e1d74a73" (UID: "fa5d8b60-5b94-446a-83df-e780e1d74a73"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.297591 4773 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fa5d8b60-5b94-446a-83df-e780e1d74a73-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.298058 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa5d8b60-5b94-446a-83df-e780e1d74a73-logs" (OuterVolumeSpecName: "logs") pod "fa5d8b60-5b94-446a-83df-e780e1d74a73" (UID: "fa5d8b60-5b94-446a-83df-e780e1d74a73"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.301676 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-scripts" (OuterVolumeSpecName: "scripts") pod "fa5d8b60-5b94-446a-83df-e780e1d74a73" (UID: "fa5d8b60-5b94-446a-83df-e780e1d74a73"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.302880 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa5d8b60-5b94-446a-83df-e780e1d74a73-kube-api-access-svhx7" (OuterVolumeSpecName: "kube-api-access-svhx7") pod "fa5d8b60-5b94-446a-83df-e780e1d74a73" (UID: "fa5d8b60-5b94-446a-83df-e780e1d74a73"). InnerVolumeSpecName "kube-api-access-svhx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.346769 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fa5d8b60-5b94-446a-83df-e780e1d74a73" (UID: "fa5d8b60-5b94-446a-83df-e780e1d74a73"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.372358 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-857f64b994-88lxs"] Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.378202 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "fa5d8b60-5b94-446a-83df-e780e1d74a73" (UID: "fa5d8b60-5b94-446a-83df-e780e1d74a73"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.390585 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.399965 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.399997 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.400010 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa5d8b60-5b94-446a-83df-e780e1d74a73-logs\") on node \"crc\" DevicePath \"\"" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.400022 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svhx7\" (UniqueName: \"kubernetes.io/projected/fa5d8b60-5b94-446a-83df-e780e1d74a73-kube-api-access-svhx7\") on node \"crc\" DevicePath \"\"" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.400037 4773 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.436072 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-config-data" (OuterVolumeSpecName: "config-data") pod "fa5d8b60-5b94-446a-83df-e780e1d74a73" (UID: "fa5d8b60-5b94-446a-83df-e780e1d74a73"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.501568 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-config-data\") pod \"0440d00c-cf93-434f-b150-00fba0e9b5c3\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.501633 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-combined-ca-bundle\") pod \"0440d00c-cf93-434f-b150-00fba0e9b5c3\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.501702 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0440d00c-cf93-434f-b150-00fba0e9b5c3-logs\") pod \"0440d00c-cf93-434f-b150-00fba0e9b5c3\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.501753 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-scripts\") pod \"0440d00c-cf93-434f-b150-00fba0e9b5c3\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.501781 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0440d00c-cf93-434f-b150-00fba0e9b5c3-httpd-run\") pod \"0440d00c-cf93-434f-b150-00fba0e9b5c3\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " Jan 22 
13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.501902 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwggh\" (UniqueName: \"kubernetes.io/projected/0440d00c-cf93-434f-b150-00fba0e9b5c3-kube-api-access-dwggh\") pod \"0440d00c-cf93-434f-b150-00fba0e9b5c3\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.501942 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-internal-tls-certs\") pod \"0440d00c-cf93-434f-b150-00fba0e9b5c3\" (UID: \"0440d00c-cf93-434f-b150-00fba0e9b5c3\") " Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.502272 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0440d00c-cf93-434f-b150-00fba0e9b5c3-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0440d00c-cf93-434f-b150-00fba0e9b5c3" (UID: "0440d00c-cf93-434f-b150-00fba0e9b5c3"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.502369 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0440d00c-cf93-434f-b150-00fba0e9b5c3-logs" (OuterVolumeSpecName: "logs") pod "0440d00c-cf93-434f-b150-00fba0e9b5c3" (UID: "0440d00c-cf93-434f-b150-00fba0e9b5c3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.502857 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa5d8b60-5b94-446a-83df-e780e1d74a73-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.502880 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0440d00c-cf93-434f-b150-00fba0e9b5c3-logs\") on node \"crc\" DevicePath \"\"" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.502889 4773 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0440d00c-cf93-434f-b150-00fba0e9b5c3-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.505910 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-scripts" (OuterVolumeSpecName: "scripts") pod "0440d00c-cf93-434f-b150-00fba0e9b5c3" (UID: "0440d00c-cf93-434f-b150-00fba0e9b5c3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.508037 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0440d00c-cf93-434f-b150-00fba0e9b5c3-kube-api-access-dwggh" (OuterVolumeSpecName: "kube-api-access-dwggh") pod "0440d00c-cf93-434f-b150-00fba0e9b5c3" (UID: "0440d00c-cf93-434f-b150-00fba0e9b5c3"). InnerVolumeSpecName "kube-api-access-dwggh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.537545 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6cdcfc4746-tlv4z"] Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.542005 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0440d00c-cf93-434f-b150-00fba0e9b5c3" (UID: "0440d00c-cf93-434f-b150-00fba0e9b5c3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.567779 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "0440d00c-cf93-434f-b150-00fba0e9b5c3" (UID: "0440d00c-cf93-434f-b150-00fba0e9b5c3"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.573014 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-config-data" (OuterVolumeSpecName: "config-data") pod "0440d00c-cf93-434f-b150-00fba0e9b5c3" (UID: "0440d00c-cf93-434f-b150-00fba0e9b5c3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.605186 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.605292 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.605308 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.605319 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwggh\" (UniqueName: \"kubernetes.io/projected/0440d00c-cf93-434f-b150-00fba0e9b5c3-kube-api-access-dwggh\") on node \"crc\" DevicePath \"\"" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.605327 4773 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0440d00c-cf93-434f-b150-00fba0e9b5c3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.755731 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-857f64b994-88lxs" event={"ID":"314dc0ed-793c-4765-9a5e-5ada17679078","Type":"ContainerStarted","Data":"e44c26033d1f4096a84013ddb81af92c32752ce059f872ec8a205cc18b90c9e5"} Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.757689 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6cdcfc4746-tlv4z" event={"ID":"746d3447-d12f-4158-89bc-9bb26d157e47","Type":"ContainerStarted","Data":"7c33027f4936c80d6e9da57eb2881a20e1c35c60ef51605a22cb559a7a5acd00"} Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 
Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.760048 4773 generic.go:334] "Generic (PLEG): container finished" podID="0440d00c-cf93-434f-b150-00fba0e9b5c3" containerID="5754b752babac5a412b9225d49fdd69116f56d67d9b2cb14e51416eeba7ad27d" exitCode=0
Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.760106 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0440d00c-cf93-434f-b150-00fba0e9b5c3","Type":"ContainerDied","Data":"5754b752babac5a412b9225d49fdd69116f56d67d9b2cb14e51416eeba7ad27d"}
Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.760137 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0440d00c-cf93-434f-b150-00fba0e9b5c3","Type":"ContainerDied","Data":"2090e992faa73dee2aa6f9faf6299600b13f3c2b692dad81be681cbaa337d185"}
Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.760186 4773 scope.go:117] "RemoveContainer" containerID="5754b752babac5a412b9225d49fdd69116f56d67d9b2cb14e51416eeba7ad27d"
Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.760230 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.762376 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"fa5d8b60-5b94-446a-83df-e780e1d74a73","Type":"ContainerDied","Data":"ff3ef2a5c1d079662ff035b0246630e2eb48ecd347ea885f43cf9492a32c5936"}
Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.762522 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.826049 4773 scope.go:117] "RemoveContainer" containerID="817efdefeb30cbe4de69662d0cee72869a434168ee1e7164da620008c9ce5008"
Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.832495 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.876355 4773 scope.go:117] "RemoveContainer" containerID="5754b752babac5a412b9225d49fdd69116f56d67d9b2cb14e51416eeba7ad27d"
Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.876857 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 22 13:38:41 crc kubenswrapper[4773]: E0122 13:38:41.880594 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5754b752babac5a412b9225d49fdd69116f56d67d9b2cb14e51416eeba7ad27d\": container with ID starting with 5754b752babac5a412b9225d49fdd69116f56d67d9b2cb14e51416eeba7ad27d not found: ID does not exist" containerID="5754b752babac5a412b9225d49fdd69116f56d67d9b2cb14e51416eeba7ad27d"
Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.880865 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5754b752babac5a412b9225d49fdd69116f56d67d9b2cb14e51416eeba7ad27d"} err="failed to get container status \"5754b752babac5a412b9225d49fdd69116f56d67d9b2cb14e51416eeba7ad27d\": rpc error: code = NotFound desc = could not find container \"5754b752babac5a412b9225d49fdd69116f56d67d9b2cb14e51416eeba7ad27d\": container with ID starting with 5754b752babac5a412b9225d49fdd69116f56d67d9b2cb14e51416eeba7ad27d not found: ID does not exist"
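The E0122 NotFound errors above are benign: by the time the kubelet asks the runtime for the container's status, cri-o has already deleted it, and the gRPC NotFound status code is the signal to treat removal as done. A minimal sketch of how a CRI client can classify that error using the standard google.golang.org/grpc status package (the containerGone helper is illustrative, not the kubelet's own code):

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// containerGone reports whether a runtime-service error means the
// container no longer exists, which the log above shows being treated
// as "nothing left to remove" rather than a failure.
func containerGone(err error) bool {
	return status.Code(err) == codes.NotFound
}

func main() {
	// Stand-in for the ContainerStatus RPC error logged above; a real
	// client would receive this from the CRI runtime service.
	err := status.Error(codes.NotFound, `could not find container "5754b752...": ID does not exist`)
	if containerGone(err) {
		fmt.Println("already removed; deletion can be considered complete")
	} else if err != nil {
		fmt.Println("genuine failure:", err)
	}
}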
containerID="817efdefeb30cbe4de69662d0cee72869a434168ee1e7164da620008c9ce5008" Jan 22 13:38:41 crc kubenswrapper[4773]: E0122 13:38:41.883054 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"817efdefeb30cbe4de69662d0cee72869a434168ee1e7164da620008c9ce5008\": container with ID starting with 817efdefeb30cbe4de69662d0cee72869a434168ee1e7164da620008c9ce5008 not found: ID does not exist" containerID="817efdefeb30cbe4de69662d0cee72869a434168ee1e7164da620008c9ce5008" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.883096 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"817efdefeb30cbe4de69662d0cee72869a434168ee1e7164da620008c9ce5008"} err="failed to get container status \"817efdefeb30cbe4de69662d0cee72869a434168ee1e7164da620008c9ce5008\": rpc error: code = NotFound desc = could not find container \"817efdefeb30cbe4de69662d0cee72869a434168ee1e7164da620008c9ce5008\": container with ID starting with 817efdefeb30cbe4de69662d0cee72869a434168ee1e7164da620008c9ce5008 not found: ID does not exist" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.883125 4773 scope.go:117] "RemoveContainer" containerID="8eda4a136c8d5dd95e331ab944bd359cdd4640180be8b037255ea110865bd120" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.890449 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.913584 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 13:38:41 crc kubenswrapper[4773]: E0122 13:38:41.914037 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa5d8b60-5b94-446a-83df-e780e1d74a73" containerName="glance-httpd" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.914057 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa5d8b60-5b94-446a-83df-e780e1d74a73" containerName="glance-httpd" Jan 22 13:38:41 crc kubenswrapper[4773]: E0122 13:38:41.914072 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0440d00c-cf93-434f-b150-00fba0e9b5c3" containerName="glance-httpd" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.914080 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="0440d00c-cf93-434f-b150-00fba0e9b5c3" containerName="glance-httpd" Jan 22 13:38:41 crc kubenswrapper[4773]: E0122 13:38:41.914105 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa5d8b60-5b94-446a-83df-e780e1d74a73" containerName="glance-log" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.914123 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa5d8b60-5b94-446a-83df-e780e1d74a73" containerName="glance-log" Jan 22 13:38:41 crc kubenswrapper[4773]: E0122 13:38:41.914152 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0440d00c-cf93-434f-b150-00fba0e9b5c3" containerName="glance-log" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.914159 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="0440d00c-cf93-434f-b150-00fba0e9b5c3" containerName="glance-log" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.914468 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="0440d00c-cf93-434f-b150-00fba0e9b5c3" containerName="glance-httpd" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.914488 4773 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="fa5d8b60-5b94-446a-83df-e780e1d74a73" containerName="glance-httpd" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.914503 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa5d8b60-5b94-446a-83df-e780e1d74a73" containerName="glance-log" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.914518 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="0440d00c-cf93-434f-b150-00fba0e9b5c3" containerName="glance-log" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.915847 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.917763 4773 scope.go:117] "RemoveContainer" containerID="c970860f98f655be88245c885168058e0391fa019530eda57b32e8f4dc167366" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.918174 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.918501 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.919984 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.927621 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-26scd" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.940620 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.956892 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.967954 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.972059 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.974778 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.974987 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Jan 22 13:38:41 crc kubenswrapper[4773]: I0122 13:38:41.980057 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.019659 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9820bd1-4334-42ed-94ad-fe0c21440458-logs\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.019778 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggvmh\" (UniqueName: \"kubernetes.io/projected/f9820bd1-4334-42ed-94ad-fe0c21440458-kube-api-access-ggvmh\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.019817 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9820bd1-4334-42ed-94ad-fe0c21440458-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.019875 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9820bd1-4334-42ed-94ad-fe0c21440458-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.019908 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9820bd1-4334-42ed-94ad-fe0c21440458-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.019999 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9820bd1-4334-42ed-94ad-fe0c21440458-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.020022 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f9820bd1-4334-42ed-94ad-fe0c21440458-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.123005 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5464273f-aca6-46a1-bfdb-15c7d174be31-scripts\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.123076 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9820bd1-4334-42ed-94ad-fe0c21440458-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.123127 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5464273f-aca6-46a1-bfdb-15c7d174be31-config-data\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.123161 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5464273f-aca6-46a1-bfdb-15c7d174be31-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.123186 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9820bd1-4334-42ed-94ad-fe0c21440458-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.123355 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9pgj\" (UniqueName: \"kubernetes.io/projected/5464273f-aca6-46a1-bfdb-15c7d174be31-kube-api-access-m9pgj\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.123450 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5464273f-aca6-46a1-bfdb-15c7d174be31-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.123547 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9820bd1-4334-42ed-94ad-fe0c21440458-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.123571 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f9820bd1-4334-42ed-94ad-fe0c21440458-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.123695 4773 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9820bd1-4334-42ed-94ad-fe0c21440458-logs\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.126187 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f9820bd1-4334-42ed-94ad-fe0c21440458-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.126564 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5464273f-aca6-46a1-bfdb-15c7d174be31-logs\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.127000 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggvmh\" (UniqueName: \"kubernetes.io/projected/f9820bd1-4334-42ed-94ad-fe0c21440458-kube-api-access-ggvmh\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.127115 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9820bd1-4334-42ed-94ad-fe0c21440458-logs\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.127346 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9820bd1-4334-42ed-94ad-fe0c21440458-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.127595 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5464273f-aca6-46a1-bfdb-15c7d174be31-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.127957 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9820bd1-4334-42ed-94ad-fe0c21440458-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.132972 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9820bd1-4334-42ed-94ad-fe0c21440458-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.134415 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/f9820bd1-4334-42ed-94ad-fe0c21440458-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.134702 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9820bd1-4334-42ed-94ad-fe0c21440458-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.146052 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggvmh\" (UniqueName: \"kubernetes.io/projected/f9820bd1-4334-42ed-94ad-fe0c21440458-kube-api-access-ggvmh\") pod \"glance-default-internal-api-0\" (UID: \"f9820bd1-4334-42ed-94ad-fe0c21440458\") " pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.229934 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5464273f-aca6-46a1-bfdb-15c7d174be31-logs\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.230047 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5464273f-aca6-46a1-bfdb-15c7d174be31-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.230074 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5464273f-aca6-46a1-bfdb-15c7d174be31-scripts\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.230112 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5464273f-aca6-46a1-bfdb-15c7d174be31-config-data\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.230143 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5464273f-aca6-46a1-bfdb-15c7d174be31-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.230173 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9pgj\" (UniqueName: \"kubernetes.io/projected/5464273f-aca6-46a1-bfdb-15c7d174be31-kube-api-access-m9pgj\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.230222 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/5464273f-aca6-46a1-bfdb-15c7d174be31-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.232086 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5464273f-aca6-46a1-bfdb-15c7d174be31-logs\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.232719 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5464273f-aca6-46a1-bfdb-15c7d174be31-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.247219 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.253859 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5464273f-aca6-46a1-bfdb-15c7d174be31-scripts\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.254024 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5464273f-aca6-46a1-bfdb-15c7d174be31-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.254780 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5464273f-aca6-46a1-bfdb-15c7d174be31-config-data\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.255739 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9pgj\" (UniqueName: \"kubernetes.io/projected/5464273f-aca6-46a1-bfdb-15c7d174be31-kube-api-access-m9pgj\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.256145 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5464273f-aca6-46a1-bfdb-15c7d174be31-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5464273f-aca6-46a1-bfdb-15c7d174be31\") " pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.289911 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.671590 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0440d00c-cf93-434f-b150-00fba0e9b5c3" path="/var/lib/kubelet/pods/0440d00c-cf93-434f-b150-00fba0e9b5c3/volumes" Jan 22 13:38:42 crc kubenswrapper[4773]: I0122 13:38:42.672810 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa5d8b60-5b94-446a-83df-e780e1d74a73" path="/var/lib/kubelet/pods/fa5d8b60-5b94-446a-83df-e780e1d74a73/volumes" Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.486700 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.663868 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 22 13:38:48 crc kubenswrapper[4773]: W0122 13:38:47.680823 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5464273f_aca6_46a1_bfdb_15c7d174be31.slice/crio-b57bd7ec72ae756abf22abeeaed49c2005f25cd9a03fb34725d1772dcab072fb WatchSource:0}: Error finding container b57bd7ec72ae756abf22abeeaed49c2005f25cd9a03fb34725d1772dcab072fb: Status 404 returned error can't find the container with id b57bd7ec72ae756abf22abeeaed49c2005f25cd9a03fb34725d1772dcab072fb Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.851958 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5464273f-aca6-46a1-bfdb-15c7d174be31","Type":"ContainerStarted","Data":"b57bd7ec72ae756abf22abeeaed49c2005f25cd9a03fb34725d1772dcab072fb"} Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.856567 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56b949bc6f-hxx8c" event={"ID":"a31568c1-96aa-48db-9a76-0ec13e7cae6a","Type":"ContainerStarted","Data":"8e3f17f679e76f8d425d52b9dab0bd4781888c8244072b5fe8306f805f04a3b4"} Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.856601 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56b949bc6f-hxx8c" event={"ID":"a31568c1-96aa-48db-9a76-0ec13e7cae6a","Type":"ContainerStarted","Data":"94fd1cd1f50029d64a242a3f66bd8315df69e1f6adc9a2350679db76d2ec56dc"} Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.856797 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-56b949bc6f-hxx8c" podUID="a31568c1-96aa-48db-9a76-0ec13e7cae6a" containerName="horizon-log" containerID="cri-o://94fd1cd1f50029d64a242a3f66bd8315df69e1f6adc9a2350679db76d2ec56dc" gracePeriod=30 Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.857132 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-56b949bc6f-hxx8c" podUID="a31568c1-96aa-48db-9a76-0ec13e7cae6a" containerName="horizon" containerID="cri-o://8e3f17f679e76f8d425d52b9dab0bd4781888c8244072b5fe8306f805f04a3b4" gracePeriod=30 Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.864694 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6cdcfc4746-tlv4z" event={"ID":"746d3447-d12f-4158-89bc-9bb26d157e47","Type":"ContainerStarted","Data":"3be82d96f92886f3fc9bdff193385c39a68d54ae4fc48a63ebc3257fafbe1407"} Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.864736 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/horizon-6cdcfc4746-tlv4z" event={"ID":"746d3447-d12f-4158-89bc-9bb26d157e47","Type":"ContainerStarted","Data":"26e525b123f04cad1f329caae148814476a7b9a39d7bf6bdbcbfc4cc5dae7d98"} Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.868148 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7558bc6d45-42mbj" event={"ID":"5f369587-17dc-4fa8-9aac-5155d9ae0738","Type":"ContainerStarted","Data":"34d31a15cfb46a2b0d0e7b2af06de2110c4f3ef9da4621623f5906143fae1333"} Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.868171 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7558bc6d45-42mbj" event={"ID":"5f369587-17dc-4fa8-9aac-5155d9ae0738","Type":"ContainerStarted","Data":"f4e4b3314d6ea80653eba674aa215359226fb0afa24404ed9aa9467d162b96f5"} Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.868255 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7558bc6d45-42mbj" podUID="5f369587-17dc-4fa8-9aac-5155d9ae0738" containerName="horizon-log" containerID="cri-o://f4e4b3314d6ea80653eba674aa215359226fb0afa24404ed9aa9467d162b96f5" gracePeriod=30 Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.868522 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7558bc6d45-42mbj" podUID="5f369587-17dc-4fa8-9aac-5155d9ae0738" containerName="horizon" containerID="cri-o://34d31a15cfb46a2b0d0e7b2af06de2110c4f3ef9da4621623f5906143fae1333" gracePeriod=30 Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.884105 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-857f64b994-88lxs" event={"ID":"314dc0ed-793c-4765-9a5e-5ada17679078","Type":"ContainerStarted","Data":"1a366491e858a37cbde7e4acf2ec962e4c401a45f14d158448ca5291419bad05"} Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.884153 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-857f64b994-88lxs" event={"ID":"314dc0ed-793c-4765-9a5e-5ada17679078","Type":"ContainerStarted","Data":"fb14b98a11e9e5a3918ef6e2749eac04388344e802be6168b2a7a5d0b22a1bd0"} Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.887792 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f9820bd1-4334-42ed-94ad-fe0c21440458","Type":"ContainerStarted","Data":"fd8ed206f44a5cb402cd5498e5a7650fee9038196fd609715e74dc57644085ae"} Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.891477 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-56b949bc6f-hxx8c" podStartSLOduration=2.37483841 podStartE2EDuration="10.891462836s" podCreationTimestamp="2026-01-22 13:38:37 +0000 UTC" firstStartedPulling="2026-01-22 13:38:38.557866238 +0000 UTC m=+6226.135982063" lastFinishedPulling="2026-01-22 13:38:47.074490664 +0000 UTC m=+6234.652606489" observedRunningTime="2026-01-22 13:38:47.882736231 +0000 UTC m=+6235.460852076" watchObservedRunningTime="2026-01-22 13:38:47.891462836 +0000 UTC m=+6235.469578661" Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.912660 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7558bc6d45-42mbj" podStartSLOduration=2.142732007 podStartE2EDuration="10.912637851s" podCreationTimestamp="2026-01-22 13:38:37 +0000 UTC" firstStartedPulling="2026-01-22 13:38:38.268693261 +0000 UTC m=+6225.846809086" lastFinishedPulling="2026-01-22 13:38:47.038599105 +0000 UTC m=+6234.616714930" 
observedRunningTime="2026-01-22 13:38:47.901356154 +0000 UTC m=+6235.479471979" watchObservedRunningTime="2026-01-22 13:38:47.912637851 +0000 UTC m=+6235.490753676" Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.924196 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6cdcfc4746-tlv4z" podStartSLOduration=2.311285521 podStartE2EDuration="7.924175176s" podCreationTimestamp="2026-01-22 13:38:40 +0000 UTC" firstStartedPulling="2026-01-22 13:38:41.544342255 +0000 UTC m=+6229.122458080" lastFinishedPulling="2026-01-22 13:38:47.1572319 +0000 UTC m=+6234.735347735" observedRunningTime="2026-01-22 13:38:47.921804559 +0000 UTC m=+6235.499920394" watchObservedRunningTime="2026-01-22 13:38:47.924175176 +0000 UTC m=+6235.502291001" Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:47.968437 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-857f64b994-88lxs" podStartSLOduration=2.284116417 podStartE2EDuration="7.968410109s" podCreationTimestamp="2026-01-22 13:38:40 +0000 UTC" firstStartedPulling="2026-01-22 13:38:41.390502351 +0000 UTC m=+6228.968618176" lastFinishedPulling="2026-01-22 13:38:47.074796033 +0000 UTC m=+6234.652911868" observedRunningTime="2026-01-22 13:38:47.955971299 +0000 UTC m=+6235.534087124" watchObservedRunningTime="2026-01-22 13:38:47.968410109 +0000 UTC m=+6235.546525934" Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:48.900363 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f9820bd1-4334-42ed-94ad-fe0c21440458","Type":"ContainerStarted","Data":"79b0aa8c8452d760b3608489f795a7855169e13e5e81354182d8fd08c3b98e25"} Jan 22 13:38:48 crc kubenswrapper[4773]: I0122 13:38:48.903036 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5464273f-aca6-46a1-bfdb-15c7d174be31","Type":"ContainerStarted","Data":"51041d85a1c44476709ad5adf3c4499d4905be31955c5737503cd84c459a4229"} Jan 22 13:38:49 crc kubenswrapper[4773]: I0122 13:38:49.991492 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5464273f-aca6-46a1-bfdb-15c7d174be31","Type":"ContainerStarted","Data":"2569fba729588e2c0ddc63c0bb342e4304d29cfe8bcbcb15d94990394bc26828"} Jan 22 13:38:49 crc kubenswrapper[4773]: I0122 13:38:49.994384 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f9820bd1-4334-42ed-94ad-fe0c21440458","Type":"ContainerStarted","Data":"6d2a8fd441dc0fabc42adea14bca533a6c512b9281318c660feab4a856ac3b3e"} Jan 22 13:38:50 crc kubenswrapper[4773]: I0122 13:38:50.026202 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=9.026176334 podStartE2EDuration="9.026176334s" podCreationTimestamp="2026-01-22 13:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:38:50.017007067 +0000 UTC m=+6237.595122902" watchObservedRunningTime="2026-01-22 13:38:50.026176334 +0000 UTC m=+6237.604292159" Jan 22 13:38:50 crc kubenswrapper[4773]: I0122 13:38:50.045208 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=9.045184679 podStartE2EDuration="9.045184679s" podCreationTimestamp="2026-01-22 13:38:41 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:38:50.043646685 +0000 UTC m=+6237.621762520" watchObservedRunningTime="2026-01-22 13:38:50.045184679 +0000 UTC m=+6237.623300504" Jan 22 13:38:50 crc kubenswrapper[4773]: I0122 13:38:50.707206 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:50 crc kubenswrapper[4773]: I0122 13:38:50.707345 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:38:50 crc kubenswrapper[4773]: I0122 13:38:50.826645 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:50 crc kubenswrapper[4773]: I0122 13:38:50.827893 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:38:51 crc kubenswrapper[4773]: I0122 13:38:51.047122 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7abc-account-create-update-qx5vc"] Jan 22 13:38:51 crc kubenswrapper[4773]: I0122 13:38:51.059665 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-mxljw"] Jan 22 13:38:51 crc kubenswrapper[4773]: I0122 13:38:51.068380 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-mxljw"] Jan 22 13:38:51 crc kubenswrapper[4773]: I0122 13:38:51.078066 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7abc-account-create-update-qx5vc"] Jan 22 13:38:52 crc kubenswrapper[4773]: I0122 13:38:52.247748 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 22 13:38:52 crc kubenswrapper[4773]: I0122 13:38:52.248069 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 22 13:38:52 crc kubenswrapper[4773]: I0122 13:38:52.281925 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 22 13:38:52 crc kubenswrapper[4773]: I0122 13:38:52.291169 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 22 13:38:52 crc kubenswrapper[4773]: I0122 13:38:52.291229 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 22 13:38:52 crc kubenswrapper[4773]: I0122 13:38:52.291619 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 22 13:38:52 crc kubenswrapper[4773]: I0122 13:38:52.333878 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 22 13:38:52 crc kubenswrapper[4773]: I0122 13:38:52.335852 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 22 13:38:52 crc kubenswrapper[4773]: I0122 13:38:52.672505 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64ceda11-8e56-4d64-9940-8cca049a03b6" path="/var/lib/kubelet/pods/64ceda11-8e56-4d64-9940-8cca049a03b6/volumes" Jan 22 13:38:52 crc kubenswrapper[4773]: I0122 13:38:52.673136 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4f8a4ca-4ec1-41ee-ac71-a319e1590452" 
path="/var/lib/kubelet/pods/b4f8a4ca-4ec1-41ee-ac71-a319e1590452/volumes" Jan 22 13:38:53 crc kubenswrapper[4773]: I0122 13:38:53.031957 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 22 13:38:53 crc kubenswrapper[4773]: I0122 13:38:53.032218 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 22 13:38:53 crc kubenswrapper[4773]: I0122 13:38:53.032243 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 22 13:38:53 crc kubenswrapper[4773]: I0122 13:38:53.032306 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 22 13:38:57 crc kubenswrapper[4773]: I0122 13:38:57.191927 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 22 13:38:57 crc kubenswrapper[4773]: I0122 13:38:57.192473 4773 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 13:38:57 crc kubenswrapper[4773]: I0122 13:38:57.194611 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 22 13:38:57 crc kubenswrapper[4773]: I0122 13:38:57.304562 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 22 13:38:57 crc kubenswrapper[4773]: I0122 13:38:57.304668 4773 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 13:38:57 crc kubenswrapper[4773]: I0122 13:38:57.307225 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 22 13:38:57 crc kubenswrapper[4773]: I0122 13:38:57.709814 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:38:57 crc kubenswrapper[4773]: I0122 13:38:57.837744 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:39:00 crc kubenswrapper[4773]: I0122 13:39:00.710540 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-857f64b994-88lxs" podUID="314dc0ed-793c-4765-9a5e-5ada17679078" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.123:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.123:8443: connect: connection refused" Jan 22 13:39:00 crc kubenswrapper[4773]: I0122 13:39:00.830492 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6cdcfc4746-tlv4z" podUID="746d3447-d12f-4158-89bc-9bb26d157e47" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.124:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.124:8443: connect: connection refused" Jan 22 13:39:05 crc kubenswrapper[4773]: I0122 13:39:05.043686 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-s85mn"] Jan 22 13:39:05 crc kubenswrapper[4773]: I0122 13:39:05.053856 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-s85mn"] Jan 22 13:39:06 crc kubenswrapper[4773]: I0122 13:39:06.225159 4773 scope.go:117] "RemoveContainer" containerID="0643999f89bb71dceba31e605d43e2cca362efaa3b63b043692b81eff1bba382" Jan 22 13:39:06 crc kubenswrapper[4773]: I0122 13:39:06.264531 4773 scope.go:117] "RemoveContainer" 
containerID="57f54bfb38247bec792c1024bbda41dfa392b8d89501bc7218dc262913a2da4a" Jan 22 13:39:06 crc kubenswrapper[4773]: I0122 13:39:06.315269 4773 scope.go:117] "RemoveContainer" containerID="144f4f2a462f9ff41c5298d990126301bdbc821ed8a59f706182ceb307d666b3" Jan 22 13:39:06 crc kubenswrapper[4773]: I0122 13:39:06.380944 4773 scope.go:117] "RemoveContainer" containerID="5d92204b8da9af240262eed1d62e22d7c9f26a08d661e04bed96f179b763132f" Jan 22 13:39:06 crc kubenswrapper[4773]: I0122 13:39:06.452465 4773 scope.go:117] "RemoveContainer" containerID="bb1253c9131006089af2a9bb941c795ab501dcff494cb73288e67aa4172a2c97" Jan 22 13:39:06 crc kubenswrapper[4773]: I0122 13:39:06.672996 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d" path="/var/lib/kubelet/pods/b264c7ce-d6d4-4c9b-9cc4-9567db7fd96d/volumes" Jan 22 13:39:12 crc kubenswrapper[4773]: I0122 13:39:12.644058 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:39:12 crc kubenswrapper[4773]: I0122 13:39:12.646389 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:39:14 crc kubenswrapper[4773]: I0122 13:39:14.499055 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:39:14 crc kubenswrapper[4773]: I0122 13:39:14.543197 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:39:14 crc kubenswrapper[4773]: I0122 13:39:14.622962 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-857f64b994-88lxs"] Jan 22 13:39:14 crc kubenswrapper[4773]: I0122 13:39:14.623222 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-857f64b994-88lxs" podUID="314dc0ed-793c-4765-9a5e-5ada17679078" containerName="horizon-log" containerID="cri-o://fb14b98a11e9e5a3918ef6e2749eac04388344e802be6168b2a7a5d0b22a1bd0" gracePeriod=30 Jan 22 13:39:14 crc kubenswrapper[4773]: I0122 13:39:14.624474 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-857f64b994-88lxs" podUID="314dc0ed-793c-4765-9a5e-5ada17679078" containerName="horizon" containerID="cri-o://1a366491e858a37cbde7e4acf2ec962e4c401a45f14d158448ca5291419bad05" gracePeriod=30 Jan 22 13:39:18 crc kubenswrapper[4773]: E0122 13:39:18.200412 4773 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f369587_17dc_4fa8_9aac_5155d9ae0738.slice/crio-34d31a15cfb46a2b0d0e7b2af06de2110c4f3ef9da4621623f5906143fae1333.scope\": RecentStats: unable to find data in memory cache]" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.460991 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.481419 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.596215 4773 generic.go:334] "Generic (PLEG): container finished" podID="314dc0ed-793c-4765-9a5e-5ada17679078" containerID="1a366491e858a37cbde7e4acf2ec962e4c401a45f14d158448ca5291419bad05" exitCode=0 Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.596277 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-857f64b994-88lxs" event={"ID":"314dc0ed-793c-4765-9a5e-5ada17679078","Type":"ContainerDied","Data":"1a366491e858a37cbde7e4acf2ec962e4c401a45f14d158448ca5291419bad05"} Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.597585 4773 generic.go:334] "Generic (PLEG): container finished" podID="a31568c1-96aa-48db-9a76-0ec13e7cae6a" containerID="8e3f17f679e76f8d425d52b9dab0bd4781888c8244072b5fe8306f805f04a3b4" exitCode=137 Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.597611 4773 generic.go:334] "Generic (PLEG): container finished" podID="a31568c1-96aa-48db-9a76-0ec13e7cae6a" containerID="94fd1cd1f50029d64a242a3f66bd8315df69e1f6adc9a2350679db76d2ec56dc" exitCode=137 Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.597643 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56b949bc6f-hxx8c" event={"ID":"a31568c1-96aa-48db-9a76-0ec13e7cae6a","Type":"ContainerDied","Data":"8e3f17f679e76f8d425d52b9dab0bd4781888c8244072b5fe8306f805f04a3b4"} Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.597660 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56b949bc6f-hxx8c" event={"ID":"a31568c1-96aa-48db-9a76-0ec13e7cae6a","Type":"ContainerDied","Data":"94fd1cd1f50029d64a242a3f66bd8315df69e1f6adc9a2350679db76d2ec56dc"} Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.597670 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-56b949bc6f-hxx8c" event={"ID":"a31568c1-96aa-48db-9a76-0ec13e7cae6a","Type":"ContainerDied","Data":"d01dfec2b86d620b9a7fed9998f62228ce4db4e2e5f12a40ce66d481e0a671f5"} Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.597686 4773 scope.go:117] "RemoveContainer" containerID="8e3f17f679e76f8d425d52b9dab0bd4781888c8244072b5fe8306f805f04a3b4" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.597810 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-56b949bc6f-hxx8c" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.600983 4773 generic.go:334] "Generic (PLEG): container finished" podID="5f369587-17dc-4fa8-9aac-5155d9ae0738" containerID="34d31a15cfb46a2b0d0e7b2af06de2110c4f3ef9da4621623f5906143fae1333" exitCode=137 Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.601019 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7558bc6d45-42mbj" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.601031 4773 generic.go:334] "Generic (PLEG): container finished" podID="5f369587-17dc-4fa8-9aac-5155d9ae0738" containerID="f4e4b3314d6ea80653eba674aa215359226fb0afa24404ed9aa9467d162b96f5" exitCode=137 Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.601054 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7558bc6d45-42mbj" event={"ID":"5f369587-17dc-4fa8-9aac-5155d9ae0738","Type":"ContainerDied","Data":"34d31a15cfb46a2b0d0e7b2af06de2110c4f3ef9da4621623f5906143fae1333"} Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.601085 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7558bc6d45-42mbj" event={"ID":"5f369587-17dc-4fa8-9aac-5155d9ae0738","Type":"ContainerDied","Data":"f4e4b3314d6ea80653eba674aa215359226fb0afa24404ed9aa9467d162b96f5"} Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.601097 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7558bc6d45-42mbj" event={"ID":"5f369587-17dc-4fa8-9aac-5155d9ae0738","Type":"ContainerDied","Data":"5fab145b673295fb5c724f5debec1dea3e5d877d309960b2ddc2db90354bfc30"} Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.642020 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5f369587-17dc-4fa8-9aac-5155d9ae0738-config-data\") pod \"5f369587-17dc-4fa8-9aac-5155d9ae0738\" (UID: \"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.642503 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a31568c1-96aa-48db-9a76-0ec13e7cae6a-logs\") pod \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.642670 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a31568c1-96aa-48db-9a76-0ec13e7cae6a-horizon-secret-key\") pod \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.643032 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a31568c1-96aa-48db-9a76-0ec13e7cae6a-logs" (OuterVolumeSpecName: "logs") pod "a31568c1-96aa-48db-9a76-0ec13e7cae6a" (UID: "a31568c1-96aa-48db-9a76-0ec13e7cae6a"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.643254 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hdh4\" (UniqueName: \"kubernetes.io/projected/5f369587-17dc-4fa8-9aac-5155d9ae0738-kube-api-access-2hdh4\") pod \"5f369587-17dc-4fa8-9aac-5155d9ae0738\" (UID: \"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.643439 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a31568c1-96aa-48db-9a76-0ec13e7cae6a-config-data\") pod \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.643544 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5f369587-17dc-4fa8-9aac-5155d9ae0738-horizon-secret-key\") pod \"5f369587-17dc-4fa8-9aac-5155d9ae0738\" (UID: \"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.643657 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvd77\" (UniqueName: \"kubernetes.io/projected/a31568c1-96aa-48db-9a76-0ec13e7cae6a-kube-api-access-kvd77\") pod \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.643751 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f369587-17dc-4fa8-9aac-5155d9ae0738-logs\") pod \"5f369587-17dc-4fa8-9aac-5155d9ae0738\" (UID: \"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.643860 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f369587-17dc-4fa8-9aac-5155d9ae0738-scripts\") pod \"5f369587-17dc-4fa8-9aac-5155d9ae0738\" (UID: \"5f369587-17dc-4fa8-9aac-5155d9ae0738\") " Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.643967 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a31568c1-96aa-48db-9a76-0ec13e7cae6a-scripts\") pod \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\" (UID: \"a31568c1-96aa-48db-9a76-0ec13e7cae6a\") " Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.644747 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a31568c1-96aa-48db-9a76-0ec13e7cae6a-logs\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.645973 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f369587-17dc-4fa8-9aac-5155d9ae0738-logs" (OuterVolumeSpecName: "logs") pod "5f369587-17dc-4fa8-9aac-5155d9ae0738" (UID: "5f369587-17dc-4fa8-9aac-5155d9ae0738"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.650915 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f369587-17dc-4fa8-9aac-5155d9ae0738-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "5f369587-17dc-4fa8-9aac-5155d9ae0738" (UID: "5f369587-17dc-4fa8-9aac-5155d9ae0738"). 
InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.651165 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31568c1-96aa-48db-9a76-0ec13e7cae6a-kube-api-access-kvd77" (OuterVolumeSpecName: "kube-api-access-kvd77") pod "a31568c1-96aa-48db-9a76-0ec13e7cae6a" (UID: "a31568c1-96aa-48db-9a76-0ec13e7cae6a"). InnerVolumeSpecName "kube-api-access-kvd77". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.651660 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31568c1-96aa-48db-9a76-0ec13e7cae6a-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "a31568c1-96aa-48db-9a76-0ec13e7cae6a" (UID: "a31568c1-96aa-48db-9a76-0ec13e7cae6a"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.651719 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f369587-17dc-4fa8-9aac-5155d9ae0738-kube-api-access-2hdh4" (OuterVolumeSpecName: "kube-api-access-2hdh4") pod "5f369587-17dc-4fa8-9aac-5155d9ae0738" (UID: "5f369587-17dc-4fa8-9aac-5155d9ae0738"). InnerVolumeSpecName "kube-api-access-2hdh4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.672138 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f369587-17dc-4fa8-9aac-5155d9ae0738-config-data" (OuterVolumeSpecName: "config-data") pod "5f369587-17dc-4fa8-9aac-5155d9ae0738" (UID: "5f369587-17dc-4fa8-9aac-5155d9ae0738"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.674206 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f369587-17dc-4fa8-9aac-5155d9ae0738-scripts" (OuterVolumeSpecName: "scripts") pod "5f369587-17dc-4fa8-9aac-5155d9ae0738" (UID: "5f369587-17dc-4fa8-9aac-5155d9ae0738"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.676032 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31568c1-96aa-48db-9a76-0ec13e7cae6a-scripts" (OuterVolumeSpecName: "scripts") pod "a31568c1-96aa-48db-9a76-0ec13e7cae6a" (UID: "a31568c1-96aa-48db-9a76-0ec13e7cae6a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.682694 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31568c1-96aa-48db-9a76-0ec13e7cae6a-config-data" (OuterVolumeSpecName: "config-data") pod "a31568c1-96aa-48db-9a76-0ec13e7cae6a" (UID: "a31568c1-96aa-48db-9a76-0ec13e7cae6a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.748976 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a31568c1-96aa-48db-9a76-0ec13e7cae6a-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.749015 4773 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5f369587-17dc-4fa8-9aac-5155d9ae0738-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.749027 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvd77\" (UniqueName: \"kubernetes.io/projected/a31568c1-96aa-48db-9a76-0ec13e7cae6a-kube-api-access-kvd77\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.749037 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f369587-17dc-4fa8-9aac-5155d9ae0738-logs\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.749045 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f369587-17dc-4fa8-9aac-5155d9ae0738-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.749054 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a31568c1-96aa-48db-9a76-0ec13e7cae6a-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.749065 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5f369587-17dc-4fa8-9aac-5155d9ae0738-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.749073 4773 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a31568c1-96aa-48db-9a76-0ec13e7cae6a-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.749087 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hdh4\" (UniqueName: \"kubernetes.io/projected/5f369587-17dc-4fa8-9aac-5155d9ae0738-kube-api-access-2hdh4\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.772984 4773 scope.go:117] "RemoveContainer" containerID="94fd1cd1f50029d64a242a3f66bd8315df69e1f6adc9a2350679db76d2ec56dc" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.793081 4773 scope.go:117] "RemoveContainer" containerID="8e3f17f679e76f8d425d52b9dab0bd4781888c8244072b5fe8306f805f04a3b4" Jan 22 13:39:18 crc kubenswrapper[4773]: E0122 13:39:18.793572 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e3f17f679e76f8d425d52b9dab0bd4781888c8244072b5fe8306f805f04a3b4\": container with ID starting with 8e3f17f679e76f8d425d52b9dab0bd4781888c8244072b5fe8306f805f04a3b4 not found: ID does not exist" containerID="8e3f17f679e76f8d425d52b9dab0bd4781888c8244072b5fe8306f805f04a3b4" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.793603 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e3f17f679e76f8d425d52b9dab0bd4781888c8244072b5fe8306f805f04a3b4"} err="failed to get container status 
\"8e3f17f679e76f8d425d52b9dab0bd4781888c8244072b5fe8306f805f04a3b4\": rpc error: code = NotFound desc = could not find container \"8e3f17f679e76f8d425d52b9dab0bd4781888c8244072b5fe8306f805f04a3b4\": container with ID starting with 8e3f17f679e76f8d425d52b9dab0bd4781888c8244072b5fe8306f805f04a3b4 not found: ID does not exist" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.793626 4773 scope.go:117] "RemoveContainer" containerID="94fd1cd1f50029d64a242a3f66bd8315df69e1f6adc9a2350679db76d2ec56dc" Jan 22 13:39:18 crc kubenswrapper[4773]: E0122 13:39:18.793888 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94fd1cd1f50029d64a242a3f66bd8315df69e1f6adc9a2350679db76d2ec56dc\": container with ID starting with 94fd1cd1f50029d64a242a3f66bd8315df69e1f6adc9a2350679db76d2ec56dc not found: ID does not exist" containerID="94fd1cd1f50029d64a242a3f66bd8315df69e1f6adc9a2350679db76d2ec56dc" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.793916 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94fd1cd1f50029d64a242a3f66bd8315df69e1f6adc9a2350679db76d2ec56dc"} err="failed to get container status \"94fd1cd1f50029d64a242a3f66bd8315df69e1f6adc9a2350679db76d2ec56dc\": rpc error: code = NotFound desc = could not find container \"94fd1cd1f50029d64a242a3f66bd8315df69e1f6adc9a2350679db76d2ec56dc\": container with ID starting with 94fd1cd1f50029d64a242a3f66bd8315df69e1f6adc9a2350679db76d2ec56dc not found: ID does not exist" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.793929 4773 scope.go:117] "RemoveContainer" containerID="8e3f17f679e76f8d425d52b9dab0bd4781888c8244072b5fe8306f805f04a3b4" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.794315 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e3f17f679e76f8d425d52b9dab0bd4781888c8244072b5fe8306f805f04a3b4"} err="failed to get container status \"8e3f17f679e76f8d425d52b9dab0bd4781888c8244072b5fe8306f805f04a3b4\": rpc error: code = NotFound desc = could not find container \"8e3f17f679e76f8d425d52b9dab0bd4781888c8244072b5fe8306f805f04a3b4\": container with ID starting with 8e3f17f679e76f8d425d52b9dab0bd4781888c8244072b5fe8306f805f04a3b4 not found: ID does not exist" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.794342 4773 scope.go:117] "RemoveContainer" containerID="94fd1cd1f50029d64a242a3f66bd8315df69e1f6adc9a2350679db76d2ec56dc" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.794661 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94fd1cd1f50029d64a242a3f66bd8315df69e1f6adc9a2350679db76d2ec56dc"} err="failed to get container status \"94fd1cd1f50029d64a242a3f66bd8315df69e1f6adc9a2350679db76d2ec56dc\": rpc error: code = NotFound desc = could not find container \"94fd1cd1f50029d64a242a3f66bd8315df69e1f6adc9a2350679db76d2ec56dc\": container with ID starting with 94fd1cd1f50029d64a242a3f66bd8315df69e1f6adc9a2350679db76d2ec56dc not found: ID does not exist" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.794685 4773 scope.go:117] "RemoveContainer" containerID="34d31a15cfb46a2b0d0e7b2af06de2110c4f3ef9da4621623f5906143fae1333" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.945104 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-56b949bc6f-hxx8c"] Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.957484 4773 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/horizon-56b949bc6f-hxx8c"] Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.962115 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7558bc6d45-42mbj"] Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.966031 4773 scope.go:117] "RemoveContainer" containerID="f4e4b3314d6ea80653eba674aa215359226fb0afa24404ed9aa9467d162b96f5" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.973029 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7558bc6d45-42mbj"] Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.984870 4773 scope.go:117] "RemoveContainer" containerID="34d31a15cfb46a2b0d0e7b2af06de2110c4f3ef9da4621623f5906143fae1333" Jan 22 13:39:18 crc kubenswrapper[4773]: E0122 13:39:18.985413 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34d31a15cfb46a2b0d0e7b2af06de2110c4f3ef9da4621623f5906143fae1333\": container with ID starting with 34d31a15cfb46a2b0d0e7b2af06de2110c4f3ef9da4621623f5906143fae1333 not found: ID does not exist" containerID="34d31a15cfb46a2b0d0e7b2af06de2110c4f3ef9da4621623f5906143fae1333" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.985444 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34d31a15cfb46a2b0d0e7b2af06de2110c4f3ef9da4621623f5906143fae1333"} err="failed to get container status \"34d31a15cfb46a2b0d0e7b2af06de2110c4f3ef9da4621623f5906143fae1333\": rpc error: code = NotFound desc = could not find container \"34d31a15cfb46a2b0d0e7b2af06de2110c4f3ef9da4621623f5906143fae1333\": container with ID starting with 34d31a15cfb46a2b0d0e7b2af06de2110c4f3ef9da4621623f5906143fae1333 not found: ID does not exist" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.985467 4773 scope.go:117] "RemoveContainer" containerID="f4e4b3314d6ea80653eba674aa215359226fb0afa24404ed9aa9467d162b96f5" Jan 22 13:39:18 crc kubenswrapper[4773]: E0122 13:39:18.985875 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4e4b3314d6ea80653eba674aa215359226fb0afa24404ed9aa9467d162b96f5\": container with ID starting with f4e4b3314d6ea80653eba674aa215359226fb0afa24404ed9aa9467d162b96f5 not found: ID does not exist" containerID="f4e4b3314d6ea80653eba674aa215359226fb0afa24404ed9aa9467d162b96f5" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.985896 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4e4b3314d6ea80653eba674aa215359226fb0afa24404ed9aa9467d162b96f5"} err="failed to get container status \"f4e4b3314d6ea80653eba674aa215359226fb0afa24404ed9aa9467d162b96f5\": rpc error: code = NotFound desc = could not find container \"f4e4b3314d6ea80653eba674aa215359226fb0afa24404ed9aa9467d162b96f5\": container with ID starting with f4e4b3314d6ea80653eba674aa215359226fb0afa24404ed9aa9467d162b96f5 not found: ID does not exist" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.985915 4773 scope.go:117] "RemoveContainer" containerID="34d31a15cfb46a2b0d0e7b2af06de2110c4f3ef9da4621623f5906143fae1333" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.986133 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34d31a15cfb46a2b0d0e7b2af06de2110c4f3ef9da4621623f5906143fae1333"} err="failed to get container status \"34d31a15cfb46a2b0d0e7b2af06de2110c4f3ef9da4621623f5906143fae1333\": rpc 
error: code = NotFound desc = could not find container \"34d31a15cfb46a2b0d0e7b2af06de2110c4f3ef9da4621623f5906143fae1333\": container with ID starting with 34d31a15cfb46a2b0d0e7b2af06de2110c4f3ef9da4621623f5906143fae1333 not found: ID does not exist" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.986153 4773 scope.go:117] "RemoveContainer" containerID="f4e4b3314d6ea80653eba674aa215359226fb0afa24404ed9aa9467d162b96f5" Jan 22 13:39:18 crc kubenswrapper[4773]: I0122 13:39:18.986555 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4e4b3314d6ea80653eba674aa215359226fb0afa24404ed9aa9467d162b96f5"} err="failed to get container status \"f4e4b3314d6ea80653eba674aa215359226fb0afa24404ed9aa9467d162b96f5\": rpc error: code = NotFound desc = could not find container \"f4e4b3314d6ea80653eba674aa215359226fb0afa24404ed9aa9467d162b96f5\": container with ID starting with f4e4b3314d6ea80653eba674aa215359226fb0afa24404ed9aa9467d162b96f5 not found: ID does not exist" Jan 22 13:39:20 crc kubenswrapper[4773]: I0122 13:39:20.668321 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f369587-17dc-4fa8-9aac-5155d9ae0738" path="/var/lib/kubelet/pods/5f369587-17dc-4fa8-9aac-5155d9ae0738/volumes" Jan 22 13:39:20 crc kubenswrapper[4773]: I0122 13:39:20.669451 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31568c1-96aa-48db-9a76-0ec13e7cae6a" path="/var/lib/kubelet/pods/a31568c1-96aa-48db-9a76-0ec13e7cae6a/volumes" Jan 22 13:39:20 crc kubenswrapper[4773]: I0122 13:39:20.708019 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-857f64b994-88lxs" podUID="314dc0ed-793c-4765-9a5e-5ada17679078" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.123:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.123:8443: connect: connection refused" Jan 22 13:39:30 crc kubenswrapper[4773]: I0122 13:39:30.707774 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-857f64b994-88lxs" podUID="314dc0ed-793c-4765-9a5e-5ada17679078" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.123:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.123:8443: connect: connection refused" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.440672 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7t979"] Jan 22 13:39:39 crc kubenswrapper[4773]: E0122 13:39:39.442339 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a31568c1-96aa-48db-9a76-0ec13e7cae6a" containerName="horizon" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.442376 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a31568c1-96aa-48db-9a76-0ec13e7cae6a" containerName="horizon" Jan 22 13:39:39 crc kubenswrapper[4773]: E0122 13:39:39.442454 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a31568c1-96aa-48db-9a76-0ec13e7cae6a" containerName="horizon-log" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.442475 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a31568c1-96aa-48db-9a76-0ec13e7cae6a" containerName="horizon-log" Jan 22 13:39:39 crc kubenswrapper[4773]: E0122 13:39:39.442511 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f369587-17dc-4fa8-9aac-5155d9ae0738" containerName="horizon-log" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.442528 4773 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="5f369587-17dc-4fa8-9aac-5155d9ae0738" containerName="horizon-log" Jan 22 13:39:39 crc kubenswrapper[4773]: E0122 13:39:39.442558 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f369587-17dc-4fa8-9aac-5155d9ae0738" containerName="horizon" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.442574 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f369587-17dc-4fa8-9aac-5155d9ae0738" containerName="horizon" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.443044 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="a31568c1-96aa-48db-9a76-0ec13e7cae6a" containerName="horizon-log" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.443089 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f369587-17dc-4fa8-9aac-5155d9ae0738" containerName="horizon" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.443138 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="a31568c1-96aa-48db-9a76-0ec13e7cae6a" containerName="horizon" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.443205 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f369587-17dc-4fa8-9aac-5155d9ae0738" containerName="horizon-log" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.446610 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7t979" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.450777 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7t979"] Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.595366 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8eed3c6-c3af-4f8f-8c1d-1830c9016898-catalog-content\") pod \"community-operators-7t979\" (UID: \"a8eed3c6-c3af-4f8f-8c1d-1830c9016898\") " pod="openshift-marketplace/community-operators-7t979" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.595741 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fp7mk\" (UniqueName: \"kubernetes.io/projected/a8eed3c6-c3af-4f8f-8c1d-1830c9016898-kube-api-access-fp7mk\") pod \"community-operators-7t979\" (UID: \"a8eed3c6-c3af-4f8f-8c1d-1830c9016898\") " pod="openshift-marketplace/community-operators-7t979" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.595845 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8eed3c6-c3af-4f8f-8c1d-1830c9016898-utilities\") pod \"community-operators-7t979\" (UID: \"a8eed3c6-c3af-4f8f-8c1d-1830c9016898\") " pod="openshift-marketplace/community-operators-7t979" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.698046 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8eed3c6-c3af-4f8f-8c1d-1830c9016898-catalog-content\") pod \"community-operators-7t979\" (UID: \"a8eed3c6-c3af-4f8f-8c1d-1830c9016898\") " pod="openshift-marketplace/community-operators-7t979" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.698195 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fp7mk\" (UniqueName: \"kubernetes.io/projected/a8eed3c6-c3af-4f8f-8c1d-1830c9016898-kube-api-access-fp7mk\") pod 
\"community-operators-7t979\" (UID: \"a8eed3c6-c3af-4f8f-8c1d-1830c9016898\") " pod="openshift-marketplace/community-operators-7t979" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.698214 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8eed3c6-c3af-4f8f-8c1d-1830c9016898-utilities\") pod \"community-operators-7t979\" (UID: \"a8eed3c6-c3af-4f8f-8c1d-1830c9016898\") " pod="openshift-marketplace/community-operators-7t979" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.698717 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8eed3c6-c3af-4f8f-8c1d-1830c9016898-utilities\") pod \"community-operators-7t979\" (UID: \"a8eed3c6-c3af-4f8f-8c1d-1830c9016898\") " pod="openshift-marketplace/community-operators-7t979" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.698919 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8eed3c6-c3af-4f8f-8c1d-1830c9016898-catalog-content\") pod \"community-operators-7t979\" (UID: \"a8eed3c6-c3af-4f8f-8c1d-1830c9016898\") " pod="openshift-marketplace/community-operators-7t979" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.719423 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fp7mk\" (UniqueName: \"kubernetes.io/projected/a8eed3c6-c3af-4f8f-8c1d-1830c9016898-kube-api-access-fp7mk\") pod \"community-operators-7t979\" (UID: \"a8eed3c6-c3af-4f8f-8c1d-1830c9016898\") " pod="openshift-marketplace/community-operators-7t979" Jan 22 13:39:39 crc kubenswrapper[4773]: I0122 13:39:39.777645 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7t979" Jan 22 13:39:40 crc kubenswrapper[4773]: I0122 13:39:40.395942 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7t979"] Jan 22 13:39:40 crc kubenswrapper[4773]: W0122 13:39:40.401880 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda8eed3c6_c3af_4f8f_8c1d_1830c9016898.slice/crio-0523d2115d6cd80032daf65b2c996965187f4cb7de8c026c0eb820dab875ced3 WatchSource:0}: Error finding container 0523d2115d6cd80032daf65b2c996965187f4cb7de8c026c0eb820dab875ced3: Status 404 returned error can't find the container with id 0523d2115d6cd80032daf65b2c996965187f4cb7de8c026c0eb820dab875ced3 Jan 22 13:39:40 crc kubenswrapper[4773]: I0122 13:39:40.708340 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-857f64b994-88lxs" podUID="314dc0ed-793c-4765-9a5e-5ada17679078" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.123:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.123:8443: connect: connection refused" Jan 22 13:39:40 crc kubenswrapper[4773]: I0122 13:39:40.708778 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:39:40 crc kubenswrapper[4773]: I0122 13:39:40.843824 4773 generic.go:334] "Generic (PLEG): container finished" podID="a8eed3c6-c3af-4f8f-8c1d-1830c9016898" containerID="89a26a646a5311a1049bc3f54f03ae167633f53486488cbb01b925b0c504c1fa" exitCode=0 Jan 22 13:39:40 crc kubenswrapper[4773]: I0122 13:39:40.843865 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7t979" event={"ID":"a8eed3c6-c3af-4f8f-8c1d-1830c9016898","Type":"ContainerDied","Data":"89a26a646a5311a1049bc3f54f03ae167633f53486488cbb01b925b0c504c1fa"} Jan 22 13:39:40 crc kubenswrapper[4773]: I0122 13:39:40.843893 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7t979" event={"ID":"a8eed3c6-c3af-4f8f-8c1d-1830c9016898","Type":"ContainerStarted","Data":"0523d2115d6cd80032daf65b2c996965187f4cb7de8c026c0eb820dab875ced3"} Jan 22 13:39:41 crc kubenswrapper[4773]: I0122 13:39:41.857931 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7t979" event={"ID":"a8eed3c6-c3af-4f8f-8c1d-1830c9016898","Type":"ContainerStarted","Data":"ae7fdc807af60f1c84a5ca2bdd45329bc451fd1e3c2336a6d43157d3bf3866f3"} Jan 22 13:39:42 crc kubenswrapper[4773]: I0122 13:39:42.875111 4773 generic.go:334] "Generic (PLEG): container finished" podID="a8eed3c6-c3af-4f8f-8c1d-1830c9016898" containerID="ae7fdc807af60f1c84a5ca2bdd45329bc451fd1e3c2336a6d43157d3bf3866f3" exitCode=0 Jan 22 13:39:42 crc kubenswrapper[4773]: I0122 13:39:42.875231 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7t979" event={"ID":"a8eed3c6-c3af-4f8f-8c1d-1830c9016898","Type":"ContainerDied","Data":"ae7fdc807af60f1c84a5ca2bdd45329bc451fd1e3c2336a6d43157d3bf3866f3"} Jan 22 13:39:43 crc kubenswrapper[4773]: I0122 13:39:43.900272 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7t979" event={"ID":"a8eed3c6-c3af-4f8f-8c1d-1830c9016898","Type":"ContainerStarted","Data":"0859e336f525782afe592680cb7e25b613d48b4c803c1a3884221573cba4f1a2"} Jan 22 13:39:43 crc kubenswrapper[4773]: I0122 13:39:43.931708 4773 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7t979" podStartSLOduration=2.475827168 podStartE2EDuration="4.931683583s" podCreationTimestamp="2026-01-22 13:39:39 +0000 UTC" firstStartedPulling="2026-01-22 13:39:40.849147575 +0000 UTC m=+6288.427263410" lastFinishedPulling="2026-01-22 13:39:43.305004 +0000 UTC m=+6290.883119825" observedRunningTime="2026-01-22 13:39:43.925826338 +0000 UTC m=+6291.503942223" watchObservedRunningTime="2026-01-22 13:39:43.931683583 +0000 UTC m=+6291.509799408" Jan 22 13:39:44 crc kubenswrapper[4773]: I0122 13:39:44.916964 4773 generic.go:334] "Generic (PLEG): container finished" podID="314dc0ed-793c-4765-9a5e-5ada17679078" containerID="fb14b98a11e9e5a3918ef6e2749eac04388344e802be6168b2a7a5d0b22a1bd0" exitCode=137 Jan 22 13:39:44 crc kubenswrapper[4773]: I0122 13:39:44.918420 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-857f64b994-88lxs" event={"ID":"314dc0ed-793c-4765-9a5e-5ada17679078","Type":"ContainerDied","Data":"fb14b98a11e9e5a3918ef6e2749eac04388344e802be6168b2a7a5d0b22a1bd0"} Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.067464 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.131538 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/314dc0ed-793c-4765-9a5e-5ada17679078-logs\") pod \"314dc0ed-793c-4765-9a5e-5ada17679078\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.131630 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/314dc0ed-793c-4765-9a5e-5ada17679078-horizon-secret-key\") pod \"314dc0ed-793c-4765-9a5e-5ada17679078\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.131664 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/314dc0ed-793c-4765-9a5e-5ada17679078-scripts\") pod \"314dc0ed-793c-4765-9a5e-5ada17679078\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.131689 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/314dc0ed-793c-4765-9a5e-5ada17679078-horizon-tls-certs\") pod \"314dc0ed-793c-4765-9a5e-5ada17679078\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.131713 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/314dc0ed-793c-4765-9a5e-5ada17679078-combined-ca-bundle\") pod \"314dc0ed-793c-4765-9a5e-5ada17679078\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.131816 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/314dc0ed-793c-4765-9a5e-5ada17679078-config-data\") pod \"314dc0ed-793c-4765-9a5e-5ada17679078\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.131870 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"kube-api-access-p5md6\" (UniqueName: \"kubernetes.io/projected/314dc0ed-793c-4765-9a5e-5ada17679078-kube-api-access-p5md6\") pod \"314dc0ed-793c-4765-9a5e-5ada17679078\" (UID: \"314dc0ed-793c-4765-9a5e-5ada17679078\") " Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.132020 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/314dc0ed-793c-4765-9a5e-5ada17679078-logs" (OuterVolumeSpecName: "logs") pod "314dc0ed-793c-4765-9a5e-5ada17679078" (UID: "314dc0ed-793c-4765-9a5e-5ada17679078"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.132333 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/314dc0ed-793c-4765-9a5e-5ada17679078-logs\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.138371 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/314dc0ed-793c-4765-9a5e-5ada17679078-kube-api-access-p5md6" (OuterVolumeSpecName: "kube-api-access-p5md6") pod "314dc0ed-793c-4765-9a5e-5ada17679078" (UID: "314dc0ed-793c-4765-9a5e-5ada17679078"). InnerVolumeSpecName "kube-api-access-p5md6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.145489 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/314dc0ed-793c-4765-9a5e-5ada17679078-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "314dc0ed-793c-4765-9a5e-5ada17679078" (UID: "314dc0ed-793c-4765-9a5e-5ada17679078"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.167383 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/314dc0ed-793c-4765-9a5e-5ada17679078-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "314dc0ed-793c-4765-9a5e-5ada17679078" (UID: "314dc0ed-793c-4765-9a5e-5ada17679078"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.168644 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/314dc0ed-793c-4765-9a5e-5ada17679078-scripts" (OuterVolumeSpecName: "scripts") pod "314dc0ed-793c-4765-9a5e-5ada17679078" (UID: "314dc0ed-793c-4765-9a5e-5ada17679078"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.171965 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/314dc0ed-793c-4765-9a5e-5ada17679078-config-data" (OuterVolumeSpecName: "config-data") pod "314dc0ed-793c-4765-9a5e-5ada17679078" (UID: "314dc0ed-793c-4765-9a5e-5ada17679078"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.205096 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/314dc0ed-793c-4765-9a5e-5ada17679078-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "314dc0ed-793c-4765-9a5e-5ada17679078" (UID: "314dc0ed-793c-4765-9a5e-5ada17679078"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.234458 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/314dc0ed-793c-4765-9a5e-5ada17679078-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.234500 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5md6\" (UniqueName: \"kubernetes.io/projected/314dc0ed-793c-4765-9a5e-5ada17679078-kube-api-access-p5md6\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.234511 4773 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/314dc0ed-793c-4765-9a5e-5ada17679078-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.234520 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/314dc0ed-793c-4765-9a5e-5ada17679078-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.234528 4773 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/314dc0ed-793c-4765-9a5e-5ada17679078-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.234538 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/314dc0ed-793c-4765-9a5e-5ada17679078-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.944102 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-857f64b994-88lxs" event={"ID":"314dc0ed-793c-4765-9a5e-5ada17679078","Type":"ContainerDied","Data":"e44c26033d1f4096a84013ddb81af92c32752ce059f872ec8a205cc18b90c9e5"} Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.944170 4773 scope.go:117] "RemoveContainer" containerID="1a366491e858a37cbde7e4acf2ec962e4c401a45f14d158448ca5291419bad05" Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.944530 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-857f64b994-88lxs" Jan 22 13:39:45 crc kubenswrapper[4773]: I0122 13:39:45.992867 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-857f64b994-88lxs"] Jan 22 13:39:46 crc kubenswrapper[4773]: I0122 13:39:46.000982 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-857f64b994-88lxs"] Jan 22 13:39:46 crc kubenswrapper[4773]: I0122 13:39:46.138670 4773 scope.go:117] "RemoveContainer" containerID="fb14b98a11e9e5a3918ef6e2749eac04388344e802be6168b2a7a5d0b22a1bd0" Jan 22 13:39:46 crc kubenswrapper[4773]: I0122 13:39:46.670734 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="314dc0ed-793c-4765-9a5e-5ada17679078" path="/var/lib/kubelet/pods/314dc0ed-793c-4765-9a5e-5ada17679078/volumes" Jan 22 13:39:49 crc kubenswrapper[4773]: I0122 13:39:49.778696 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7t979" Jan 22 13:39:49 crc kubenswrapper[4773]: I0122 13:39:49.779301 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7t979" Jan 22 13:39:49 crc kubenswrapper[4773]: I0122 13:39:49.824980 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7t979" Jan 22 13:39:50 crc kubenswrapper[4773]: I0122 13:39:50.044089 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7t979" Jan 22 13:39:50 crc kubenswrapper[4773]: I0122 13:39:50.097098 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7t979"] Jan 22 13:39:52 crc kubenswrapper[4773]: I0122 13:39:52.015148 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7t979" podUID="a8eed3c6-c3af-4f8f-8c1d-1830c9016898" containerName="registry-server" containerID="cri-o://0859e336f525782afe592680cb7e25b613d48b4c803c1a3884221573cba4f1a2" gracePeriod=2 Jan 22 13:39:52 crc kubenswrapper[4773]: I0122 13:39:52.502840 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7t979" Jan 22 13:39:52 crc kubenswrapper[4773]: I0122 13:39:52.696101 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fp7mk\" (UniqueName: \"kubernetes.io/projected/a8eed3c6-c3af-4f8f-8c1d-1830c9016898-kube-api-access-fp7mk\") pod \"a8eed3c6-c3af-4f8f-8c1d-1830c9016898\" (UID: \"a8eed3c6-c3af-4f8f-8c1d-1830c9016898\") " Jan 22 13:39:52 crc kubenswrapper[4773]: I0122 13:39:52.696277 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8eed3c6-c3af-4f8f-8c1d-1830c9016898-catalog-content\") pod \"a8eed3c6-c3af-4f8f-8c1d-1830c9016898\" (UID: \"a8eed3c6-c3af-4f8f-8c1d-1830c9016898\") " Jan 22 13:39:52 crc kubenswrapper[4773]: I0122 13:39:52.696602 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8eed3c6-c3af-4f8f-8c1d-1830c9016898-utilities\") pod \"a8eed3c6-c3af-4f8f-8c1d-1830c9016898\" (UID: \"a8eed3c6-c3af-4f8f-8c1d-1830c9016898\") " Jan 22 13:39:52 crc kubenswrapper[4773]: I0122 13:39:52.697866 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8eed3c6-c3af-4f8f-8c1d-1830c9016898-utilities" (OuterVolumeSpecName: "utilities") pod "a8eed3c6-c3af-4f8f-8c1d-1830c9016898" (UID: "a8eed3c6-c3af-4f8f-8c1d-1830c9016898"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:39:52 crc kubenswrapper[4773]: I0122 13:39:52.698062 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8eed3c6-c3af-4f8f-8c1d-1830c9016898-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:52 crc kubenswrapper[4773]: I0122 13:39:52.719261 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8eed3c6-c3af-4f8f-8c1d-1830c9016898-kube-api-access-fp7mk" (OuterVolumeSpecName: "kube-api-access-fp7mk") pod "a8eed3c6-c3af-4f8f-8c1d-1830c9016898" (UID: "a8eed3c6-c3af-4f8f-8c1d-1830c9016898"). InnerVolumeSpecName "kube-api-access-fp7mk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:39:52 crc kubenswrapper[4773]: I0122 13:39:52.753770 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8eed3c6-c3af-4f8f-8c1d-1830c9016898-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a8eed3c6-c3af-4f8f-8c1d-1830c9016898" (UID: "a8eed3c6-c3af-4f8f-8c1d-1830c9016898"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:39:52 crc kubenswrapper[4773]: I0122 13:39:52.800673 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fp7mk\" (UniqueName: \"kubernetes.io/projected/a8eed3c6-c3af-4f8f-8c1d-1830c9016898-kube-api-access-fp7mk\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:52 crc kubenswrapper[4773]: I0122 13:39:52.800728 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8eed3c6-c3af-4f8f-8c1d-1830c9016898-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 13:39:53 crc kubenswrapper[4773]: I0122 13:39:53.041344 4773 generic.go:334] "Generic (PLEG): container finished" podID="a8eed3c6-c3af-4f8f-8c1d-1830c9016898" containerID="0859e336f525782afe592680cb7e25b613d48b4c803c1a3884221573cba4f1a2" exitCode=0 Jan 22 13:39:53 crc kubenswrapper[4773]: I0122 13:39:53.041415 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7t979" event={"ID":"a8eed3c6-c3af-4f8f-8c1d-1830c9016898","Type":"ContainerDied","Data":"0859e336f525782afe592680cb7e25b613d48b4c803c1a3884221573cba4f1a2"} Jan 22 13:39:53 crc kubenswrapper[4773]: I0122 13:39:53.041461 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7t979" event={"ID":"a8eed3c6-c3af-4f8f-8c1d-1830c9016898","Type":"ContainerDied","Data":"0523d2115d6cd80032daf65b2c996965187f4cb7de8c026c0eb820dab875ced3"} Jan 22 13:39:53 crc kubenswrapper[4773]: I0122 13:39:53.041492 4773 scope.go:117] "RemoveContainer" containerID="0859e336f525782afe592680cb7e25b613d48b4c803c1a3884221573cba4f1a2" Jan 22 13:39:53 crc kubenswrapper[4773]: I0122 13:39:53.041764 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7t979" Jan 22 13:39:53 crc kubenswrapper[4773]: I0122 13:39:53.089145 4773 scope.go:117] "RemoveContainer" containerID="ae7fdc807af60f1c84a5ca2bdd45329bc451fd1e3c2336a6d43157d3bf3866f3" Jan 22 13:39:53 crc kubenswrapper[4773]: I0122 13:39:53.115699 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7t979"] Jan 22 13:39:53 crc kubenswrapper[4773]: I0122 13:39:53.129649 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7t979"] Jan 22 13:39:53 crc kubenswrapper[4773]: I0122 13:39:53.143445 4773 scope.go:117] "RemoveContainer" containerID="89a26a646a5311a1049bc3f54f03ae167633f53486488cbb01b925b0c504c1fa" Jan 22 13:39:53 crc kubenswrapper[4773]: I0122 13:39:53.186456 4773 scope.go:117] "RemoveContainer" containerID="0859e336f525782afe592680cb7e25b613d48b4c803c1a3884221573cba4f1a2" Jan 22 13:39:53 crc kubenswrapper[4773]: E0122 13:39:53.186935 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0859e336f525782afe592680cb7e25b613d48b4c803c1a3884221573cba4f1a2\": container with ID starting with 0859e336f525782afe592680cb7e25b613d48b4c803c1a3884221573cba4f1a2 not found: ID does not exist" containerID="0859e336f525782afe592680cb7e25b613d48b4c803c1a3884221573cba4f1a2" Jan 22 13:39:53 crc kubenswrapper[4773]: I0122 13:39:53.186977 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0859e336f525782afe592680cb7e25b613d48b4c803c1a3884221573cba4f1a2"} err="failed to get container status \"0859e336f525782afe592680cb7e25b613d48b4c803c1a3884221573cba4f1a2\": rpc error: code = NotFound desc = could not find container \"0859e336f525782afe592680cb7e25b613d48b4c803c1a3884221573cba4f1a2\": container with ID starting with 0859e336f525782afe592680cb7e25b613d48b4c803c1a3884221573cba4f1a2 not found: ID does not exist" Jan 22 13:39:53 crc kubenswrapper[4773]: I0122 13:39:53.187000 4773 scope.go:117] "RemoveContainer" containerID="ae7fdc807af60f1c84a5ca2bdd45329bc451fd1e3c2336a6d43157d3bf3866f3" Jan 22 13:39:53 crc kubenswrapper[4773]: E0122 13:39:53.187295 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae7fdc807af60f1c84a5ca2bdd45329bc451fd1e3c2336a6d43157d3bf3866f3\": container with ID starting with ae7fdc807af60f1c84a5ca2bdd45329bc451fd1e3c2336a6d43157d3bf3866f3 not found: ID does not exist" containerID="ae7fdc807af60f1c84a5ca2bdd45329bc451fd1e3c2336a6d43157d3bf3866f3" Jan 22 13:39:53 crc kubenswrapper[4773]: I0122 13:39:53.187325 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae7fdc807af60f1c84a5ca2bdd45329bc451fd1e3c2336a6d43157d3bf3866f3"} err="failed to get container status \"ae7fdc807af60f1c84a5ca2bdd45329bc451fd1e3c2336a6d43157d3bf3866f3\": rpc error: code = NotFound desc = could not find container \"ae7fdc807af60f1c84a5ca2bdd45329bc451fd1e3c2336a6d43157d3bf3866f3\": container with ID starting with ae7fdc807af60f1c84a5ca2bdd45329bc451fd1e3c2336a6d43157d3bf3866f3 not found: ID does not exist" Jan 22 13:39:53 crc kubenswrapper[4773]: I0122 13:39:53.187338 4773 scope.go:117] "RemoveContainer" containerID="89a26a646a5311a1049bc3f54f03ae167633f53486488cbb01b925b0c504c1fa" Jan 22 13:39:53 crc kubenswrapper[4773]: E0122 13:39:53.187647 4773 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"89a26a646a5311a1049bc3f54f03ae167633f53486488cbb01b925b0c504c1fa\": container with ID starting with 89a26a646a5311a1049bc3f54f03ae167633f53486488cbb01b925b0c504c1fa not found: ID does not exist" containerID="89a26a646a5311a1049bc3f54f03ae167633f53486488cbb01b925b0c504c1fa" Jan 22 13:39:53 crc kubenswrapper[4773]: I0122 13:39:53.187677 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89a26a646a5311a1049bc3f54f03ae167633f53486488cbb01b925b0c504c1fa"} err="failed to get container status \"89a26a646a5311a1049bc3f54f03ae167633f53486488cbb01b925b0c504c1fa\": rpc error: code = NotFound desc = could not find container \"89a26a646a5311a1049bc3f54f03ae167633f53486488cbb01b925b0c504c1fa\": container with ID starting with 89a26a646a5311a1049bc3f54f03ae167633f53486488cbb01b925b0c504c1fa not found: ID does not exist" Jan 22 13:39:54 crc kubenswrapper[4773]: I0122 13:39:54.674044 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8eed3c6-c3af-4f8f-8c1d-1830c9016898" path="/var/lib/kubelet/pods/a8eed3c6-c3af-4f8f-8c1d-1830c9016898/volumes" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.091811 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-64d88cbf74-c2bxb"] Jan 22 13:39:55 crc kubenswrapper[4773]: E0122 13:39:55.092551 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="314dc0ed-793c-4765-9a5e-5ada17679078" containerName="horizon-log" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.092570 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="314dc0ed-793c-4765-9a5e-5ada17679078" containerName="horizon-log" Jan 22 13:39:55 crc kubenswrapper[4773]: E0122 13:39:55.092579 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8eed3c6-c3af-4f8f-8c1d-1830c9016898" containerName="registry-server" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.092588 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8eed3c6-c3af-4f8f-8c1d-1830c9016898" containerName="registry-server" Jan 22 13:39:55 crc kubenswrapper[4773]: E0122 13:39:55.092615 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="314dc0ed-793c-4765-9a5e-5ada17679078" containerName="horizon" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.092622 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="314dc0ed-793c-4765-9a5e-5ada17679078" containerName="horizon" Jan 22 13:39:55 crc kubenswrapper[4773]: E0122 13:39:55.092637 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8eed3c6-c3af-4f8f-8c1d-1830c9016898" containerName="extract-utilities" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.092643 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8eed3c6-c3af-4f8f-8c1d-1830c9016898" containerName="extract-utilities" Jan 22 13:39:55 crc kubenswrapper[4773]: E0122 13:39:55.092661 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8eed3c6-c3af-4f8f-8c1d-1830c9016898" containerName="extract-content" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.092668 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8eed3c6-c3af-4f8f-8c1d-1830c9016898" containerName="extract-content" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.092846 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="314dc0ed-793c-4765-9a5e-5ada17679078" containerName="horizon-log" Jan 22 13:39:55 crc kubenswrapper[4773]: 
I0122 13:39:55.092862 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="314dc0ed-793c-4765-9a5e-5ada17679078" containerName="horizon" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.092882 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8eed3c6-c3af-4f8f-8c1d-1830c9016898" containerName="registry-server" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.093940 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.108025 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-64d88cbf74-c2bxb"] Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.293387 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/16615494-47e4-428d-b631-e41b55192f74-horizon-secret-key\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.293552 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/16615494-47e4-428d-b631-e41b55192f74-horizon-tls-certs\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.293620 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16615494-47e4-428d-b631-e41b55192f74-logs\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.293769 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/16615494-47e4-428d-b631-e41b55192f74-config-data\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.293947 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhf57\" (UniqueName: \"kubernetes.io/projected/16615494-47e4-428d-b631-e41b55192f74-kube-api-access-hhf57\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.294175 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/16615494-47e4-428d-b631-e41b55192f74-scripts\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.294327 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16615494-47e4-428d-b631-e41b55192f74-combined-ca-bundle\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.397274 4773 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16615494-47e4-428d-b631-e41b55192f74-combined-ca-bundle\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.397522 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/16615494-47e4-428d-b631-e41b55192f74-horizon-secret-key\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.397699 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/16615494-47e4-428d-b631-e41b55192f74-horizon-tls-certs\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.397770 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16615494-47e4-428d-b631-e41b55192f74-logs\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.397821 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/16615494-47e4-428d-b631-e41b55192f74-config-data\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.398007 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhf57\" (UniqueName: \"kubernetes.io/projected/16615494-47e4-428d-b631-e41b55192f74-kube-api-access-hhf57\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.398172 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/16615494-47e4-428d-b631-e41b55192f74-scripts\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.398172 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16615494-47e4-428d-b631-e41b55192f74-logs\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.398853 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/16615494-47e4-428d-b631-e41b55192f74-scripts\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.399275 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/16615494-47e4-428d-b631-e41b55192f74-config-data\") pod 
\"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.404277 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/16615494-47e4-428d-b631-e41b55192f74-horizon-secret-key\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.404677 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/16615494-47e4-428d-b631-e41b55192f74-horizon-tls-certs\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.407061 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16615494-47e4-428d-b631-e41b55192f74-combined-ca-bundle\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.423279 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhf57\" (UniqueName: \"kubernetes.io/projected/16615494-47e4-428d-b631-e41b55192f74-kube-api-access-hhf57\") pod \"horizon-64d88cbf74-c2bxb\" (UID: \"16615494-47e4-428d-b631-e41b55192f74\") " pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:55 crc kubenswrapper[4773]: I0122 13:39:55.711699 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:39:56 crc kubenswrapper[4773]: I0122 13:39:56.366890 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-64d88cbf74-c2bxb"] Jan 22 13:39:56 crc kubenswrapper[4773]: I0122 13:39:56.774040 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-ht6ks"] Jan 22 13:39:56 crc kubenswrapper[4773]: I0122 13:39:56.776586 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-ht6ks" Jan 22 13:39:56 crc kubenswrapper[4773]: I0122 13:39:56.797716 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-ht6ks"] Jan 22 13:39:56 crc kubenswrapper[4773]: I0122 13:39:56.835823 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-3c56-account-create-update-8fhjd"] Jan 22 13:39:56 crc kubenswrapper[4773]: I0122 13:39:56.837447 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-3c56-account-create-update-8fhjd" Jan 22 13:39:56 crc kubenswrapper[4773]: I0122 13:39:56.839480 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret" Jan 22 13:39:56 crc kubenswrapper[4773]: I0122 13:39:56.863581 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-3c56-account-create-update-8fhjd"] Jan 22 13:39:56 crc kubenswrapper[4773]: I0122 13:39:56.951058 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbn5k\" (UniqueName: \"kubernetes.io/projected/39f5a860-7df2-4ef7-a42e-e5e5baba0fba-kube-api-access-gbn5k\") pod \"heat-3c56-account-create-update-8fhjd\" (UID: \"39f5a860-7df2-4ef7-a42e-e5e5baba0fba\") " pod="openstack/heat-3c56-account-create-update-8fhjd" Jan 22 13:39:56 crc kubenswrapper[4773]: I0122 13:39:56.953327 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7p8h\" (UniqueName: \"kubernetes.io/projected/933daae7-11ca-4cde-9008-74ca1b6cbcb4-kube-api-access-w7p8h\") pod \"heat-db-create-ht6ks\" (UID: \"933daae7-11ca-4cde-9008-74ca1b6cbcb4\") " pod="openstack/heat-db-create-ht6ks" Jan 22 13:39:56 crc kubenswrapper[4773]: I0122 13:39:56.953425 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/933daae7-11ca-4cde-9008-74ca1b6cbcb4-operator-scripts\") pod \"heat-db-create-ht6ks\" (UID: \"933daae7-11ca-4cde-9008-74ca1b6cbcb4\") " pod="openstack/heat-db-create-ht6ks" Jan 22 13:39:56 crc kubenswrapper[4773]: I0122 13:39:56.954442 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39f5a860-7df2-4ef7-a42e-e5e5baba0fba-operator-scripts\") pod \"heat-3c56-account-create-update-8fhjd\" (UID: \"39f5a860-7df2-4ef7-a42e-e5e5baba0fba\") " pod="openstack/heat-3c56-account-create-update-8fhjd" Jan 22 13:39:57 crc kubenswrapper[4773]: I0122 13:39:57.057860 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbn5k\" (UniqueName: \"kubernetes.io/projected/39f5a860-7df2-4ef7-a42e-e5e5baba0fba-kube-api-access-gbn5k\") pod \"heat-3c56-account-create-update-8fhjd\" (UID: \"39f5a860-7df2-4ef7-a42e-e5e5baba0fba\") " pod="openstack/heat-3c56-account-create-update-8fhjd" Jan 22 13:39:57 crc kubenswrapper[4773]: I0122 13:39:57.058349 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7p8h\" (UniqueName: \"kubernetes.io/projected/933daae7-11ca-4cde-9008-74ca1b6cbcb4-kube-api-access-w7p8h\") pod \"heat-db-create-ht6ks\" (UID: \"933daae7-11ca-4cde-9008-74ca1b6cbcb4\") " pod="openstack/heat-db-create-ht6ks" Jan 22 13:39:57 crc kubenswrapper[4773]: I0122 13:39:57.058545 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/933daae7-11ca-4cde-9008-74ca1b6cbcb4-operator-scripts\") pod \"heat-db-create-ht6ks\" (UID: \"933daae7-11ca-4cde-9008-74ca1b6cbcb4\") " pod="openstack/heat-db-create-ht6ks" Jan 22 13:39:57 crc kubenswrapper[4773]: I0122 13:39:57.058597 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39f5a860-7df2-4ef7-a42e-e5e5baba0fba-operator-scripts\") pod 
\"heat-3c56-account-create-update-8fhjd\" (UID: \"39f5a860-7df2-4ef7-a42e-e5e5baba0fba\") " pod="openstack/heat-3c56-account-create-update-8fhjd" Jan 22 13:39:57 crc kubenswrapper[4773]: I0122 13:39:57.059350 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39f5a860-7df2-4ef7-a42e-e5e5baba0fba-operator-scripts\") pod \"heat-3c56-account-create-update-8fhjd\" (UID: \"39f5a860-7df2-4ef7-a42e-e5e5baba0fba\") " pod="openstack/heat-3c56-account-create-update-8fhjd" Jan 22 13:39:57 crc kubenswrapper[4773]: I0122 13:39:57.059382 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/933daae7-11ca-4cde-9008-74ca1b6cbcb4-operator-scripts\") pod \"heat-db-create-ht6ks\" (UID: \"933daae7-11ca-4cde-9008-74ca1b6cbcb4\") " pod="openstack/heat-db-create-ht6ks" Jan 22 13:39:57 crc kubenswrapper[4773]: I0122 13:39:57.075133 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7p8h\" (UniqueName: \"kubernetes.io/projected/933daae7-11ca-4cde-9008-74ca1b6cbcb4-kube-api-access-w7p8h\") pod \"heat-db-create-ht6ks\" (UID: \"933daae7-11ca-4cde-9008-74ca1b6cbcb4\") " pod="openstack/heat-db-create-ht6ks" Jan 22 13:39:57 crc kubenswrapper[4773]: I0122 13:39:57.078926 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbn5k\" (UniqueName: \"kubernetes.io/projected/39f5a860-7df2-4ef7-a42e-e5e5baba0fba-kube-api-access-gbn5k\") pod \"heat-3c56-account-create-update-8fhjd\" (UID: \"39f5a860-7df2-4ef7-a42e-e5e5baba0fba\") " pod="openstack/heat-3c56-account-create-update-8fhjd" Jan 22 13:39:57 crc kubenswrapper[4773]: I0122 13:39:57.101487 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64d88cbf74-c2bxb" event={"ID":"16615494-47e4-428d-b631-e41b55192f74","Type":"ContainerStarted","Data":"2e0616fbe5e36152708048c2ef5e18fc570be8f799c2dd427b7601428b0576db"} Jan 22 13:39:57 crc kubenswrapper[4773]: I0122 13:39:57.101533 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64d88cbf74-c2bxb" event={"ID":"16615494-47e4-428d-b631-e41b55192f74","Type":"ContainerStarted","Data":"36bf5a2b0588ccf9bba6a8e75cbb31f8363f67b80dfd7dec218ad8a07b8817b0"} Jan 22 13:39:57 crc kubenswrapper[4773]: I0122 13:39:57.101544 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-64d88cbf74-c2bxb" event={"ID":"16615494-47e4-428d-b631-e41b55192f74","Type":"ContainerStarted","Data":"fd741dcfcce67337f665361cbd91ce338f3a0d38cc46e434bfdfef2f5a60d1ef"} Jan 22 13:39:57 crc kubenswrapper[4773]: I0122 13:39:57.129017 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-64d88cbf74-c2bxb" podStartSLOduration=2.128995241 podStartE2EDuration="2.128995241s" podCreationTimestamp="2026-01-22 13:39:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:39:57.122161219 +0000 UTC m=+6304.700277054" watchObservedRunningTime="2026-01-22 13:39:57.128995241 +0000 UTC m=+6304.707111066" Jan 22 13:39:57 crc kubenswrapper[4773]: I0122 13:39:57.226838 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-ht6ks" Jan 22 13:39:57 crc kubenswrapper[4773]: I0122 13:39:57.235793 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-3c56-account-create-update-8fhjd" Jan 22 13:39:57 crc kubenswrapper[4773]: I0122 13:39:57.786976 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-ht6ks"] Jan 22 13:39:57 crc kubenswrapper[4773]: I0122 13:39:57.855963 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-3c56-account-create-update-8fhjd"] Jan 22 13:39:57 crc kubenswrapper[4773]: W0122 13:39:57.862581 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod39f5a860_7df2_4ef7_a42e_e5e5baba0fba.slice/crio-b0d7ef8257d3221bb93cf2e5e98d79705e6d3acfe34d0cdcfee60e14580f5242 WatchSource:0}: Error finding container b0d7ef8257d3221bb93cf2e5e98d79705e6d3acfe34d0cdcfee60e14580f5242: Status 404 returned error can't find the container with id b0d7ef8257d3221bb93cf2e5e98d79705e6d3acfe34d0cdcfee60e14580f5242 Jan 22 13:39:58 crc kubenswrapper[4773]: I0122 13:39:58.116303 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-ht6ks" event={"ID":"933daae7-11ca-4cde-9008-74ca1b6cbcb4","Type":"ContainerStarted","Data":"ee6de0b555c1fb64b932800bbf6209e4bab9e688301441d8a8879d9e22a744e6"} Jan 22 13:39:58 crc kubenswrapper[4773]: I0122 13:39:58.116350 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-ht6ks" event={"ID":"933daae7-11ca-4cde-9008-74ca1b6cbcb4","Type":"ContainerStarted","Data":"70434220b76c07d2cc0ac2cb0c6d2d73c0967f179e1d331fc563145a3848a099"} Jan 22 13:39:58 crc kubenswrapper[4773]: I0122 13:39:58.119196 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-3c56-account-create-update-8fhjd" event={"ID":"39f5a860-7df2-4ef7-a42e-e5e5baba0fba","Type":"ContainerStarted","Data":"607b1866e38fa2653de84abdf51a18d3c10580b3854250bc7afbcd58cc6d0a30"} Jan 22 13:39:58 crc kubenswrapper[4773]: I0122 13:39:58.119237 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-3c56-account-create-update-8fhjd" event={"ID":"39f5a860-7df2-4ef7-a42e-e5e5baba0fba","Type":"ContainerStarted","Data":"b0d7ef8257d3221bb93cf2e5e98d79705e6d3acfe34d0cdcfee60e14580f5242"} Jan 22 13:39:58 crc kubenswrapper[4773]: I0122 13:39:58.146119 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-create-ht6ks" podStartSLOduration=2.146093357 podStartE2EDuration="2.146093357s" podCreationTimestamp="2026-01-22 13:39:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:39:58.13302796 +0000 UTC m=+6305.711143785" watchObservedRunningTime="2026-01-22 13:39:58.146093357 +0000 UTC m=+6305.724209182" Jan 22 13:39:58 crc kubenswrapper[4773]: I0122 13:39:58.159737 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-3c56-account-create-update-8fhjd" podStartSLOduration=2.1597101 podStartE2EDuration="2.1597101s" podCreationTimestamp="2026-01-22 13:39:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:39:58.14975148 +0000 UTC m=+6305.727867305" watchObservedRunningTime="2026-01-22 13:39:58.1597101 +0000 UTC m=+6305.737825925" Jan 22 13:39:59 crc kubenswrapper[4773]: I0122 13:39:59.129047 4773 generic.go:334] "Generic (PLEG): container finished" podID="933daae7-11ca-4cde-9008-74ca1b6cbcb4" 
containerID="ee6de0b555c1fb64b932800bbf6209e4bab9e688301441d8a8879d9e22a744e6" exitCode=0 Jan 22 13:39:59 crc kubenswrapper[4773]: I0122 13:39:59.129168 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-ht6ks" event={"ID":"933daae7-11ca-4cde-9008-74ca1b6cbcb4","Type":"ContainerDied","Data":"ee6de0b555c1fb64b932800bbf6209e4bab9e688301441d8a8879d9e22a744e6"} Jan 22 13:39:59 crc kubenswrapper[4773]: I0122 13:39:59.132832 4773 generic.go:334] "Generic (PLEG): container finished" podID="39f5a860-7df2-4ef7-a42e-e5e5baba0fba" containerID="607b1866e38fa2653de84abdf51a18d3c10580b3854250bc7afbcd58cc6d0a30" exitCode=0 Jan 22 13:39:59 crc kubenswrapper[4773]: I0122 13:39:59.132918 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-3c56-account-create-update-8fhjd" event={"ID":"39f5a860-7df2-4ef7-a42e-e5e5baba0fba","Type":"ContainerDied","Data":"607b1866e38fa2653de84abdf51a18d3c10580b3854250bc7afbcd58cc6d0a30"} Jan 22 13:40:00 crc kubenswrapper[4773]: I0122 13:40:00.599087 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-ht6ks" Jan 22 13:40:00 crc kubenswrapper[4773]: I0122 13:40:00.606972 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-3c56-account-create-update-8fhjd" Jan 22 13:40:00 crc kubenswrapper[4773]: I0122 13:40:00.774724 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/933daae7-11ca-4cde-9008-74ca1b6cbcb4-operator-scripts\") pod \"933daae7-11ca-4cde-9008-74ca1b6cbcb4\" (UID: \"933daae7-11ca-4cde-9008-74ca1b6cbcb4\") " Jan 22 13:40:00 crc kubenswrapper[4773]: I0122 13:40:00.774938 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39f5a860-7df2-4ef7-a42e-e5e5baba0fba-operator-scripts\") pod \"39f5a860-7df2-4ef7-a42e-e5e5baba0fba\" (UID: \"39f5a860-7df2-4ef7-a42e-e5e5baba0fba\") " Jan 22 13:40:00 crc kubenswrapper[4773]: I0122 13:40:00.775000 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7p8h\" (UniqueName: \"kubernetes.io/projected/933daae7-11ca-4cde-9008-74ca1b6cbcb4-kube-api-access-w7p8h\") pod \"933daae7-11ca-4cde-9008-74ca1b6cbcb4\" (UID: \"933daae7-11ca-4cde-9008-74ca1b6cbcb4\") " Jan 22 13:40:00 crc kubenswrapper[4773]: I0122 13:40:00.775096 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gbn5k\" (UniqueName: \"kubernetes.io/projected/39f5a860-7df2-4ef7-a42e-e5e5baba0fba-kube-api-access-gbn5k\") pod \"39f5a860-7df2-4ef7-a42e-e5e5baba0fba\" (UID: \"39f5a860-7df2-4ef7-a42e-e5e5baba0fba\") " Jan 22 13:40:00 crc kubenswrapper[4773]: I0122 13:40:00.775527 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/933daae7-11ca-4cde-9008-74ca1b6cbcb4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "933daae7-11ca-4cde-9008-74ca1b6cbcb4" (UID: "933daae7-11ca-4cde-9008-74ca1b6cbcb4"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:40:00 crc kubenswrapper[4773]: I0122 13:40:00.775664 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39f5a860-7df2-4ef7-a42e-e5e5baba0fba-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "39f5a860-7df2-4ef7-a42e-e5e5baba0fba" (UID: "39f5a860-7df2-4ef7-a42e-e5e5baba0fba"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:40:00 crc kubenswrapper[4773]: I0122 13:40:00.776074 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/933daae7-11ca-4cde-9008-74ca1b6cbcb4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:00 crc kubenswrapper[4773]: I0122 13:40:00.776102 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39f5a860-7df2-4ef7-a42e-e5e5baba0fba-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:00 crc kubenswrapper[4773]: I0122 13:40:00.779971 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/933daae7-11ca-4cde-9008-74ca1b6cbcb4-kube-api-access-w7p8h" (OuterVolumeSpecName: "kube-api-access-w7p8h") pod "933daae7-11ca-4cde-9008-74ca1b6cbcb4" (UID: "933daae7-11ca-4cde-9008-74ca1b6cbcb4"). InnerVolumeSpecName "kube-api-access-w7p8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:40:00 crc kubenswrapper[4773]: I0122 13:40:00.780225 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39f5a860-7df2-4ef7-a42e-e5e5baba0fba-kube-api-access-gbn5k" (OuterVolumeSpecName: "kube-api-access-gbn5k") pod "39f5a860-7df2-4ef7-a42e-e5e5baba0fba" (UID: "39f5a860-7df2-4ef7-a42e-e5e5baba0fba"). InnerVolumeSpecName "kube-api-access-gbn5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:40:00 crc kubenswrapper[4773]: I0122 13:40:00.878275 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gbn5k\" (UniqueName: \"kubernetes.io/projected/39f5a860-7df2-4ef7-a42e-e5e5baba0fba-kube-api-access-gbn5k\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:00 crc kubenswrapper[4773]: I0122 13:40:00.878342 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7p8h\" (UniqueName: \"kubernetes.io/projected/933daae7-11ca-4cde-9008-74ca1b6cbcb4-kube-api-access-w7p8h\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:01 crc kubenswrapper[4773]: I0122 13:40:01.158951 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-ht6ks" event={"ID":"933daae7-11ca-4cde-9008-74ca1b6cbcb4","Type":"ContainerDied","Data":"70434220b76c07d2cc0ac2cb0c6d2d73c0967f179e1d331fc563145a3848a099"} Jan 22 13:40:01 crc kubenswrapper[4773]: I0122 13:40:01.159017 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="70434220b76c07d2cc0ac2cb0c6d2d73c0967f179e1d331fc563145a3848a099" Jan 22 13:40:01 crc kubenswrapper[4773]: I0122 13:40:01.159104 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-ht6ks" Jan 22 13:40:01 crc kubenswrapper[4773]: I0122 13:40:01.165149 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-3c56-account-create-update-8fhjd" event={"ID":"39f5a860-7df2-4ef7-a42e-e5e5baba0fba","Type":"ContainerDied","Data":"b0d7ef8257d3221bb93cf2e5e98d79705e6d3acfe34d0cdcfee60e14580f5242"} Jan 22 13:40:01 crc kubenswrapper[4773]: I0122 13:40:01.165209 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0d7ef8257d3221bb93cf2e5e98d79705e6d3acfe34d0cdcfee60e14580f5242" Jan 22 13:40:01 crc kubenswrapper[4773]: I0122 13:40:01.165255 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-3c56-account-create-update-8fhjd" Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.029870 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-g6ml5"] Jan 22 13:40:02 crc kubenswrapper[4773]: E0122 13:40:02.030817 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39f5a860-7df2-4ef7-a42e-e5e5baba0fba" containerName="mariadb-account-create-update" Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.030844 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="39f5a860-7df2-4ef7-a42e-e5e5baba0fba" containerName="mariadb-account-create-update" Jan 22 13:40:02 crc kubenswrapper[4773]: E0122 13:40:02.030868 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="933daae7-11ca-4cde-9008-74ca1b6cbcb4" containerName="mariadb-database-create" Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.030877 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="933daae7-11ca-4cde-9008-74ca1b6cbcb4" containerName="mariadb-database-create" Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.031154 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="39f5a860-7df2-4ef7-a42e-e5e5baba0fba" containerName="mariadb-account-create-update" Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.031190 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="933daae7-11ca-4cde-9008-74ca1b6cbcb4" containerName="mariadb-database-create" Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.032464 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-g6ml5" Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.035683 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-bdvlk" Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.035857 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.055907 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-g6ml5"] Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.213642 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rscn7\" (UniqueName: \"kubernetes.io/projected/d4543c21-3185-489c-827f-742d491ee70c-kube-api-access-rscn7\") pod \"heat-db-sync-g6ml5\" (UID: \"d4543c21-3185-489c-827f-742d491ee70c\") " pod="openstack/heat-db-sync-g6ml5" Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.214008 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4543c21-3185-489c-827f-742d491ee70c-combined-ca-bundle\") pod \"heat-db-sync-g6ml5\" (UID: \"d4543c21-3185-489c-827f-742d491ee70c\") " pod="openstack/heat-db-sync-g6ml5" Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.214068 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4543c21-3185-489c-827f-742d491ee70c-config-data\") pod \"heat-db-sync-g6ml5\" (UID: \"d4543c21-3185-489c-827f-742d491ee70c\") " pod="openstack/heat-db-sync-g6ml5" Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.316464 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4543c21-3185-489c-827f-742d491ee70c-combined-ca-bundle\") pod \"heat-db-sync-g6ml5\" (UID: \"d4543c21-3185-489c-827f-742d491ee70c\") " pod="openstack/heat-db-sync-g6ml5" Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.316524 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4543c21-3185-489c-827f-742d491ee70c-config-data\") pod \"heat-db-sync-g6ml5\" (UID: \"d4543c21-3185-489c-827f-742d491ee70c\") " pod="openstack/heat-db-sync-g6ml5" Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.316681 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rscn7\" (UniqueName: \"kubernetes.io/projected/d4543c21-3185-489c-827f-742d491ee70c-kube-api-access-rscn7\") pod \"heat-db-sync-g6ml5\" (UID: \"d4543c21-3185-489c-827f-742d491ee70c\") " pod="openstack/heat-db-sync-g6ml5" Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.320859 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4543c21-3185-489c-827f-742d491ee70c-combined-ca-bundle\") pod \"heat-db-sync-g6ml5\" (UID: \"d4543c21-3185-489c-827f-742d491ee70c\") " pod="openstack/heat-db-sync-g6ml5" Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.321037 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4543c21-3185-489c-827f-742d491ee70c-config-data\") pod \"heat-db-sync-g6ml5\" (UID: \"d4543c21-3185-489c-827f-742d491ee70c\") " pod="openstack/heat-db-sync-g6ml5" 
Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.348831 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rscn7\" (UniqueName: \"kubernetes.io/projected/d4543c21-3185-489c-827f-742d491ee70c-kube-api-access-rscn7\") pod \"heat-db-sync-g6ml5\" (UID: \"d4543c21-3185-489c-827f-742d491ee70c\") " pod="openstack/heat-db-sync-g6ml5"
Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.359517 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-g6ml5"
Jan 22 13:40:02 crc kubenswrapper[4773]: I0122 13:40:02.840964 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-g6ml5"]
Jan 22 13:40:03 crc kubenswrapper[4773]: I0122 13:40:03.183848 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-g6ml5" event={"ID":"d4543c21-3185-489c-827f-742d491ee70c","Type":"ContainerStarted","Data":"160576cf9c0d41abdac103f8c99a3b3cf719f8e5984a2562a16805cfd78e48df"}
Jan 22 13:40:05 crc kubenswrapper[4773]: I0122 13:40:05.712981 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-64d88cbf74-c2bxb"
Jan 22 13:40:05 crc kubenswrapper[4773]: I0122 13:40:05.713202 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-64d88cbf74-c2bxb"
Jan 22 13:40:06 crc kubenswrapper[4773]: I0122 13:40:06.716178 4773 scope.go:117] "RemoveContainer" containerID="29157dfab8645a526b3459aa4ae2b08b8d91a86f031a944e164fa54b04fefca7"
Jan 22 13:40:12 crc kubenswrapper[4773]: I0122 13:40:12.289568 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-g6ml5" event={"ID":"d4543c21-3185-489c-827f-742d491ee70c","Type":"ContainerStarted","Data":"d034c0c0bc553f19b007d2313cf284b4a7d346ad689ab0720f0e8897b269c096"}
Jan 22 13:40:12 crc kubenswrapper[4773]: I0122 13:40:12.311793 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-g6ml5" podStartSLOduration=2.229045593 podStartE2EDuration="10.311767475s" podCreationTimestamp="2026-01-22 13:40:02 +0000 UTC" firstStartedPulling="2026-01-22 13:40:02.843958694 +0000 UTC m=+6310.422074519" lastFinishedPulling="2026-01-22 13:40:10.926680576 +0000 UTC m=+6318.504796401" observedRunningTime="2026-01-22 13:40:12.310138179 +0000 UTC m=+6319.888254024" watchObservedRunningTime="2026-01-22 13:40:12.311767475 +0000 UTC m=+6319.889883320"
Jan 22 13:40:15 crc kubenswrapper[4773]: I0122 13:40:15.319679 4773 generic.go:334] "Generic (PLEG): container finished" podID="d4543c21-3185-489c-827f-742d491ee70c" containerID="d034c0c0bc553f19b007d2313cf284b4a7d346ad689ab0720f0e8897b269c096" exitCode=0
Jan 22 13:40:15 crc kubenswrapper[4773]: I0122 13:40:15.319787 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-g6ml5" event={"ID":"d4543c21-3185-489c-827f-742d491ee70c","Type":"ContainerDied","Data":"d034c0c0bc553f19b007d2313cf284b4a7d346ad689ab0720f0e8897b269c096"}
Jan 22 13:40:15 crc kubenswrapper[4773]: I0122 13:40:15.715237 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-64d88cbf74-c2bxb" podUID="16615494-47e4-428d-b631-e41b55192f74" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.128:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.128:8443: connect: connection refused"
Jan 22 13:40:16 crc kubenswrapper[4773]: I0122 13:40:16.736571 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-g6ml5"
Jan 22 13:40:16 crc kubenswrapper[4773]: I0122 13:40:16.893792 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4543c21-3185-489c-827f-742d491ee70c-config-data\") pod \"d4543c21-3185-489c-827f-742d491ee70c\" (UID: \"d4543c21-3185-489c-827f-742d491ee70c\") "
Jan 22 13:40:16 crc kubenswrapper[4773]: I0122 13:40:16.894726 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rscn7\" (UniqueName: \"kubernetes.io/projected/d4543c21-3185-489c-827f-742d491ee70c-kube-api-access-rscn7\") pod \"d4543c21-3185-489c-827f-742d491ee70c\" (UID: \"d4543c21-3185-489c-827f-742d491ee70c\") "
Jan 22 13:40:16 crc kubenswrapper[4773]: I0122 13:40:16.894884 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4543c21-3185-489c-827f-742d491ee70c-combined-ca-bundle\") pod \"d4543c21-3185-489c-827f-742d491ee70c\" (UID: \"d4543c21-3185-489c-827f-742d491ee70c\") "
Jan 22 13:40:16 crc kubenswrapper[4773]: I0122 13:40:16.899683 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4543c21-3185-489c-827f-742d491ee70c-kube-api-access-rscn7" (OuterVolumeSpecName: "kube-api-access-rscn7") pod "d4543c21-3185-489c-827f-742d491ee70c" (UID: "d4543c21-3185-489c-827f-742d491ee70c"). InnerVolumeSpecName "kube-api-access-rscn7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 13:40:16 crc kubenswrapper[4773]: I0122 13:40:16.935545 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4543c21-3185-489c-827f-742d491ee70c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d4543c21-3185-489c-827f-742d491ee70c" (UID: "d4543c21-3185-489c-827f-742d491ee70c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 13:40:16 crc kubenswrapper[4773]: I0122 13:40:16.976543 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4543c21-3185-489c-827f-742d491ee70c-config-data" (OuterVolumeSpecName: "config-data") pod "d4543c21-3185-489c-827f-742d491ee70c" (UID: "d4543c21-3185-489c-827f-742d491ee70c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 13:40:16 crc kubenswrapper[4773]: I0122 13:40:16.999140 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rscn7\" (UniqueName: \"kubernetes.io/projected/d4543c21-3185-489c-827f-742d491ee70c-kube-api-access-rscn7\") on node \"crc\" DevicePath \"\""
Jan 22 13:40:16 crc kubenswrapper[4773]: I0122 13:40:16.999420 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4543c21-3185-489c-827f-742d491ee70c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 13:40:16 crc kubenswrapper[4773]: I0122 13:40:16.999571 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4543c21-3185-489c-827f-742d491ee70c-config-data\") on node \"crc\" DevicePath \"\""
Jan 22 13:40:17 crc kubenswrapper[4773]: I0122 13:40:17.340552 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-g6ml5" event={"ID":"d4543c21-3185-489c-827f-742d491ee70c","Type":"ContainerDied","Data":"160576cf9c0d41abdac103f8c99a3b3cf719f8e5984a2562a16805cfd78e48df"}
Jan 22 13:40:17 crc kubenswrapper[4773]: I0122 13:40:17.340607 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="160576cf9c0d41abdac103f8c99a3b3cf719f8e5984a2562a16805cfd78e48df"
Jan 22 13:40:17 crc kubenswrapper[4773]: I0122 13:40:17.340637 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-g6ml5"
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.812094 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-66f9c7c857-mcmx8"]
Jan 22 13:40:18 crc kubenswrapper[4773]: E0122 13:40:18.812874 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4543c21-3185-489c-827f-742d491ee70c" containerName="heat-db-sync"
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.812896 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4543c21-3185-489c-827f-742d491ee70c" containerName="heat-db-sync"
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.813155 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4543c21-3185-489c-827f-742d491ee70c" containerName="heat-db-sync"
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.813945 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-66f9c7c857-mcmx8"
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.816930 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data"
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.817024 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-bdvlk"
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.817178 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data"
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.832463 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-66f9c7c857-mcmx8"]
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.842046 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-69bc86474b-hppz8"]
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.843454 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-69bc86474b-hppz8"
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.845796 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data"
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.857359 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-69bc86474b-hppz8"]
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.918327 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-79645699d7-hqzxf"]
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.919721 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-79645699d7-hqzxf"]
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.919944 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-79645699d7-hqzxf"
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.941579 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data"
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.986898 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c90999f3-789d-415f-9e3c-67b94a0172fb-combined-ca-bundle\") pod \"heat-api-69bc86474b-hppz8\" (UID: \"c90999f3-789d-415f-9e3c-67b94a0172fb\") " pod="openstack/heat-api-69bc86474b-hppz8"
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.986958 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c90999f3-789d-415f-9e3c-67b94a0172fb-config-data-custom\") pod \"heat-api-69bc86474b-hppz8\" (UID: \"c90999f3-789d-415f-9e3c-67b94a0172fb\") " pod="openstack/heat-api-69bc86474b-hppz8"
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.987000 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/013bab31-6536-4eef-911e-a1e4f35a835e-combined-ca-bundle\") pod \"heat-engine-66f9c7c857-mcmx8\" (UID: \"013bab31-6536-4eef-911e-a1e4f35a835e\") " pod="openstack/heat-engine-66f9c7c857-mcmx8"
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.987020 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwbj5\" (UniqueName: \"kubernetes.io/projected/013bab31-6536-4eef-911e-a1e4f35a835e-kube-api-access-kwbj5\") pod \"heat-engine-66f9c7c857-mcmx8\" (UID: \"013bab31-6536-4eef-911e-a1e4f35a835e\") " pod="openstack/heat-engine-66f9c7c857-mcmx8"
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.987038 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/013bab31-6536-4eef-911e-a1e4f35a835e-config-data\") pod \"heat-engine-66f9c7c857-mcmx8\" (UID: \"013bab31-6536-4eef-911e-a1e4f35a835e\") " pod="openstack/heat-engine-66f9c7c857-mcmx8"
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.987558 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c90999f3-789d-415f-9e3c-67b94a0172fb-config-data\") pod \"heat-api-69bc86474b-hppz8\" (UID: \"c90999f3-789d-415f-9e3c-67b94a0172fb\") " pod="openstack/heat-api-69bc86474b-hppz8"
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.987723 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpwxs\" (UniqueName: \"kubernetes.io/projected/c90999f3-789d-415f-9e3c-67b94a0172fb-kube-api-access-hpwxs\") pod \"heat-api-69bc86474b-hppz8\" (UID: \"c90999f3-789d-415f-9e3c-67b94a0172fb\") " pod="openstack/heat-api-69bc86474b-hppz8"
Jan 22 13:40:18 crc kubenswrapper[4773]: I0122 13:40:18.987747 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/013bab31-6536-4eef-911e-a1e4f35a835e-config-data-custom\") pod \"heat-engine-66f9c7c857-mcmx8\" (UID: \"013bab31-6536-4eef-911e-a1e4f35a835e\") " pod="openstack/heat-engine-66f9c7c857-mcmx8"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.089771 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bea4ee1c-25a1-4bb6-829b-088a2de31247-config-data\") pod \"heat-cfnapi-79645699d7-hqzxf\" (UID: \"bea4ee1c-25a1-4bb6-829b-088a2de31247\") " pod="openstack/heat-cfnapi-79645699d7-hqzxf"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.090156 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbk2r\" (UniqueName: \"kubernetes.io/projected/bea4ee1c-25a1-4bb6-829b-088a2de31247-kube-api-access-hbk2r\") pod \"heat-cfnapi-79645699d7-hqzxf\" (UID: \"bea4ee1c-25a1-4bb6-829b-088a2de31247\") " pod="openstack/heat-cfnapi-79645699d7-hqzxf"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.090254 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpwxs\" (UniqueName: \"kubernetes.io/projected/c90999f3-789d-415f-9e3c-67b94a0172fb-kube-api-access-hpwxs\") pod \"heat-api-69bc86474b-hppz8\" (UID: \"c90999f3-789d-415f-9e3c-67b94a0172fb\") " pod="openstack/heat-api-69bc86474b-hppz8"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.090979 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/013bab31-6536-4eef-911e-a1e4f35a835e-config-data-custom\") pod \"heat-engine-66f9c7c857-mcmx8\" (UID: \"013bab31-6536-4eef-911e-a1e4f35a835e\") " pod="openstack/heat-engine-66f9c7c857-mcmx8"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.091139 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bea4ee1c-25a1-4bb6-829b-088a2de31247-combined-ca-bundle\") pod \"heat-cfnapi-79645699d7-hqzxf\" (UID: \"bea4ee1c-25a1-4bb6-829b-088a2de31247\") " pod="openstack/heat-cfnapi-79645699d7-hqzxf"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.091483 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c90999f3-789d-415f-9e3c-67b94a0172fb-combined-ca-bundle\") pod \"heat-api-69bc86474b-hppz8\" (UID: \"c90999f3-789d-415f-9e3c-67b94a0172fb\") " pod="openstack/heat-api-69bc86474b-hppz8"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.091554 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c90999f3-789d-415f-9e3c-67b94a0172fb-config-data-custom\") pod \"heat-api-69bc86474b-hppz8\" (UID: \"c90999f3-789d-415f-9e3c-67b94a0172fb\") " pod="openstack/heat-api-69bc86474b-hppz8"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.091623 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/013bab31-6536-4eef-911e-a1e4f35a835e-combined-ca-bundle\") pod \"heat-engine-66f9c7c857-mcmx8\" (UID: \"013bab31-6536-4eef-911e-a1e4f35a835e\") " pod="openstack/heat-engine-66f9c7c857-mcmx8"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.091661 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwbj5\" (UniqueName: \"kubernetes.io/projected/013bab31-6536-4eef-911e-a1e4f35a835e-kube-api-access-kwbj5\") pod \"heat-engine-66f9c7c857-mcmx8\" (UID: \"013bab31-6536-4eef-911e-a1e4f35a835e\") " pod="openstack/heat-engine-66f9c7c857-mcmx8"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.091720 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/013bab31-6536-4eef-911e-a1e4f35a835e-config-data\") pod \"heat-engine-66f9c7c857-mcmx8\" (UID: \"013bab31-6536-4eef-911e-a1e4f35a835e\") " pod="openstack/heat-engine-66f9c7c857-mcmx8"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.092469 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bea4ee1c-25a1-4bb6-829b-088a2de31247-config-data-custom\") pod \"heat-cfnapi-79645699d7-hqzxf\" (UID: \"bea4ee1c-25a1-4bb6-829b-088a2de31247\") " pod="openstack/heat-cfnapi-79645699d7-hqzxf"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.092594 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c90999f3-789d-415f-9e3c-67b94a0172fb-config-data\") pod \"heat-api-69bc86474b-hppz8\" (UID: \"c90999f3-789d-415f-9e3c-67b94a0172fb\") " pod="openstack/heat-api-69bc86474b-hppz8"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.098509 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/013bab31-6536-4eef-911e-a1e4f35a835e-config-data\") pod \"heat-engine-66f9c7c857-mcmx8\" (UID: \"013bab31-6536-4eef-911e-a1e4f35a835e\") " pod="openstack/heat-engine-66f9c7c857-mcmx8"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.098953 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c90999f3-789d-415f-9e3c-67b94a0172fb-combined-ca-bundle\") pod \"heat-api-69bc86474b-hppz8\" (UID: \"c90999f3-789d-415f-9e3c-67b94a0172fb\") " pod="openstack/heat-api-69bc86474b-hppz8"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.099276 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c90999f3-789d-415f-9e3c-67b94a0172fb-config-data\") pod \"heat-api-69bc86474b-hppz8\" (UID: \"c90999f3-789d-415f-9e3c-67b94a0172fb\") " pod="openstack/heat-api-69bc86474b-hppz8"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.099305 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/013bab31-6536-4eef-911e-a1e4f35a835e-config-data-custom\") pod \"heat-engine-66f9c7c857-mcmx8\" (UID: \"013bab31-6536-4eef-911e-a1e4f35a835e\") " pod="openstack/heat-engine-66f9c7c857-mcmx8"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.102788 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c90999f3-789d-415f-9e3c-67b94a0172fb-config-data-custom\") pod \"heat-api-69bc86474b-hppz8\" (UID: \"c90999f3-789d-415f-9e3c-67b94a0172fb\") " pod="openstack/heat-api-69bc86474b-hppz8"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.110716 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/013bab31-6536-4eef-911e-a1e4f35a835e-combined-ca-bundle\") pod \"heat-engine-66f9c7c857-mcmx8\" (UID: \"013bab31-6536-4eef-911e-a1e4f35a835e\") " pod="openstack/heat-engine-66f9c7c857-mcmx8"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.111751 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpwxs\" (UniqueName: \"kubernetes.io/projected/c90999f3-789d-415f-9e3c-67b94a0172fb-kube-api-access-hpwxs\") pod \"heat-api-69bc86474b-hppz8\" (UID: \"c90999f3-789d-415f-9e3c-67b94a0172fb\") " pod="openstack/heat-api-69bc86474b-hppz8"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.115016 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwbj5\" (UniqueName: \"kubernetes.io/projected/013bab31-6536-4eef-911e-a1e4f35a835e-kube-api-access-kwbj5\") pod \"heat-engine-66f9c7c857-mcmx8\" (UID: \"013bab31-6536-4eef-911e-a1e4f35a835e\") " pod="openstack/heat-engine-66f9c7c857-mcmx8"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.146621 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-66f9c7c857-mcmx8"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.163206 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-69bc86474b-hppz8"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.204434 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bea4ee1c-25a1-4bb6-829b-088a2de31247-config-data\") pod \"heat-cfnapi-79645699d7-hqzxf\" (UID: \"bea4ee1c-25a1-4bb6-829b-088a2de31247\") " pod="openstack/heat-cfnapi-79645699d7-hqzxf"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.204667 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbk2r\" (UniqueName: \"kubernetes.io/projected/bea4ee1c-25a1-4bb6-829b-088a2de31247-kube-api-access-hbk2r\") pod \"heat-cfnapi-79645699d7-hqzxf\" (UID: \"bea4ee1c-25a1-4bb6-829b-088a2de31247\") " pod="openstack/heat-cfnapi-79645699d7-hqzxf"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.204780 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bea4ee1c-25a1-4bb6-829b-088a2de31247-combined-ca-bundle\") pod \"heat-cfnapi-79645699d7-hqzxf\" (UID: \"bea4ee1c-25a1-4bb6-829b-088a2de31247\") " pod="openstack/heat-cfnapi-79645699d7-hqzxf"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.204995 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bea4ee1c-25a1-4bb6-829b-088a2de31247-config-data-custom\") pod \"heat-cfnapi-79645699d7-hqzxf\" (UID: \"bea4ee1c-25a1-4bb6-829b-088a2de31247\") " pod="openstack/heat-cfnapi-79645699d7-hqzxf"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.217126 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bea4ee1c-25a1-4bb6-829b-088a2de31247-config-data-custom\") pod \"heat-cfnapi-79645699d7-hqzxf\" (UID: \"bea4ee1c-25a1-4bb6-829b-088a2de31247\") " pod="openstack/heat-cfnapi-79645699d7-hqzxf"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.217469 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bea4ee1c-25a1-4bb6-829b-088a2de31247-combined-ca-bundle\") pod \"heat-cfnapi-79645699d7-hqzxf\" (UID: \"bea4ee1c-25a1-4bb6-829b-088a2de31247\") " pod="openstack/heat-cfnapi-79645699d7-hqzxf"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.221757 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bea4ee1c-25a1-4bb6-829b-088a2de31247-config-data\") pod \"heat-cfnapi-79645699d7-hqzxf\" (UID: \"bea4ee1c-25a1-4bb6-829b-088a2de31247\") " pod="openstack/heat-cfnapi-79645699d7-hqzxf"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.242012 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbk2r\" (UniqueName: \"kubernetes.io/projected/bea4ee1c-25a1-4bb6-829b-088a2de31247-kube-api-access-hbk2r\") pod \"heat-cfnapi-79645699d7-hqzxf\" (UID: \"bea4ee1c-25a1-4bb6-829b-088a2de31247\") " pod="openstack/heat-cfnapi-79645699d7-hqzxf"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.259454 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-79645699d7-hqzxf"
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.782655 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-66f9c7c857-mcmx8"]
Jan 22 13:40:19 crc kubenswrapper[4773]: W0122 13:40:19.832595 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc90999f3_789d_415f_9e3c_67b94a0172fb.slice/crio-92cb6dd9f68c13daf337caf4fc6270148eafa2f396c57745b1da4f05bb4668df WatchSource:0}: Error finding container 92cb6dd9f68c13daf337caf4fc6270148eafa2f396c57745b1da4f05bb4668df: Status 404 returned error can't find the container with id 92cb6dd9f68c13daf337caf4fc6270148eafa2f396c57745b1da4f05bb4668df
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.849023 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-69bc86474b-hppz8"]
Jan 22 13:40:19 crc kubenswrapper[4773]: W0122 13:40:19.924012 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbea4ee1c_25a1_4bb6_829b_088a2de31247.slice/crio-3119e5440c4518c5e5354fd2640b5f984cfab4cce5c9230947b0c29f8bebb7b0 WatchSource:0}: Error finding container 3119e5440c4518c5e5354fd2640b5f984cfab4cce5c9230947b0c29f8bebb7b0: Status 404 returned error can't find the container with id 3119e5440c4518c5e5354fd2640b5f984cfab4cce5c9230947b0c29f8bebb7b0
Jan 22 13:40:19 crc kubenswrapper[4773]: I0122 13:40:19.924217 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-79645699d7-hqzxf"]
Jan 22 13:40:20 crc kubenswrapper[4773]: I0122 13:40:20.384160 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-79645699d7-hqzxf" event={"ID":"bea4ee1c-25a1-4bb6-829b-088a2de31247","Type":"ContainerStarted","Data":"3119e5440c4518c5e5354fd2640b5f984cfab4cce5c9230947b0c29f8bebb7b0"}
Jan 22 13:40:20 crc kubenswrapper[4773]: I0122 13:40:20.385939 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-66f9c7c857-mcmx8" event={"ID":"013bab31-6536-4eef-911e-a1e4f35a835e","Type":"ContainerStarted","Data":"0d96228da6e02235cbca6a672f2da6a6452c230faecefefcc635fef43e42aed3"}
Jan 22 13:40:20 crc kubenswrapper[4773]: I0122 13:40:20.385986 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-66f9c7c857-mcmx8" event={"ID":"013bab31-6536-4eef-911e-a1e4f35a835e","Type":"ContainerStarted","Data":"83a98c81ab020cd73b57c81fd106f526fbe68a1bbdb2c0647675084413190d89"}
Jan 22 13:40:20 crc kubenswrapper[4773]: I0122 13:40:20.387304 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-69bc86474b-hppz8" event={"ID":"c90999f3-789d-415f-9e3c-67b94a0172fb","Type":"ContainerStarted","Data":"92cb6dd9f68c13daf337caf4fc6270148eafa2f396c57745b1da4f05bb4668df"}
Jan 22 13:40:21 crc kubenswrapper[4773]: I0122 13:40:21.402776 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-66f9c7c857-mcmx8"
Jan 22 13:40:21 crc kubenswrapper[4773]: I0122 13:40:21.426380 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-66f9c7c857-mcmx8" podStartSLOduration=3.426352257 podStartE2EDuration="3.426352257s" podCreationTimestamp="2026-01-22 13:40:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:40:21.419203826 +0000 UTC m=+6328.997319661" watchObservedRunningTime="2026-01-22 13:40:21.426352257 +0000 UTC m=+6329.004468102"
Jan 22 13:40:23 crc kubenswrapper[4773]: I0122 13:40:23.059144 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-ddef-account-create-update-4qzfx"]
Jan 22 13:40:23 crc kubenswrapper[4773]: I0122 13:40:23.071324 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-nfjnh"]
Jan 22 13:40:23 crc kubenswrapper[4773]: I0122 13:40:23.079329 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-ddef-account-create-update-4qzfx"]
Jan 22 13:40:23 crc kubenswrapper[4773]: I0122 13:40:23.086540 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-nfjnh"]
Jan 22 13:40:24 crc kubenswrapper[4773]: I0122 13:40:24.434330 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-69bc86474b-hppz8" event={"ID":"c90999f3-789d-415f-9e3c-67b94a0172fb","Type":"ContainerStarted","Data":"90d4a7d02d183727ff1d9590ce62ad853c96b47015f6a1843c7e4c1b71429a30"}
Jan 22 13:40:24 crc kubenswrapper[4773]: I0122 13:40:24.435043 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-69bc86474b-hppz8"
Jan 22 13:40:24 crc kubenswrapper[4773]: I0122 13:40:24.438339 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-79645699d7-hqzxf" event={"ID":"bea4ee1c-25a1-4bb6-829b-088a2de31247","Type":"ContainerStarted","Data":"b141e3108644d42f833eae1abe686c7be93486a35cbb8619ed01adc96d9111a7"}
Jan 22 13:40:24 crc kubenswrapper[4773]: I0122 13:40:24.438554 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-79645699d7-hqzxf"
Jan 22 13:40:24 crc kubenswrapper[4773]: I0122 13:40:24.458206 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-69bc86474b-hppz8" podStartSLOduration=2.783195761 podStartE2EDuration="6.458179309s" podCreationTimestamp="2026-01-22 13:40:18 +0000 UTC" firstStartedPulling="2026-01-22 13:40:19.839872138 +0000 UTC m=+6327.417987963" lastFinishedPulling="2026-01-22 13:40:23.514855686 +0000 UTC m=+6331.092971511" observedRunningTime="2026-01-22 13:40:24.455892024 +0000 UTC m=+6332.034007849" watchObservedRunningTime="2026-01-22 13:40:24.458179309 +0000 UTC m=+6332.036295134"
Jan 22 13:40:24 crc kubenswrapper[4773]: I0122 13:40:24.477007 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-79645699d7-hqzxf" podStartSLOduration=2.88704742 podStartE2EDuration="6.476984537s" podCreationTimestamp="2026-01-22 13:40:18 +0000 UTC" firstStartedPulling="2026-01-22 13:40:19.926146763 +0000 UTC m=+6327.504262588" lastFinishedPulling="2026-01-22 13:40:23.51608388 +0000 UTC m=+6331.094199705" observedRunningTime="2026-01-22 13:40:24.473372426 +0000 UTC m=+6332.051488261" watchObservedRunningTime="2026-01-22 13:40:24.476984537 +0000 UTC m=+6332.055100362"
Jan 22 13:40:24 crc kubenswrapper[4773]: I0122 13:40:24.680044 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a29aee8-80d7-4408-be21-ef961794ede3" path="/var/lib/kubelet/pods/4a29aee8-80d7-4408-be21-ef961794ede3/volumes"
Jan 22 13:40:24 crc kubenswrapper[4773]: I0122 13:40:24.680770 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="baf43c35-2259-4842-ba3b-cd8ed4ffce8c" path="/var/lib/kubelet/pods/baf43c35-2259-4842-ba3b-cd8ed4ffce8c/volumes"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.252112 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-54c5697d4f-hg5lj"]
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.255257 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-54c5697d4f-hg5lj"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.263541 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-d88f67fbd-7wn5r"]
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.265453 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-d88f67fbd-7wn5r"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.272856 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-54c5697d4f-hg5lj"]
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.320496 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-6f84465cdd-mvww6"]
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.321903 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-6f84465cdd-mvww6"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.365383 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-d88f67fbd-7wn5r"]
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.376357 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-6f84465cdd-mvww6"]
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.390441 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93a64aef-7e70-4977-8c33-203a4508abf2-config-data\") pod \"heat-engine-54c5697d4f-hg5lj\" (UID: \"93a64aef-7e70-4977-8c33-203a4508abf2\") " pod="openstack/heat-engine-54c5697d4f-hg5lj"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.390743 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/93a64aef-7e70-4977-8c33-203a4508abf2-config-data-custom\") pod \"heat-engine-54c5697d4f-hg5lj\" (UID: \"93a64aef-7e70-4977-8c33-203a4508abf2\") " pod="openstack/heat-engine-54c5697d4f-hg5lj"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.390844 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06be61a2-6723-45ac-a38c-37b0ddf88470-combined-ca-bundle\") pod \"heat-cfnapi-6f84465cdd-mvww6\" (UID: \"06be61a2-6723-45ac-a38c-37b0ddf88470\") " pod="openstack/heat-cfnapi-6f84465cdd-mvww6"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.390951 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvsds\" (UniqueName: \"kubernetes.io/projected/06be61a2-6723-45ac-a38c-37b0ddf88470-kube-api-access-qvsds\") pod \"heat-cfnapi-6f84465cdd-mvww6\" (UID: \"06be61a2-6723-45ac-a38c-37b0ddf88470\") " pod="openstack/heat-cfnapi-6f84465cdd-mvww6"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.391073 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7drrz\" (UniqueName: \"kubernetes.io/projected/93a64aef-7e70-4977-8c33-203a4508abf2-kube-api-access-7drrz\") pod \"heat-engine-54c5697d4f-hg5lj\" (UID: \"93a64aef-7e70-4977-8c33-203a4508abf2\") " pod="openstack/heat-engine-54c5697d4f-hg5lj"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.391162 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06be61a2-6723-45ac-a38c-37b0ddf88470-config-data\") pod \"heat-cfnapi-6f84465cdd-mvww6\" (UID: \"06be61a2-6723-45ac-a38c-37b0ddf88470\") " pod="openstack/heat-cfnapi-6f84465cdd-mvww6"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.391278 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0633a562-5717-4b8e-a11f-1fc3eef8f23a-combined-ca-bundle\") pod \"heat-api-d88f67fbd-7wn5r\" (UID: \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\") " pod="openstack/heat-api-d88f67fbd-7wn5r"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.391495 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/06be61a2-6723-45ac-a38c-37b0ddf88470-config-data-custom\") pod \"heat-cfnapi-6f84465cdd-mvww6\" (UID: \"06be61a2-6723-45ac-a38c-37b0ddf88470\") " pod="openstack/heat-cfnapi-6f84465cdd-mvww6"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.391592 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0633a562-5717-4b8e-a11f-1fc3eef8f23a-config-data-custom\") pod \"heat-api-d88f67fbd-7wn5r\" (UID: \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\") " pod="openstack/heat-api-d88f67fbd-7wn5r"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.391690 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvvk2\" (UniqueName: \"kubernetes.io/projected/0633a562-5717-4b8e-a11f-1fc3eef8f23a-kube-api-access-xvvk2\") pod \"heat-api-d88f67fbd-7wn5r\" (UID: \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\") " pod="openstack/heat-api-d88f67fbd-7wn5r"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.391794 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0633a562-5717-4b8e-a11f-1fc3eef8f23a-config-data\") pod \"heat-api-d88f67fbd-7wn5r\" (UID: \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\") " pod="openstack/heat-api-d88f67fbd-7wn5r"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.391925 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93a64aef-7e70-4977-8c33-203a4508abf2-combined-ca-bundle\") pod \"heat-engine-54c5697d4f-hg5lj\" (UID: \"93a64aef-7e70-4977-8c33-203a4508abf2\") " pod="openstack/heat-engine-54c5697d4f-hg5lj"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.493531 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/06be61a2-6723-45ac-a38c-37b0ddf88470-config-data-custom\") pod \"heat-cfnapi-6f84465cdd-mvww6\" (UID: \"06be61a2-6723-45ac-a38c-37b0ddf88470\") " pod="openstack/heat-cfnapi-6f84465cdd-mvww6"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.494247 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0633a562-5717-4b8e-a11f-1fc3eef8f23a-config-data-custom\") pod \"heat-api-d88f67fbd-7wn5r\" (UID: \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\") " pod="openstack/heat-api-d88f67fbd-7wn5r"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.495077 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvvk2\" (UniqueName: \"kubernetes.io/projected/0633a562-5717-4b8e-a11f-1fc3eef8f23a-kube-api-access-xvvk2\") pod \"heat-api-d88f67fbd-7wn5r\" (UID: \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\") " pod="openstack/heat-api-d88f67fbd-7wn5r"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.495211 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0633a562-5717-4b8e-a11f-1fc3eef8f23a-config-data\") pod \"heat-api-d88f67fbd-7wn5r\" (UID: \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\") " pod="openstack/heat-api-d88f67fbd-7wn5r"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.495365 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93a64aef-7e70-4977-8c33-203a4508abf2-combined-ca-bundle\") pod \"heat-engine-54c5697d4f-hg5lj\" (UID: \"93a64aef-7e70-4977-8c33-203a4508abf2\") " pod="openstack/heat-engine-54c5697d4f-hg5lj"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.495463 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93a64aef-7e70-4977-8c33-203a4508abf2-config-data\") pod \"heat-engine-54c5697d4f-hg5lj\" (UID: \"93a64aef-7e70-4977-8c33-203a4508abf2\") " pod="openstack/heat-engine-54c5697d4f-hg5lj"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.495546 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/93a64aef-7e70-4977-8c33-203a4508abf2-config-data-custom\") pod \"heat-engine-54c5697d4f-hg5lj\" (UID: \"93a64aef-7e70-4977-8c33-203a4508abf2\") " pod="openstack/heat-engine-54c5697d4f-hg5lj"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.495636 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06be61a2-6723-45ac-a38c-37b0ddf88470-combined-ca-bundle\") pod \"heat-cfnapi-6f84465cdd-mvww6\" (UID: \"06be61a2-6723-45ac-a38c-37b0ddf88470\") " pod="openstack/heat-cfnapi-6f84465cdd-mvww6"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.495726 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvsds\" (UniqueName: \"kubernetes.io/projected/06be61a2-6723-45ac-a38c-37b0ddf88470-kube-api-access-qvsds\") pod \"heat-cfnapi-6f84465cdd-mvww6\" (UID: \"06be61a2-6723-45ac-a38c-37b0ddf88470\") " pod="openstack/heat-cfnapi-6f84465cdd-mvww6"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.495806 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7drrz\" (UniqueName: \"kubernetes.io/projected/93a64aef-7e70-4977-8c33-203a4508abf2-kube-api-access-7drrz\") pod \"heat-engine-54c5697d4f-hg5lj\" (UID: \"93a64aef-7e70-4977-8c33-203a4508abf2\") " pod="openstack/heat-engine-54c5697d4f-hg5lj"
Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.495880 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06be61a2-6723-45ac-a38c-37b0ddf88470-config-data\") pod \"heat-cfnapi-6f84465cdd-mvww6\" (UID: \"06be61a2-6723-45ac-a38c-37b0ddf88470\") " 
pod="openstack/heat-cfnapi-6f84465cdd-mvww6" Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.495973 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0633a562-5717-4b8e-a11f-1fc3eef8f23a-combined-ca-bundle\") pod \"heat-api-d88f67fbd-7wn5r\" (UID: \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\") " pod="openstack/heat-api-d88f67fbd-7wn5r" Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.504138 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0633a562-5717-4b8e-a11f-1fc3eef8f23a-config-data-custom\") pod \"heat-api-d88f67fbd-7wn5r\" (UID: \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\") " pod="openstack/heat-api-d88f67fbd-7wn5r" Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.504418 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0633a562-5717-4b8e-a11f-1fc3eef8f23a-config-data\") pod \"heat-api-d88f67fbd-7wn5r\" (UID: \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\") " pod="openstack/heat-api-d88f67fbd-7wn5r" Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.504630 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06be61a2-6723-45ac-a38c-37b0ddf88470-config-data\") pod \"heat-cfnapi-6f84465cdd-mvww6\" (UID: \"06be61a2-6723-45ac-a38c-37b0ddf88470\") " pod="openstack/heat-cfnapi-6f84465cdd-mvww6" Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.506111 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93a64aef-7e70-4977-8c33-203a4508abf2-combined-ca-bundle\") pod \"heat-engine-54c5697d4f-hg5lj\" (UID: \"93a64aef-7e70-4977-8c33-203a4508abf2\") " pod="openstack/heat-engine-54c5697d4f-hg5lj" Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.507428 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/93a64aef-7e70-4977-8c33-203a4508abf2-config-data-custom\") pod \"heat-engine-54c5697d4f-hg5lj\" (UID: \"93a64aef-7e70-4977-8c33-203a4508abf2\") " pod="openstack/heat-engine-54c5697d4f-hg5lj" Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.515186 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06be61a2-6723-45ac-a38c-37b0ddf88470-combined-ca-bundle\") pod \"heat-cfnapi-6f84465cdd-mvww6\" (UID: \"06be61a2-6723-45ac-a38c-37b0ddf88470\") " pod="openstack/heat-cfnapi-6f84465cdd-mvww6" Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.515839 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93a64aef-7e70-4977-8c33-203a4508abf2-config-data\") pod \"heat-engine-54c5697d4f-hg5lj\" (UID: \"93a64aef-7e70-4977-8c33-203a4508abf2\") " pod="openstack/heat-engine-54c5697d4f-hg5lj" Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.520180 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0633a562-5717-4b8e-a11f-1fc3eef8f23a-combined-ca-bundle\") pod \"heat-api-d88f67fbd-7wn5r\" (UID: \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\") " pod="openstack/heat-api-d88f67fbd-7wn5r" Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.520428 4773 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7drrz\" (UniqueName: \"kubernetes.io/projected/93a64aef-7e70-4977-8c33-203a4508abf2-kube-api-access-7drrz\") pod \"heat-engine-54c5697d4f-hg5lj\" (UID: \"93a64aef-7e70-4977-8c33-203a4508abf2\") " pod="openstack/heat-engine-54c5697d4f-hg5lj" Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.524187 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/06be61a2-6723-45ac-a38c-37b0ddf88470-config-data-custom\") pod \"heat-cfnapi-6f84465cdd-mvww6\" (UID: \"06be61a2-6723-45ac-a38c-37b0ddf88470\") " pod="openstack/heat-cfnapi-6f84465cdd-mvww6" Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.525459 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvvk2\" (UniqueName: \"kubernetes.io/projected/0633a562-5717-4b8e-a11f-1fc3eef8f23a-kube-api-access-xvvk2\") pod \"heat-api-d88f67fbd-7wn5r\" (UID: \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\") " pod="openstack/heat-api-d88f67fbd-7wn5r" Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.527582 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvsds\" (UniqueName: \"kubernetes.io/projected/06be61a2-6723-45ac-a38c-37b0ddf88470-kube-api-access-qvsds\") pod \"heat-cfnapi-6f84465cdd-mvww6\" (UID: \"06be61a2-6723-45ac-a38c-37b0ddf88470\") " pod="openstack/heat-cfnapi-6f84465cdd-mvww6" Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.586738 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-54c5697d4f-hg5lj" Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.609077 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-d88f67fbd-7wn5r" Jan 22 13:40:26 crc kubenswrapper[4773]: I0122 13:40:26.655262 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-6f84465cdd-mvww6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.147581 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-54c5697d4f-hg5lj"] Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.270868 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-d88f67fbd-7wn5r"] Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.366448 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-6f84465cdd-mvww6"] Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.468765 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6f84465cdd-mvww6" event={"ID":"06be61a2-6723-45ac-a38c-37b0ddf88470","Type":"ContainerStarted","Data":"dafa8fac69e8139615d9f1ec37f8ea13048aba0d777f14e4ccd293655905cc91"} Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.471579 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-54c5697d4f-hg5lj" event={"ID":"93a64aef-7e70-4977-8c33-203a4508abf2","Type":"ContainerStarted","Data":"f1e6b999e147a40ffad8bdb8d3673b2811f13412fd3ea4878e92a86b2a060c20"} Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.471650 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-54c5697d4f-hg5lj" event={"ID":"93a64aef-7e70-4977-8c33-203a4508abf2","Type":"ContainerStarted","Data":"7ba0c1b9d75d9eb1e502f21d9961b22a2c8fa0b15448906f5530466752cf7a36"} Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.472447 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-54c5697d4f-hg5lj" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.478658 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-d88f67fbd-7wn5r" event={"ID":"0633a562-5717-4b8e-a11f-1fc3eef8f23a","Type":"ContainerStarted","Data":"c9ff81a614459ad4a59d97a57968d8d394aee4b25ac819466ac09ae756886054"} Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.478699 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-d88f67fbd-7wn5r" event={"ID":"0633a562-5717-4b8e-a11f-1fc3eef8f23a","Type":"ContainerStarted","Data":"d877e5057af34cc52e8a0058dbb7ff174a0d6af594b8f651dd4f3d0107d51fdd"} Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.478878 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-d88f67fbd-7wn5r" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.506249 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-69bc86474b-hppz8"] Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.506523 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-api-69bc86474b-hppz8" podUID="c90999f3-789d-415f-9e3c-67b94a0172fb" containerName="heat-api" containerID="cri-o://90d4a7d02d183727ff1d9590ce62ad853c96b47015f6a1843c7e4c1b71429a30" gracePeriod=60 Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.508116 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-54c5697d4f-hg5lj" podStartSLOduration=1.5080940790000001 podStartE2EDuration="1.508094079s" podCreationTimestamp="2026-01-22 13:40:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:40:27.493129928 +0000 UTC m=+6335.071245753" watchObservedRunningTime="2026-01-22 13:40:27.508094079 +0000 UTC 
m=+6335.086209904" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.567693 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-5c7548b94d-xvrz6"] Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.570191 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.572303 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-api-internal-svc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.584317 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-api-public-svc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.625808 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-79645699d7-hqzxf"] Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.626603 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-cfnapi-79645699d7-hqzxf" podUID="bea4ee1c-25a1-4bb6-829b-088a2de31247" containerName="heat-cfnapi" containerID="cri-o://b141e3108644d42f833eae1abe686c7be93486a35cbb8619ed01adc96d9111a7" gracePeriod=60 Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.635653 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/15dbb5e0-8d05-416b-b268-906db6b67cf8-public-tls-certs\") pod \"heat-api-5c7548b94d-xvrz6\" (UID: \"15dbb5e0-8d05-416b-b268-906db6b67cf8\") " pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.635731 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/15dbb5e0-8d05-416b-b268-906db6b67cf8-config-data-custom\") pod \"heat-api-5c7548b94d-xvrz6\" (UID: \"15dbb5e0-8d05-416b-b268-906db6b67cf8\") " pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.636050 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/15dbb5e0-8d05-416b-b268-906db6b67cf8-internal-tls-certs\") pod \"heat-api-5c7548b94d-xvrz6\" (UID: \"15dbb5e0-8d05-416b-b268-906db6b67cf8\") " pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.636091 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15dbb5e0-8d05-416b-b268-906db6b67cf8-combined-ca-bundle\") pod \"heat-api-5c7548b94d-xvrz6\" (UID: \"15dbb5e0-8d05-416b-b268-906db6b67cf8\") " pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.636141 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7jcr\" (UniqueName: \"kubernetes.io/projected/15dbb5e0-8d05-416b-b268-906db6b67cf8-kube-api-access-m7jcr\") pod \"heat-api-5c7548b94d-xvrz6\" (UID: \"15dbb5e0-8d05-416b-b268-906db6b67cf8\") " pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.636226 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15dbb5e0-8d05-416b-b268-906db6b67cf8-config-data\") pod 
\"heat-api-5c7548b94d-xvrz6\" (UID: \"15dbb5e0-8d05-416b-b268-906db6b67cf8\") " pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.648336 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-d88f67fbd-7wn5r" podStartSLOduration=1.6482872290000001 podStartE2EDuration="1.648287229s" podCreationTimestamp="2026-01-22 13:40:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:40:27.550654065 +0000 UTC m=+6335.128769900" watchObservedRunningTime="2026-01-22 13:40:27.648287229 +0000 UTC m=+6335.226403044" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.686896 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-5c7548b94d-xvrz6"] Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.699154 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-68567c67d-p7vzc"] Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.700630 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.702628 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-cfnapi-public-svc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.702887 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-cfnapi-internal-svc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.719140 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-68567c67d-p7vzc"] Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.743066 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/15dbb5e0-8d05-416b-b268-906db6b67cf8-internal-tls-certs\") pod \"heat-api-5c7548b94d-xvrz6\" (UID: \"15dbb5e0-8d05-416b-b268-906db6b67cf8\") " pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.743126 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01ff0cd8-8d9d-4666-bd26-dde1424eb01f-config-data\") pod \"heat-cfnapi-68567c67d-p7vzc\" (UID: \"01ff0cd8-8d9d-4666-bd26-dde1424eb01f\") " pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.743182 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15dbb5e0-8d05-416b-b268-906db6b67cf8-combined-ca-bundle\") pod \"heat-api-5c7548b94d-xvrz6\" (UID: \"15dbb5e0-8d05-416b-b268-906db6b67cf8\") " pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.743261 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4pqv\" (UniqueName: \"kubernetes.io/projected/01ff0cd8-8d9d-4666-bd26-dde1424eb01f-kube-api-access-z4pqv\") pod \"heat-cfnapi-68567c67d-p7vzc\" (UID: \"01ff0cd8-8d9d-4666-bd26-dde1424eb01f\") " pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.743314 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7jcr\" (UniqueName: 
\"kubernetes.io/projected/15dbb5e0-8d05-416b-b268-906db6b67cf8-kube-api-access-m7jcr\") pod \"heat-api-5c7548b94d-xvrz6\" (UID: \"15dbb5e0-8d05-416b-b268-906db6b67cf8\") " pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.743365 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/01ff0cd8-8d9d-4666-bd26-dde1424eb01f-config-data-custom\") pod \"heat-cfnapi-68567c67d-p7vzc\" (UID: \"01ff0cd8-8d9d-4666-bd26-dde1424eb01f\") " pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.743426 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15dbb5e0-8d05-416b-b268-906db6b67cf8-config-data\") pod \"heat-api-5c7548b94d-xvrz6\" (UID: \"15dbb5e0-8d05-416b-b268-906db6b67cf8\") " pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.743510 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/15dbb5e0-8d05-416b-b268-906db6b67cf8-public-tls-certs\") pod \"heat-api-5c7548b94d-xvrz6\" (UID: \"15dbb5e0-8d05-416b-b268-906db6b67cf8\") " pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.743525 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/01ff0cd8-8d9d-4666-bd26-dde1424eb01f-public-tls-certs\") pod \"heat-cfnapi-68567c67d-p7vzc\" (UID: \"01ff0cd8-8d9d-4666-bd26-dde1424eb01f\") " pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.743571 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/15dbb5e0-8d05-416b-b268-906db6b67cf8-config-data-custom\") pod \"heat-api-5c7548b94d-xvrz6\" (UID: \"15dbb5e0-8d05-416b-b268-906db6b67cf8\") " pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.743698 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01ff0cd8-8d9d-4666-bd26-dde1424eb01f-combined-ca-bundle\") pod \"heat-cfnapi-68567c67d-p7vzc\" (UID: \"01ff0cd8-8d9d-4666-bd26-dde1424eb01f\") " pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.743920 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/01ff0cd8-8d9d-4666-bd26-dde1424eb01f-internal-tls-certs\") pod \"heat-cfnapi-68567c67d-p7vzc\" (UID: \"01ff0cd8-8d9d-4666-bd26-dde1424eb01f\") " pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.764592 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15dbb5e0-8d05-416b-b268-906db6b67cf8-config-data\") pod \"heat-api-5c7548b94d-xvrz6\" (UID: \"15dbb5e0-8d05-416b-b268-906db6b67cf8\") " pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.765347 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/15dbb5e0-8d05-416b-b268-906db6b67cf8-public-tls-certs\") pod \"heat-api-5c7548b94d-xvrz6\" (UID: \"15dbb5e0-8d05-416b-b268-906db6b67cf8\") " pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.765969 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15dbb5e0-8d05-416b-b268-906db6b67cf8-combined-ca-bundle\") pod \"heat-api-5c7548b94d-xvrz6\" (UID: \"15dbb5e0-8d05-416b-b268-906db6b67cf8\") " pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.767778 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/15dbb5e0-8d05-416b-b268-906db6b67cf8-config-data-custom\") pod \"heat-api-5c7548b94d-xvrz6\" (UID: \"15dbb5e0-8d05-416b-b268-906db6b67cf8\") " pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.768167 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7jcr\" (UniqueName: \"kubernetes.io/projected/15dbb5e0-8d05-416b-b268-906db6b67cf8-kube-api-access-m7jcr\") pod \"heat-api-5c7548b94d-xvrz6\" (UID: \"15dbb5e0-8d05-416b-b268-906db6b67cf8\") " pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.776262 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/15dbb5e0-8d05-416b-b268-906db6b67cf8-internal-tls-certs\") pod \"heat-api-5c7548b94d-xvrz6\" (UID: \"15dbb5e0-8d05-416b-b268-906db6b67cf8\") " pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.847689 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01ff0cd8-8d9d-4666-bd26-dde1424eb01f-combined-ca-bundle\") pod \"heat-cfnapi-68567c67d-p7vzc\" (UID: \"01ff0cd8-8d9d-4666-bd26-dde1424eb01f\") " pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.847785 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/01ff0cd8-8d9d-4666-bd26-dde1424eb01f-internal-tls-certs\") pod \"heat-cfnapi-68567c67d-p7vzc\" (UID: \"01ff0cd8-8d9d-4666-bd26-dde1424eb01f\") " pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.847835 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01ff0cd8-8d9d-4666-bd26-dde1424eb01f-config-data\") pod \"heat-cfnapi-68567c67d-p7vzc\" (UID: \"01ff0cd8-8d9d-4666-bd26-dde1424eb01f\") " pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.847877 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4pqv\" (UniqueName: \"kubernetes.io/projected/01ff0cd8-8d9d-4666-bd26-dde1424eb01f-kube-api-access-z4pqv\") pod \"heat-cfnapi-68567c67d-p7vzc\" (UID: \"01ff0cd8-8d9d-4666-bd26-dde1424eb01f\") " pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.847932 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/01ff0cd8-8d9d-4666-bd26-dde1424eb01f-config-data-custom\") pod \"heat-cfnapi-68567c67d-p7vzc\" (UID: \"01ff0cd8-8d9d-4666-bd26-dde1424eb01f\") " pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.848001 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/01ff0cd8-8d9d-4666-bd26-dde1424eb01f-public-tls-certs\") pod \"heat-cfnapi-68567c67d-p7vzc\" (UID: \"01ff0cd8-8d9d-4666-bd26-dde1424eb01f\") " pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.853088 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/01ff0cd8-8d9d-4666-bd26-dde1424eb01f-internal-tls-certs\") pod \"heat-cfnapi-68567c67d-p7vzc\" (UID: \"01ff0cd8-8d9d-4666-bd26-dde1424eb01f\") " pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.857937 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/01ff0cd8-8d9d-4666-bd26-dde1424eb01f-public-tls-certs\") pod \"heat-cfnapi-68567c67d-p7vzc\" (UID: \"01ff0cd8-8d9d-4666-bd26-dde1424eb01f\") " pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.865549 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01ff0cd8-8d9d-4666-bd26-dde1424eb01f-combined-ca-bundle\") pod \"heat-cfnapi-68567c67d-p7vzc\" (UID: \"01ff0cd8-8d9d-4666-bd26-dde1424eb01f\") " pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.866563 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/01ff0cd8-8d9d-4666-bd26-dde1424eb01f-config-data-custom\") pod \"heat-cfnapi-68567c67d-p7vzc\" (UID: \"01ff0cd8-8d9d-4666-bd26-dde1424eb01f\") " pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.870727 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01ff0cd8-8d9d-4666-bd26-dde1424eb01f-config-data\") pod \"heat-cfnapi-68567c67d-p7vzc\" (UID: \"01ff0cd8-8d9d-4666-bd26-dde1424eb01f\") " pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.872642 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4pqv\" (UniqueName: \"kubernetes.io/projected/01ff0cd8-8d9d-4666-bd26-dde1424eb01f-kube-api-access-z4pqv\") pod \"heat-cfnapi-68567c67d-p7vzc\" (UID: \"01ff0cd8-8d9d-4666-bd26-dde1424eb01f\") " pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:27 crc kubenswrapper[4773]: I0122 13:40:27.926245 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.024126 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.350409 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-79645699d7-hqzxf" Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.459847 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bea4ee1c-25a1-4bb6-829b-088a2de31247-config-data-custom\") pod \"bea4ee1c-25a1-4bb6-829b-088a2de31247\" (UID: \"bea4ee1c-25a1-4bb6-829b-088a2de31247\") " Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.460512 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bea4ee1c-25a1-4bb6-829b-088a2de31247-combined-ca-bundle\") pod \"bea4ee1c-25a1-4bb6-829b-088a2de31247\" (UID: \"bea4ee1c-25a1-4bb6-829b-088a2de31247\") " Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.460548 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bea4ee1c-25a1-4bb6-829b-088a2de31247-config-data\") pod \"bea4ee1c-25a1-4bb6-829b-088a2de31247\" (UID: \"bea4ee1c-25a1-4bb6-829b-088a2de31247\") " Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.460614 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbk2r\" (UniqueName: \"kubernetes.io/projected/bea4ee1c-25a1-4bb6-829b-088a2de31247-kube-api-access-hbk2r\") pod \"bea4ee1c-25a1-4bb6-829b-088a2de31247\" (UID: \"bea4ee1c-25a1-4bb6-829b-088a2de31247\") " Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.467125 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bea4ee1c-25a1-4bb6-829b-088a2de31247-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "bea4ee1c-25a1-4bb6-829b-088a2de31247" (UID: "bea4ee1c-25a1-4bb6-829b-088a2de31247"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.473294 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bea4ee1c-25a1-4bb6-829b-088a2de31247-kube-api-access-hbk2r" (OuterVolumeSpecName: "kube-api-access-hbk2r") pod "bea4ee1c-25a1-4bb6-829b-088a2de31247" (UID: "bea4ee1c-25a1-4bb6-829b-088a2de31247"). InnerVolumeSpecName "kube-api-access-hbk2r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.502341 4773 generic.go:334] "Generic (PLEG): container finished" podID="bea4ee1c-25a1-4bb6-829b-088a2de31247" containerID="b141e3108644d42f833eae1abe686c7be93486a35cbb8619ed01adc96d9111a7" exitCode=0 Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.502421 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-79645699d7-hqzxf" event={"ID":"bea4ee1c-25a1-4bb6-829b-088a2de31247","Type":"ContainerDied","Data":"b141e3108644d42f833eae1abe686c7be93486a35cbb8619ed01adc96d9111a7"} Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.502454 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-79645699d7-hqzxf" event={"ID":"bea4ee1c-25a1-4bb6-829b-088a2de31247","Type":"ContainerDied","Data":"3119e5440c4518c5e5354fd2640b5f984cfab4cce5c9230947b0c29f8bebb7b0"} Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.502472 4773 scope.go:117] "RemoveContainer" containerID="b141e3108644d42f833eae1abe686c7be93486a35cbb8619ed01adc96d9111a7" Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.502665 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-79645699d7-hqzxf" Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.506245 4773 generic.go:334] "Generic (PLEG): container finished" podID="06be61a2-6723-45ac-a38c-37b0ddf88470" containerID="60e3a5bef7006c8223b7d5fff3577af4426e667aae336817eb235d78dfdcc7b5" exitCode=1 Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.506456 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6f84465cdd-mvww6" event={"ID":"06be61a2-6723-45ac-a38c-37b0ddf88470","Type":"ContainerDied","Data":"60e3a5bef7006c8223b7d5fff3577af4426e667aae336817eb235d78dfdcc7b5"} Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.507112 4773 scope.go:117] "RemoveContainer" containerID="60e3a5bef7006c8223b7d5fff3577af4426e667aae336817eb235d78dfdcc7b5" Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.510261 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bea4ee1c-25a1-4bb6-829b-088a2de31247-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bea4ee1c-25a1-4bb6-829b-088a2de31247" (UID: "bea4ee1c-25a1-4bb6-829b-088a2de31247"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.520471 4773 generic.go:334] "Generic (PLEG): container finished" podID="0633a562-5717-4b8e-a11f-1fc3eef8f23a" containerID="c9ff81a614459ad4a59d97a57968d8d394aee4b25ac819466ac09ae756886054" exitCode=1 Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.520718 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-d88f67fbd-7wn5r" event={"ID":"0633a562-5717-4b8e-a11f-1fc3eef8f23a","Type":"ContainerDied","Data":"c9ff81a614459ad4a59d97a57968d8d394aee4b25ac819466ac09ae756886054"} Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.526201 4773 scope.go:117] "RemoveContainer" containerID="c9ff81a614459ad4a59d97a57968d8d394aee4b25ac819466ac09ae756886054" Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.548225 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.549170 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bea4ee1c-25a1-4bb6-829b-088a2de31247-config-data" (OuterVolumeSpecName: "config-data") pod "bea4ee1c-25a1-4bb6-829b-088a2de31247" (UID: "bea4ee1c-25a1-4bb6-829b-088a2de31247"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.560888 4773 generic.go:334] "Generic (PLEG): container finished" podID="c90999f3-789d-415f-9e3c-67b94a0172fb" containerID="90d4a7d02d183727ff1d9590ce62ad853c96b47015f6a1843c7e4c1b71429a30" exitCode=0 Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.561455 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-69bc86474b-hppz8" event={"ID":"c90999f3-789d-415f-9e3c-67b94a0172fb","Type":"ContainerDied","Data":"90d4a7d02d183727ff1d9590ce62ad853c96b47015f6a1843c7e4c1b71429a30"} Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.565148 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bea4ee1c-25a1-4bb6-829b-088a2de31247-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.565187 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bea4ee1c-25a1-4bb6-829b-088a2de31247-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.565203 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bea4ee1c-25a1-4bb6-829b-088a2de31247-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.565215 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbk2r\" (UniqueName: \"kubernetes.io/projected/bea4ee1c-25a1-4bb6-829b-088a2de31247-kube-api-access-hbk2r\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.713801 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-5c7548b94d-xvrz6"] Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.743298 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-68567c67d-p7vzc"] Jan 22 13:40:28 crc kubenswrapper[4773]: W0122 13:40:28.753147 4773 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod01ff0cd8_8d9d_4666_bd26_dde1424eb01f.slice/crio-ffff28abd741b04e43af0ae1c95b3acc74d84c49c7def4757593f5a43eeb79f9 WatchSource:0}: Error finding container ffff28abd741b04e43af0ae1c95b3acc74d84c49c7def4757593f5a43eeb79f9: Status 404 returned error can't find the container with id ffff28abd741b04e43af0ae1c95b3acc74d84c49c7def4757593f5a43eeb79f9 Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.774596 4773 scope.go:117] "RemoveContainer" containerID="b141e3108644d42f833eae1abe686c7be93486a35cbb8619ed01adc96d9111a7" Jan 22 13:40:28 crc kubenswrapper[4773]: E0122 13:40:28.775701 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b141e3108644d42f833eae1abe686c7be93486a35cbb8619ed01adc96d9111a7\": container with ID starting with b141e3108644d42f833eae1abe686c7be93486a35cbb8619ed01adc96d9111a7 not found: ID does not exist" containerID="b141e3108644d42f833eae1abe686c7be93486a35cbb8619ed01adc96d9111a7" Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.778827 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b141e3108644d42f833eae1abe686c7be93486a35cbb8619ed01adc96d9111a7"} err="failed to get container status \"b141e3108644d42f833eae1abe686c7be93486a35cbb8619ed01adc96d9111a7\": rpc error: code = NotFound desc = could not find container \"b141e3108644d42f833eae1abe686c7be93486a35cbb8619ed01adc96d9111a7\": container with ID starting with b141e3108644d42f833eae1abe686c7be93486a35cbb8619ed01adc96d9111a7 not found: ID does not exist" Jan 22 13:40:28 crc kubenswrapper[4773]: I0122 13:40:28.971242 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-69bc86474b-hppz8" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.053788 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-79645699d7-hqzxf"] Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.064556 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-cfnapi-79645699d7-hqzxf"] Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.085096 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c90999f3-789d-415f-9e3c-67b94a0172fb-combined-ca-bundle\") pod \"c90999f3-789d-415f-9e3c-67b94a0172fb\" (UID: \"c90999f3-789d-415f-9e3c-67b94a0172fb\") " Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.085176 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c90999f3-789d-415f-9e3c-67b94a0172fb-config-data\") pod \"c90999f3-789d-415f-9e3c-67b94a0172fb\" (UID: \"c90999f3-789d-415f-9e3c-67b94a0172fb\") " Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.085240 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c90999f3-789d-415f-9e3c-67b94a0172fb-config-data-custom\") pod \"c90999f3-789d-415f-9e3c-67b94a0172fb\" (UID: \"c90999f3-789d-415f-9e3c-67b94a0172fb\") " Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.085576 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hpwxs\" (UniqueName: \"kubernetes.io/projected/c90999f3-789d-415f-9e3c-67b94a0172fb-kube-api-access-hpwxs\") pod \"c90999f3-789d-415f-9e3c-67b94a0172fb\" (UID: 
\"c90999f3-789d-415f-9e3c-67b94a0172fb\") " Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.126833 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c90999f3-789d-415f-9e3c-67b94a0172fb-kube-api-access-hpwxs" (OuterVolumeSpecName: "kube-api-access-hpwxs") pod "c90999f3-789d-415f-9e3c-67b94a0172fb" (UID: "c90999f3-789d-415f-9e3c-67b94a0172fb"). InnerVolumeSpecName "kube-api-access-hpwxs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.128200 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c90999f3-789d-415f-9e3c-67b94a0172fb-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "c90999f3-789d-415f-9e3c-67b94a0172fb" (UID: "c90999f3-789d-415f-9e3c-67b94a0172fb"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.188790 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c90999f3-789d-415f-9e3c-67b94a0172fb-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.188833 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hpwxs\" (UniqueName: \"kubernetes.io/projected/c90999f3-789d-415f-9e3c-67b94a0172fb-kube-api-access-hpwxs\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.271496 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c90999f3-789d-415f-9e3c-67b94a0172fb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c90999f3-789d-415f-9e3c-67b94a0172fb" (UID: "c90999f3-789d-415f-9e3c-67b94a0172fb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.293698 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c90999f3-789d-415f-9e3c-67b94a0172fb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.314406 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c90999f3-789d-415f-9e3c-67b94a0172fb-config-data" (OuterVolumeSpecName: "config-data") pod "c90999f3-789d-415f-9e3c-67b94a0172fb" (UID: "c90999f3-789d-415f-9e3c-67b94a0172fb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.396853 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c90999f3-789d-415f-9e3c-67b94a0172fb-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.588685 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-68567c67d-p7vzc" event={"ID":"01ff0cd8-8d9d-4666-bd26-dde1424eb01f","Type":"ContainerStarted","Data":"4a485c92a4c165d5df9a5e25350f0fb2e2713aac48694e77bfd2d9ceabafd8b4"} Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.588736 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-68567c67d-p7vzc" event={"ID":"01ff0cd8-8d9d-4666-bd26-dde1424eb01f","Type":"ContainerStarted","Data":"ffff28abd741b04e43af0ae1c95b3acc74d84c49c7def4757593f5a43eeb79f9"} Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.590147 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.605585 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-69bc86474b-hppz8" event={"ID":"c90999f3-789d-415f-9e3c-67b94a0172fb","Type":"ContainerDied","Data":"92cb6dd9f68c13daf337caf4fc6270148eafa2f396c57745b1da4f05bb4668df"} Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.605650 4773 scope.go:117] "RemoveContainer" containerID="90d4a7d02d183727ff1d9590ce62ad853c96b47015f6a1843c7e4c1b71429a30" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.605809 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-69bc86474b-hppz8" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.636752 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-68567c67d-p7vzc" podStartSLOduration=2.636719246 podStartE2EDuration="2.636719246s" podCreationTimestamp="2026-01-22 13:40:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:40:29.609196963 +0000 UTC m=+6337.187312788" watchObservedRunningTime="2026-01-22 13:40:29.636719246 +0000 UTC m=+6337.214835071" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.639929 4773 generic.go:334] "Generic (PLEG): container finished" podID="06be61a2-6723-45ac-a38c-37b0ddf88470" containerID="227f89798824a1e0c2d8ae18b432dae51122fca2ccd12373e01b1b9710434aae" exitCode=1 Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.640001 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6f84465cdd-mvww6" event={"ID":"06be61a2-6723-45ac-a38c-37b0ddf88470","Type":"ContainerDied","Data":"227f89798824a1e0c2d8ae18b432dae51122fca2ccd12373e01b1b9710434aae"} Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.646843 4773 scope.go:117] "RemoveContainer" containerID="227f89798824a1e0c2d8ae18b432dae51122fca2ccd12373e01b1b9710434aae" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.646898 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-d88f67fbd-7wn5r" event={"ID":"0633a562-5717-4b8e-a11f-1fc3eef8f23a","Type":"ContainerDied","Data":"3af2844aad41bacf116c3ed83c428bab90dbe534f099cf60620bcb649a9a7e01"} Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.645397 4773 generic.go:334] "Generic (PLEG): container finished" 
podID="0633a562-5717-4b8e-a11f-1fc3eef8f23a" containerID="3af2844aad41bacf116c3ed83c428bab90dbe534f099cf60620bcb649a9a7e01" exitCode=1 Jan 22 13:40:29 crc kubenswrapper[4773]: E0122 13:40:29.647160 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-cfnapi\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-cfnapi pod=heat-cfnapi-6f84465cdd-mvww6_openstack(06be61a2-6723-45ac-a38c-37b0ddf88470)\"" pod="openstack/heat-cfnapi-6f84465cdd-mvww6" podUID="06be61a2-6723-45ac-a38c-37b0ddf88470" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.649119 4773 scope.go:117] "RemoveContainer" containerID="3af2844aad41bacf116c3ed83c428bab90dbe534f099cf60620bcb649a9a7e01" Jan 22 13:40:29 crc kubenswrapper[4773]: E0122 13:40:29.652668 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-d88f67fbd-7wn5r_openstack(0633a562-5717-4b8e-a11f-1fc3eef8f23a)\"" pod="openstack/heat-api-d88f67fbd-7wn5r" podUID="0633a562-5717-4b8e-a11f-1fc3eef8f23a" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.678850 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-5c7548b94d-xvrz6" event={"ID":"15dbb5e0-8d05-416b-b268-906db6b67cf8","Type":"ContainerStarted","Data":"8c8fcfe5ea4001886e81a32ebe58bace31d9905c554c6d3dce24853006b47b22"} Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.678939 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-5c7548b94d-xvrz6" event={"ID":"15dbb5e0-8d05-416b-b268-906db6b67cf8","Type":"ContainerStarted","Data":"8778619a7f1c0776c3f89bb55deff60d5ebebc1ac3f49e6ed37a0c0ccb2401f3"} Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.679495 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.697113 4773 scope.go:117] "RemoveContainer" containerID="60e3a5bef7006c8223b7d5fff3577af4426e667aae336817eb235d78dfdcc7b5" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.790485 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-5c7548b94d-xvrz6" podStartSLOduration=2.790461677 podStartE2EDuration="2.790461677s" podCreationTimestamp="2026-01-22 13:40:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:40:29.751869493 +0000 UTC m=+6337.329985338" watchObservedRunningTime="2026-01-22 13:40:29.790461677 +0000 UTC m=+6337.368577502" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.807450 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-69bc86474b-hppz8"] Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.811958 4773 scope.go:117] "RemoveContainer" containerID="c9ff81a614459ad4a59d97a57968d8d394aee4b25ac819466ac09ae756886054" Jan 22 13:40:29 crc kubenswrapper[4773]: I0122 13:40:29.815063 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-api-69bc86474b-hppz8"] Jan 22 13:40:30 crc kubenswrapper[4773]: I0122 13:40:30.697096 4773 scope.go:117] "RemoveContainer" containerID="227f89798824a1e0c2d8ae18b432dae51122fca2ccd12373e01b1b9710434aae" Jan 22 13:40:30 crc kubenswrapper[4773]: E0122 13:40:30.697615 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-cfnapi\" with 
CrashLoopBackOff: \"back-off 10s restarting failed container=heat-cfnapi pod=heat-cfnapi-6f84465cdd-mvww6_openstack(06be61a2-6723-45ac-a38c-37b0ddf88470)\"" pod="openstack/heat-cfnapi-6f84465cdd-mvww6" podUID="06be61a2-6723-45ac-a38c-37b0ddf88470" Jan 22 13:40:30 crc kubenswrapper[4773]: I0122 13:40:30.701428 4773 scope.go:117] "RemoveContainer" containerID="3af2844aad41bacf116c3ed83c428bab90dbe534f099cf60620bcb649a9a7e01" Jan 22 13:40:30 crc kubenswrapper[4773]: E0122 13:40:30.701659 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-d88f67fbd-7wn5r_openstack(0633a562-5717-4b8e-a11f-1fc3eef8f23a)\"" pod="openstack/heat-api-d88f67fbd-7wn5r" podUID="0633a562-5717-4b8e-a11f-1fc3eef8f23a" Jan 22 13:40:30 crc kubenswrapper[4773]: I0122 13:40:30.811976 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bea4ee1c-25a1-4bb6-829b-088a2de31247" path="/var/lib/kubelet/pods/bea4ee1c-25a1-4bb6-829b-088a2de31247/volumes" Jan 22 13:40:30 crc kubenswrapper[4773]: I0122 13:40:30.812987 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c90999f3-789d-415f-9e3c-67b94a0172fb" path="/var/lib/kubelet/pods/c90999f3-789d-415f-9e3c-67b94a0172fb/volumes" Jan 22 13:40:30 crc kubenswrapper[4773]: I0122 13:40:30.889091 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-64d88cbf74-c2bxb" Jan 22 13:40:30 crc kubenswrapper[4773]: I0122 13:40:30.952103 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6cdcfc4746-tlv4z"] Jan 22 13:40:30 crc kubenswrapper[4773]: I0122 13:40:30.954678 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6cdcfc4746-tlv4z" podUID="746d3447-d12f-4158-89bc-9bb26d157e47" containerName="horizon-log" containerID="cri-o://26e525b123f04cad1f329caae148814476a7b9a39d7bf6bdbcbfc4cc5dae7d98" gracePeriod=30 Jan 22 13:40:30 crc kubenswrapper[4773]: I0122 13:40:30.954826 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6cdcfc4746-tlv4z" podUID="746d3447-d12f-4158-89bc-9bb26d157e47" containerName="horizon" containerID="cri-o://3be82d96f92886f3fc9bdff193385c39a68d54ae4fc48a63ebc3257fafbe1407" gracePeriod=30 Jan 22 13:40:31 crc kubenswrapper[4773]: I0122 13:40:31.044074 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-q87g6"] Jan 22 13:40:31 crc kubenswrapper[4773]: I0122 13:40:31.054565 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-q87g6"] Jan 22 13:40:31 crc kubenswrapper[4773]: I0122 13:40:31.609497 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-d88f67fbd-7wn5r" Jan 22 13:40:31 crc kubenswrapper[4773]: I0122 13:40:31.609575 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/heat-api-d88f67fbd-7wn5r" Jan 22 13:40:31 crc kubenswrapper[4773]: I0122 13:40:31.656395 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-6f84465cdd-mvww6" Jan 22 13:40:31 crc kubenswrapper[4773]: I0122 13:40:31.656466 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/heat-cfnapi-6f84465cdd-mvww6" Jan 22 13:40:31 crc kubenswrapper[4773]: I0122 13:40:31.707902 4773 scope.go:117] "RemoveContainer" 
containerID="3af2844aad41bacf116c3ed83c428bab90dbe534f099cf60620bcb649a9a7e01" Jan 22 13:40:31 crc kubenswrapper[4773]: I0122 13:40:31.707976 4773 scope.go:117] "RemoveContainer" containerID="227f89798824a1e0c2d8ae18b432dae51122fca2ccd12373e01b1b9710434aae" Jan 22 13:40:31 crc kubenswrapper[4773]: E0122 13:40:31.708214 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-cfnapi\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-cfnapi pod=heat-cfnapi-6f84465cdd-mvww6_openstack(06be61a2-6723-45ac-a38c-37b0ddf88470)\"" pod="openstack/heat-cfnapi-6f84465cdd-mvww6" podUID="06be61a2-6723-45ac-a38c-37b0ddf88470" Jan 22 13:40:31 crc kubenswrapper[4773]: E0122 13:40:31.708230 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-d88f67fbd-7wn5r_openstack(0633a562-5717-4b8e-a11f-1fc3eef8f23a)\"" pod="openstack/heat-api-d88f67fbd-7wn5r" podUID="0633a562-5717-4b8e-a11f-1fc3eef8f23a" Jan 22 13:40:32 crc kubenswrapper[4773]: I0122 13:40:32.673341 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dede7e19-f7fd-4acb-a9b4-26b48bfdbc69" path="/var/lib/kubelet/pods/dede7e19-f7fd-4acb-a9b4-26b48bfdbc69/volumes" Jan 22 13:40:34 crc kubenswrapper[4773]: I0122 13:40:34.074303 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:40:34 crc kubenswrapper[4773]: I0122 13:40:34.074369 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:40:34 crc kubenswrapper[4773]: I0122 13:40:34.742529 4773 generic.go:334] "Generic (PLEG): container finished" podID="746d3447-d12f-4158-89bc-9bb26d157e47" containerID="3be82d96f92886f3fc9bdff193385c39a68d54ae4fc48a63ebc3257fafbe1407" exitCode=0 Jan 22 13:40:34 crc kubenswrapper[4773]: I0122 13:40:34.742612 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6cdcfc4746-tlv4z" event={"ID":"746d3447-d12f-4158-89bc-9bb26d157e47","Type":"ContainerDied","Data":"3be82d96f92886f3fc9bdff193385c39a68d54ae4fc48a63ebc3257fafbe1407"} Jan 22 13:40:39 crc kubenswrapper[4773]: I0122 13:40:39.180386 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-66f9c7c857-mcmx8" Jan 22 13:40:39 crc kubenswrapper[4773]: I0122 13:40:39.602619 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-5c7548b94d-xvrz6" Jan 22 13:40:39 crc kubenswrapper[4773]: I0122 13:40:39.660548 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-d88f67fbd-7wn5r"] Jan 22 13:40:39 crc kubenswrapper[4773]: I0122 13:40:39.752718 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-68567c67d-p7vzc" Jan 22 13:40:39 crc kubenswrapper[4773]: I0122 13:40:39.844990 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-6f84465cdd-mvww6"] Jan 22 13:40:40 crc 
kubenswrapper[4773]: I0122 13:40:40.147093 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-d88f67fbd-7wn5r" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.328339 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0633a562-5717-4b8e-a11f-1fc3eef8f23a-combined-ca-bundle\") pod \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\" (UID: \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\") " Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.328408 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xvvk2\" (UniqueName: \"kubernetes.io/projected/0633a562-5717-4b8e-a11f-1fc3eef8f23a-kube-api-access-xvvk2\") pod \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\" (UID: \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\") " Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.328455 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0633a562-5717-4b8e-a11f-1fc3eef8f23a-config-data\") pod \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\" (UID: \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\") " Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.328482 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0633a562-5717-4b8e-a11f-1fc3eef8f23a-config-data-custom\") pod \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\" (UID: \"0633a562-5717-4b8e-a11f-1fc3eef8f23a\") " Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.334166 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0633a562-5717-4b8e-a11f-1fc3eef8f23a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0633a562-5717-4b8e-a11f-1fc3eef8f23a" (UID: "0633a562-5717-4b8e-a11f-1fc3eef8f23a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.334525 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0633a562-5717-4b8e-a11f-1fc3eef8f23a-kube-api-access-xvvk2" (OuterVolumeSpecName: "kube-api-access-xvvk2") pod "0633a562-5717-4b8e-a11f-1fc3eef8f23a" (UID: "0633a562-5717-4b8e-a11f-1fc3eef8f23a"). InnerVolumeSpecName "kube-api-access-xvvk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.371232 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0633a562-5717-4b8e-a11f-1fc3eef8f23a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0633a562-5717-4b8e-a11f-1fc3eef8f23a" (UID: "0633a562-5717-4b8e-a11f-1fc3eef8f23a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.401229 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-6f84465cdd-mvww6" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.430655 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0633a562-5717-4b8e-a11f-1fc3eef8f23a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.430781 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xvvk2\" (UniqueName: \"kubernetes.io/projected/0633a562-5717-4b8e-a11f-1fc3eef8f23a-kube-api-access-xvvk2\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.430798 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0633a562-5717-4b8e-a11f-1fc3eef8f23a-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.436409 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0633a562-5717-4b8e-a11f-1fc3eef8f23a-config-data" (OuterVolumeSpecName: "config-data") pod "0633a562-5717-4b8e-a11f-1fc3eef8f23a" (UID: "0633a562-5717-4b8e-a11f-1fc3eef8f23a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.532354 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/06be61a2-6723-45ac-a38c-37b0ddf88470-config-data-custom\") pod \"06be61a2-6723-45ac-a38c-37b0ddf88470\" (UID: \"06be61a2-6723-45ac-a38c-37b0ddf88470\") " Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.532831 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06be61a2-6723-45ac-a38c-37b0ddf88470-combined-ca-bundle\") pod \"06be61a2-6723-45ac-a38c-37b0ddf88470\" (UID: \"06be61a2-6723-45ac-a38c-37b0ddf88470\") " Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.532947 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06be61a2-6723-45ac-a38c-37b0ddf88470-config-data\") pod \"06be61a2-6723-45ac-a38c-37b0ddf88470\" (UID: \"06be61a2-6723-45ac-a38c-37b0ddf88470\") " Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.533213 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvsds\" (UniqueName: \"kubernetes.io/projected/06be61a2-6723-45ac-a38c-37b0ddf88470-kube-api-access-qvsds\") pod \"06be61a2-6723-45ac-a38c-37b0ddf88470\" (UID: \"06be61a2-6723-45ac-a38c-37b0ddf88470\") " Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.534205 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0633a562-5717-4b8e-a11f-1fc3eef8f23a-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.536413 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06be61a2-6723-45ac-a38c-37b0ddf88470-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "06be61a2-6723-45ac-a38c-37b0ddf88470" (UID: "06be61a2-6723-45ac-a38c-37b0ddf88470"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.537341 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06be61a2-6723-45ac-a38c-37b0ddf88470-kube-api-access-qvsds" (OuterVolumeSpecName: "kube-api-access-qvsds") pod "06be61a2-6723-45ac-a38c-37b0ddf88470" (UID: "06be61a2-6723-45ac-a38c-37b0ddf88470"). InnerVolumeSpecName "kube-api-access-qvsds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.566581 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06be61a2-6723-45ac-a38c-37b0ddf88470-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "06be61a2-6723-45ac-a38c-37b0ddf88470" (UID: "06be61a2-6723-45ac-a38c-37b0ddf88470"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.591493 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06be61a2-6723-45ac-a38c-37b0ddf88470-config-data" (OuterVolumeSpecName: "config-data") pod "06be61a2-6723-45ac-a38c-37b0ddf88470" (UID: "06be61a2-6723-45ac-a38c-37b0ddf88470"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.636970 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvsds\" (UniqueName: \"kubernetes.io/projected/06be61a2-6723-45ac-a38c-37b0ddf88470-kube-api-access-qvsds\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.637019 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/06be61a2-6723-45ac-a38c-37b0ddf88470-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.637028 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06be61a2-6723-45ac-a38c-37b0ddf88470-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.637037 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06be61a2-6723-45ac-a38c-37b0ddf88470-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.810149 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6f84465cdd-mvww6" event={"ID":"06be61a2-6723-45ac-a38c-37b0ddf88470","Type":"ContainerDied","Data":"dafa8fac69e8139615d9f1ec37f8ea13048aba0d777f14e4ccd293655905cc91"} Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.810219 4773 scope.go:117] "RemoveContainer" containerID="227f89798824a1e0c2d8ae18b432dae51122fca2ccd12373e01b1b9710434aae" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.810339 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-6f84465cdd-mvww6" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.821311 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-d88f67fbd-7wn5r" event={"ID":"0633a562-5717-4b8e-a11f-1fc3eef8f23a","Type":"ContainerDied","Data":"d877e5057af34cc52e8a0058dbb7ff174a0d6af594b8f651dd4f3d0107d51fdd"} Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.821408 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-d88f67fbd-7wn5r" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.826249 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6cdcfc4746-tlv4z" podUID="746d3447-d12f-4158-89bc-9bb26d157e47" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.124:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.124:8443: connect: connection refused" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.857779 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-6f84465cdd-mvww6"] Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.860270 4773 scope.go:117] "RemoveContainer" containerID="3af2844aad41bacf116c3ed83c428bab90dbe534f099cf60620bcb649a9a7e01" Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.871726 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-cfnapi-6f84465cdd-mvww6"] Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.880571 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-d88f67fbd-7wn5r"] Jan 22 13:40:40 crc kubenswrapper[4773]: I0122 13:40:40.890227 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-api-d88f67fbd-7wn5r"] Jan 22 13:40:42 crc kubenswrapper[4773]: I0122 13:40:42.670067 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0633a562-5717-4b8e-a11f-1fc3eef8f23a" path="/var/lib/kubelet/pods/0633a562-5717-4b8e-a11f-1fc3eef8f23a/volumes" Jan 22 13:40:42 crc kubenswrapper[4773]: I0122 13:40:42.670930 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06be61a2-6723-45ac-a38c-37b0ddf88470" path="/var/lib/kubelet/pods/06be61a2-6723-45ac-a38c-37b0ddf88470/volumes" Jan 22 13:40:46 crc kubenswrapper[4773]: I0122 13:40:46.626102 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-54c5697d4f-hg5lj" Jan 22 13:40:46 crc kubenswrapper[4773]: I0122 13:40:46.705821 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-engine-66f9c7c857-mcmx8"] Jan 22 13:40:46 crc kubenswrapper[4773]: I0122 13:40:46.706123 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-engine-66f9c7c857-mcmx8" podUID="013bab31-6536-4eef-911e-a1e4f35a835e" containerName="heat-engine" containerID="cri-o://0d96228da6e02235cbca6a672f2da6a6452c230faecefefcc635fef43e42aed3" gracePeriod=60 Jan 22 13:40:49 crc kubenswrapper[4773]: E0122 13:40:49.149021 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0d96228da6e02235cbca6a672f2da6a6452c230faecefefcc635fef43e42aed3" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Jan 22 13:40:49 crc kubenswrapper[4773]: E0122 13:40:49.151038 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: 
cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0d96228da6e02235cbca6a672f2da6a6452c230faecefefcc635fef43e42aed3" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Jan 22 13:40:49 crc kubenswrapper[4773]: E0122 13:40:49.152493 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0d96228da6e02235cbca6a672f2da6a6452c230faecefefcc635fef43e42aed3" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Jan 22 13:40:49 crc kubenswrapper[4773]: E0122 13:40:49.152578 4773 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/heat-engine-66f9c7c857-mcmx8" podUID="013bab31-6536-4eef-911e-a1e4f35a835e" containerName="heat-engine" Jan 22 13:40:50 crc kubenswrapper[4773]: I0122 13:40:50.826729 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6cdcfc4746-tlv4z" podUID="746d3447-d12f-4158-89bc-9bb26d157e47" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.124:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.124:8443: connect: connection refused" Jan 22 13:40:52 crc kubenswrapper[4773]: I0122 13:40:52.982215 4773 generic.go:334] "Generic (PLEG): container finished" podID="013bab31-6536-4eef-911e-a1e4f35a835e" containerID="0d96228da6e02235cbca6a672f2da6a6452c230faecefefcc635fef43e42aed3" exitCode=0 Jan 22 13:40:52 crc kubenswrapper[4773]: I0122 13:40:52.982311 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-66f9c7c857-mcmx8" event={"ID":"013bab31-6536-4eef-911e-a1e4f35a835e","Type":"ContainerDied","Data":"0d96228da6e02235cbca6a672f2da6a6452c230faecefefcc635fef43e42aed3"} Jan 22 13:40:53 crc kubenswrapper[4773]: I0122 13:40:53.208669 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-66f9c7c857-mcmx8" Jan 22 13:40:53 crc kubenswrapper[4773]: I0122 13:40:53.287773 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/013bab31-6536-4eef-911e-a1e4f35a835e-config-data-custom\") pod \"013bab31-6536-4eef-911e-a1e4f35a835e\" (UID: \"013bab31-6536-4eef-911e-a1e4f35a835e\") " Jan 22 13:40:53 crc kubenswrapper[4773]: I0122 13:40:53.288227 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwbj5\" (UniqueName: \"kubernetes.io/projected/013bab31-6536-4eef-911e-a1e4f35a835e-kube-api-access-kwbj5\") pod \"013bab31-6536-4eef-911e-a1e4f35a835e\" (UID: \"013bab31-6536-4eef-911e-a1e4f35a835e\") " Jan 22 13:40:53 crc kubenswrapper[4773]: I0122 13:40:53.288354 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/013bab31-6536-4eef-911e-a1e4f35a835e-combined-ca-bundle\") pod \"013bab31-6536-4eef-911e-a1e4f35a835e\" (UID: \"013bab31-6536-4eef-911e-a1e4f35a835e\") " Jan 22 13:40:53 crc kubenswrapper[4773]: I0122 13:40:53.288475 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/013bab31-6536-4eef-911e-a1e4f35a835e-config-data\") pod \"013bab31-6536-4eef-911e-a1e4f35a835e\" (UID: \"013bab31-6536-4eef-911e-a1e4f35a835e\") " Jan 22 13:40:53 crc kubenswrapper[4773]: I0122 13:40:53.294698 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/013bab31-6536-4eef-911e-a1e4f35a835e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "013bab31-6536-4eef-911e-a1e4f35a835e" (UID: "013bab31-6536-4eef-911e-a1e4f35a835e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:40:53 crc kubenswrapper[4773]: I0122 13:40:53.298606 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/013bab31-6536-4eef-911e-a1e4f35a835e-kube-api-access-kwbj5" (OuterVolumeSpecName: "kube-api-access-kwbj5") pod "013bab31-6536-4eef-911e-a1e4f35a835e" (UID: "013bab31-6536-4eef-911e-a1e4f35a835e"). InnerVolumeSpecName "kube-api-access-kwbj5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:40:53 crc kubenswrapper[4773]: I0122 13:40:53.320894 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/013bab31-6536-4eef-911e-a1e4f35a835e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "013bab31-6536-4eef-911e-a1e4f35a835e" (UID: "013bab31-6536-4eef-911e-a1e4f35a835e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:40:53 crc kubenswrapper[4773]: I0122 13:40:53.376611 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/013bab31-6536-4eef-911e-a1e4f35a835e-config-data" (OuterVolumeSpecName: "config-data") pod "013bab31-6536-4eef-911e-a1e4f35a835e" (UID: "013bab31-6536-4eef-911e-a1e4f35a835e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:40:53 crc kubenswrapper[4773]: I0122 13:40:53.392213 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/013bab31-6536-4eef-911e-a1e4f35a835e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:53 crc kubenswrapper[4773]: I0122 13:40:53.392246 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/013bab31-6536-4eef-911e-a1e4f35a835e-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:53 crc kubenswrapper[4773]: I0122 13:40:53.392258 4773 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/013bab31-6536-4eef-911e-a1e4f35a835e-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:53 crc kubenswrapper[4773]: I0122 13:40:53.392268 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwbj5\" (UniqueName: \"kubernetes.io/projected/013bab31-6536-4eef-911e-a1e4f35a835e-kube-api-access-kwbj5\") on node \"crc\" DevicePath \"\"" Jan 22 13:40:53 crc kubenswrapper[4773]: I0122 13:40:53.997390 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-66f9c7c857-mcmx8" event={"ID":"013bab31-6536-4eef-911e-a1e4f35a835e","Type":"ContainerDied","Data":"83a98c81ab020cd73b57c81fd106f526fbe68a1bbdb2c0647675084413190d89"} Jan 22 13:40:53 crc kubenswrapper[4773]: I0122 13:40:53.997465 4773 scope.go:117] "RemoveContainer" containerID="0d96228da6e02235cbca6a672f2da6a6452c230faecefefcc635fef43e42aed3" Jan 22 13:40:53 crc kubenswrapper[4773]: I0122 13:40:53.997478 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-66f9c7c857-mcmx8" Jan 22 13:40:54 crc kubenswrapper[4773]: I0122 13:40:54.046099 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-engine-66f9c7c857-mcmx8"] Jan 22 13:40:54 crc kubenswrapper[4773]: I0122 13:40:54.061813 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-engine-66f9c7c857-mcmx8"] Jan 22 13:40:54 crc kubenswrapper[4773]: I0122 13:40:54.673732 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="013bab31-6536-4eef-911e-a1e4f35a835e" path="/var/lib/kubelet/pods/013bab31-6536-4eef-911e-a1e4f35a835e/volumes" Jan 22 13:40:58 crc kubenswrapper[4773]: I0122 13:40:58.743398 4773 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podbea4ee1c-25a1-4bb6-829b-088a2de31247"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podbea4ee1c-25a1-4bb6-829b-088a2de31247] : Timed out while waiting for systemd to remove kubepods-besteffort-podbea4ee1c_25a1_4bb6_829b_088a2de31247.slice" Jan 22 13:41:00 crc kubenswrapper[4773]: I0122 13:41:00.826685 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-6cdcfc4746-tlv4z" podUID="746d3447-d12f-4158-89bc-9bb26d157e47" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.124:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.124:8443: connect: connection refused" Jan 22 13:41:00 crc kubenswrapper[4773]: I0122 13:41:00.827041 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.091842 4773 generic.go:334] "Generic (PLEG): container finished" 
podID="746d3447-d12f-4158-89bc-9bb26d157e47" containerID="26e525b123f04cad1f329caae148814476a7b9a39d7bf6bdbcbfc4cc5dae7d98" exitCode=137 Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.092000 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6cdcfc4746-tlv4z" event={"ID":"746d3447-d12f-4158-89bc-9bb26d157e47","Type":"ContainerDied","Data":"26e525b123f04cad1f329caae148814476a7b9a39d7bf6bdbcbfc4cc5dae7d98"} Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.333294 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.380238 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nq5kf\" (UniqueName: \"kubernetes.io/projected/746d3447-d12f-4158-89bc-9bb26d157e47-kube-api-access-nq5kf\") pod \"746d3447-d12f-4158-89bc-9bb26d157e47\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.380392 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/746d3447-d12f-4158-89bc-9bb26d157e47-config-data\") pod \"746d3447-d12f-4158-89bc-9bb26d157e47\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.380421 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/746d3447-d12f-4158-89bc-9bb26d157e47-combined-ca-bundle\") pod \"746d3447-d12f-4158-89bc-9bb26d157e47\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.380442 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/746d3447-d12f-4158-89bc-9bb26d157e47-horizon-secret-key\") pod \"746d3447-d12f-4158-89bc-9bb26d157e47\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.380462 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/746d3447-d12f-4158-89bc-9bb26d157e47-logs\") pod \"746d3447-d12f-4158-89bc-9bb26d157e47\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.380515 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/746d3447-d12f-4158-89bc-9bb26d157e47-scripts\") pod \"746d3447-d12f-4158-89bc-9bb26d157e47\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.380545 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/746d3447-d12f-4158-89bc-9bb26d157e47-horizon-tls-certs\") pod \"746d3447-d12f-4158-89bc-9bb26d157e47\" (UID: \"746d3447-d12f-4158-89bc-9bb26d157e47\") " Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.382250 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/746d3447-d12f-4158-89bc-9bb26d157e47-logs" (OuterVolumeSpecName: "logs") pod "746d3447-d12f-4158-89bc-9bb26d157e47" (UID: "746d3447-d12f-4158-89bc-9bb26d157e47"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.398062 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/746d3447-d12f-4158-89bc-9bb26d157e47-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "746d3447-d12f-4158-89bc-9bb26d157e47" (UID: "746d3447-d12f-4158-89bc-9bb26d157e47"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.398334 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/746d3447-d12f-4158-89bc-9bb26d157e47-kube-api-access-nq5kf" (OuterVolumeSpecName: "kube-api-access-nq5kf") pod "746d3447-d12f-4158-89bc-9bb26d157e47" (UID: "746d3447-d12f-4158-89bc-9bb26d157e47"). InnerVolumeSpecName "kube-api-access-nq5kf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.412039 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/746d3447-d12f-4158-89bc-9bb26d157e47-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "746d3447-d12f-4158-89bc-9bb26d157e47" (UID: "746d3447-d12f-4158-89bc-9bb26d157e47"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.425182 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/746d3447-d12f-4158-89bc-9bb26d157e47-scripts" (OuterVolumeSpecName: "scripts") pod "746d3447-d12f-4158-89bc-9bb26d157e47" (UID: "746d3447-d12f-4158-89bc-9bb26d157e47"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.426053 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/746d3447-d12f-4158-89bc-9bb26d157e47-config-data" (OuterVolumeSpecName: "config-data") pod "746d3447-d12f-4158-89bc-9bb26d157e47" (UID: "746d3447-d12f-4158-89bc-9bb26d157e47"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.482548 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/746d3447-d12f-4158-89bc-9bb26d157e47-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.482586 4773 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/746d3447-d12f-4158-89bc-9bb26d157e47-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.482597 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/746d3447-d12f-4158-89bc-9bb26d157e47-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.482608 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/746d3447-d12f-4158-89bc-9bb26d157e47-logs\") on node \"crc\" DevicePath \"\"" Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.482619 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/746d3447-d12f-4158-89bc-9bb26d157e47-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.482629 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nq5kf\" (UniqueName: \"kubernetes.io/projected/746d3447-d12f-4158-89bc-9bb26d157e47-kube-api-access-nq5kf\") on node \"crc\" DevicePath \"\"" Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.495589 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/746d3447-d12f-4158-89bc-9bb26d157e47-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "746d3447-d12f-4158-89bc-9bb26d157e47" (UID: "746d3447-d12f-4158-89bc-9bb26d157e47"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:41:01 crc kubenswrapper[4773]: I0122 13:41:01.585082 4773 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/746d3447-d12f-4158-89bc-9bb26d157e47-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 13:41:02 crc kubenswrapper[4773]: I0122 13:41:02.104740 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6cdcfc4746-tlv4z" event={"ID":"746d3447-d12f-4158-89bc-9bb26d157e47","Type":"ContainerDied","Data":"7c33027f4936c80d6e9da57eb2881a20e1c35c60ef51605a22cb559a7a5acd00"} Jan 22 13:41:02 crc kubenswrapper[4773]: I0122 13:41:02.104801 4773 scope.go:117] "RemoveContainer" containerID="3be82d96f92886f3fc9bdff193385c39a68d54ae4fc48a63ebc3257fafbe1407" Jan 22 13:41:02 crc kubenswrapper[4773]: I0122 13:41:02.104853 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6cdcfc4746-tlv4z" Jan 22 13:41:02 crc kubenswrapper[4773]: I0122 13:41:02.148822 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6cdcfc4746-tlv4z"] Jan 22 13:41:02 crc kubenswrapper[4773]: I0122 13:41:02.160810 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6cdcfc4746-tlv4z"] Jan 22 13:41:02 crc kubenswrapper[4773]: I0122 13:41:02.292965 4773 scope.go:117] "RemoveContainer" containerID="26e525b123f04cad1f329caae148814476a7b9a39d7bf6bdbcbfc4cc5dae7d98" Jan 22 13:41:02 crc kubenswrapper[4773]: I0122 13:41:02.678087 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="746d3447-d12f-4158-89bc-9bb26d157e47" path="/var/lib/kubelet/pods/746d3447-d12f-4158-89bc-9bb26d157e47/volumes" Jan 22 13:41:04 crc kubenswrapper[4773]: I0122 13:41:04.073999 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:41:04 crc kubenswrapper[4773]: I0122 13:41:04.074099 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:41:10 crc kubenswrapper[4773]: I0122 13:41:10.763481 4773 scope.go:117] "RemoveContainer" containerID="d4fabfaf6ec353128c8116eaa179b85d3866aaacfcc11efd4dcacaa9f2c8e6b6" Jan 22 13:41:10 crc kubenswrapper[4773]: I0122 13:41:10.798604 4773 scope.go:117] "RemoveContainer" containerID="fbb7eda80213ed5aaeac14c41c132bf12b770d50fe547a045d0f3e3a5a44d041" Jan 22 13:41:10 crc kubenswrapper[4773]: I0122 13:41:10.877089 4773 scope.go:117] "RemoveContainer" containerID="9c9b49b8c35a41fa36af5436215e2d042e45556e147c336bb248ca84126bd4ce" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.195173 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5"] Jan 22 13:41:17 crc kubenswrapper[4773]: E0122 13:41:17.196457 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bea4ee1c-25a1-4bb6-829b-088a2de31247" containerName="heat-cfnapi" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.196496 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="bea4ee1c-25a1-4bb6-829b-088a2de31247" containerName="heat-cfnapi" Jan 22 13:41:17 crc kubenswrapper[4773]: E0122 13:41:17.196516 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="013bab31-6536-4eef-911e-a1e4f35a835e" containerName="heat-engine" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.196524 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="013bab31-6536-4eef-911e-a1e4f35a835e" containerName="heat-engine" Jan 22 13:41:17 crc kubenswrapper[4773]: E0122 13:41:17.196566 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="746d3447-d12f-4158-89bc-9bb26d157e47" containerName="horizon-log" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.196574 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="746d3447-d12f-4158-89bc-9bb26d157e47" containerName="horizon-log" Jan 22 13:41:17 crc kubenswrapper[4773]: E0122 13:41:17.196590 4773 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="746d3447-d12f-4158-89bc-9bb26d157e47" containerName="horizon" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.196599 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="746d3447-d12f-4158-89bc-9bb26d157e47" containerName="horizon" Jan 22 13:41:17 crc kubenswrapper[4773]: E0122 13:41:17.196622 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0633a562-5717-4b8e-a11f-1fc3eef8f23a" containerName="heat-api" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.196631 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="0633a562-5717-4b8e-a11f-1fc3eef8f23a" containerName="heat-api" Jan 22 13:41:17 crc kubenswrapper[4773]: E0122 13:41:17.196645 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06be61a2-6723-45ac-a38c-37b0ddf88470" containerName="heat-cfnapi" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.196654 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="06be61a2-6723-45ac-a38c-37b0ddf88470" containerName="heat-cfnapi" Jan 22 13:41:17 crc kubenswrapper[4773]: E0122 13:41:17.196668 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c90999f3-789d-415f-9e3c-67b94a0172fb" containerName="heat-api" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.196677 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c90999f3-789d-415f-9e3c-67b94a0172fb" containerName="heat-api" Jan 22 13:41:17 crc kubenswrapper[4773]: E0122 13:41:17.196692 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0633a562-5717-4b8e-a11f-1fc3eef8f23a" containerName="heat-api" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.196701 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="0633a562-5717-4b8e-a11f-1fc3eef8f23a" containerName="heat-api" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.196981 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="06be61a2-6723-45ac-a38c-37b0ddf88470" containerName="heat-cfnapi" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.197002 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="06be61a2-6723-45ac-a38c-37b0ddf88470" containerName="heat-cfnapi" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.197016 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="013bab31-6536-4eef-911e-a1e4f35a835e" containerName="heat-engine" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.197044 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="0633a562-5717-4b8e-a11f-1fc3eef8f23a" containerName="heat-api" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.197052 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="0633a562-5717-4b8e-a11f-1fc3eef8f23a" containerName="heat-api" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.197067 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="746d3447-d12f-4158-89bc-9bb26d157e47" containerName="horizon" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.197075 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="746d3447-d12f-4158-89bc-9bb26d157e47" containerName="horizon-log" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.197094 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="bea4ee1c-25a1-4bb6-829b-088a2de31247" containerName="heat-cfnapi" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.197106 4773 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="c90999f3-789d-415f-9e3c-67b94a0172fb" containerName="heat-api" Jan 22 13:41:17 crc kubenswrapper[4773]: E0122 13:41:17.197343 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06be61a2-6723-45ac-a38c-37b0ddf88470" containerName="heat-cfnapi" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.197355 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="06be61a2-6723-45ac-a38c-37b0ddf88470" containerName="heat-cfnapi" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.198938 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.209004 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.213099 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5"] Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.285691 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxdn2\" (UniqueName: \"kubernetes.io/projected/9f8be3bc-66a7-44ee-9eee-61aac695d0b0-kube-api-access-fxdn2\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5\" (UID: \"9f8be3bc-66a7-44ee-9eee-61aac695d0b0\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.285766 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9f8be3bc-66a7-44ee-9eee-61aac695d0b0-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5\" (UID: \"9f8be3bc-66a7-44ee-9eee-61aac695d0b0\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.285815 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9f8be3bc-66a7-44ee-9eee-61aac695d0b0-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5\" (UID: \"9f8be3bc-66a7-44ee-9eee-61aac695d0b0\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.387748 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxdn2\" (UniqueName: \"kubernetes.io/projected/9f8be3bc-66a7-44ee-9eee-61aac695d0b0-kube-api-access-fxdn2\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5\" (UID: \"9f8be3bc-66a7-44ee-9eee-61aac695d0b0\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.387837 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9f8be3bc-66a7-44ee-9eee-61aac695d0b0-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5\" (UID: \"9f8be3bc-66a7-44ee-9eee-61aac695d0b0\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.387881 4773 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9f8be3bc-66a7-44ee-9eee-61aac695d0b0-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5\" (UID: \"9f8be3bc-66a7-44ee-9eee-61aac695d0b0\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.388443 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9f8be3bc-66a7-44ee-9eee-61aac695d0b0-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5\" (UID: \"9f8be3bc-66a7-44ee-9eee-61aac695d0b0\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.389113 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9f8be3bc-66a7-44ee-9eee-61aac695d0b0-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5\" (UID: \"9f8be3bc-66a7-44ee-9eee-61aac695d0b0\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.418972 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxdn2\" (UniqueName: \"kubernetes.io/projected/9f8be3bc-66a7-44ee-9eee-61aac695d0b0-kube-api-access-fxdn2\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5\" (UID: \"9f8be3bc-66a7-44ee-9eee-61aac695d0b0\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5" Jan 22 13:41:17 crc kubenswrapper[4773]: I0122 13:41:17.527001 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5" Jan 22 13:41:18 crc kubenswrapper[4773]: I0122 13:41:18.048022 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5"] Jan 22 13:41:18 crc kubenswrapper[4773]: I0122 13:41:18.293391 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5" event={"ID":"9f8be3bc-66a7-44ee-9eee-61aac695d0b0","Type":"ContainerStarted","Data":"d3e5efac028640471e4d2994e57dc9f463ac682adb71bf42e92c3b0d08323a42"} Jan 22 13:41:18 crc kubenswrapper[4773]: I0122 13:41:18.293452 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5" event={"ID":"9f8be3bc-66a7-44ee-9eee-61aac695d0b0","Type":"ContainerStarted","Data":"4aaf669fc753b0e1605058fc707f65c7aa3e2e2317c770c94fac7b8aff6472dc"} Jan 22 13:41:19 crc kubenswrapper[4773]: I0122 13:41:19.316347 4773 generic.go:334] "Generic (PLEG): container finished" podID="9f8be3bc-66a7-44ee-9eee-61aac695d0b0" containerID="d3e5efac028640471e4d2994e57dc9f463ac682adb71bf42e92c3b0d08323a42" exitCode=0 Jan 22 13:41:19 crc kubenswrapper[4773]: I0122 13:41:19.316500 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5" event={"ID":"9f8be3bc-66a7-44ee-9eee-61aac695d0b0","Type":"ContainerDied","Data":"d3e5efac028640471e4d2994e57dc9f463ac682adb71bf42e92c3b0d08323a42"} Jan 22 13:41:21 crc kubenswrapper[4773]: I0122 13:41:21.349269 4773 generic.go:334] "Generic (PLEG): container finished" podID="9f8be3bc-66a7-44ee-9eee-61aac695d0b0" containerID="b028d6148763c2430e0804121258ecfb0350f12049c6abb8dcf2ffff602884bf" exitCode=0 Jan 22 13:41:21 crc kubenswrapper[4773]: I0122 13:41:21.349393 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5" event={"ID":"9f8be3bc-66a7-44ee-9eee-61aac695d0b0","Type":"ContainerDied","Data":"b028d6148763c2430e0804121258ecfb0350f12049c6abb8dcf2ffff602884bf"} Jan 22 13:41:22 crc kubenswrapper[4773]: I0122 13:41:22.366118 4773 generic.go:334] "Generic (PLEG): container finished" podID="9f8be3bc-66a7-44ee-9eee-61aac695d0b0" containerID="67fa8552495f8e26d728fa14621fc4fb6a864a746e5e8ce00124ca905ef82ddb" exitCode=0 Jan 22 13:41:22 crc kubenswrapper[4773]: I0122 13:41:22.366176 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5" event={"ID":"9f8be3bc-66a7-44ee-9eee-61aac695d0b0","Type":"ContainerDied","Data":"67fa8552495f8e26d728fa14621fc4fb6a864a746e5e8ce00124ca905ef82ddb"} Jan 22 13:41:23 crc kubenswrapper[4773]: I0122 13:41:23.708854 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5" Jan 22 13:41:23 crc kubenswrapper[4773]: I0122 13:41:23.842682 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9f8be3bc-66a7-44ee-9eee-61aac695d0b0-util\") pod \"9f8be3bc-66a7-44ee-9eee-61aac695d0b0\" (UID: \"9f8be3bc-66a7-44ee-9eee-61aac695d0b0\") " Jan 22 13:41:23 crc kubenswrapper[4773]: I0122 13:41:23.842828 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxdn2\" (UniqueName: \"kubernetes.io/projected/9f8be3bc-66a7-44ee-9eee-61aac695d0b0-kube-api-access-fxdn2\") pod \"9f8be3bc-66a7-44ee-9eee-61aac695d0b0\" (UID: \"9f8be3bc-66a7-44ee-9eee-61aac695d0b0\") " Jan 22 13:41:23 crc kubenswrapper[4773]: I0122 13:41:23.842865 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9f8be3bc-66a7-44ee-9eee-61aac695d0b0-bundle\") pod \"9f8be3bc-66a7-44ee-9eee-61aac695d0b0\" (UID: \"9f8be3bc-66a7-44ee-9eee-61aac695d0b0\") " Jan 22 13:41:23 crc kubenswrapper[4773]: I0122 13:41:23.845692 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f8be3bc-66a7-44ee-9eee-61aac695d0b0-bundle" (OuterVolumeSpecName: "bundle") pod "9f8be3bc-66a7-44ee-9eee-61aac695d0b0" (UID: "9f8be3bc-66a7-44ee-9eee-61aac695d0b0"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:41:23 crc kubenswrapper[4773]: I0122 13:41:23.850830 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f8be3bc-66a7-44ee-9eee-61aac695d0b0-kube-api-access-fxdn2" (OuterVolumeSpecName: "kube-api-access-fxdn2") pod "9f8be3bc-66a7-44ee-9eee-61aac695d0b0" (UID: "9f8be3bc-66a7-44ee-9eee-61aac695d0b0"). InnerVolumeSpecName "kube-api-access-fxdn2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:41:23 crc kubenswrapper[4773]: I0122 13:41:23.852176 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f8be3bc-66a7-44ee-9eee-61aac695d0b0-util" (OuterVolumeSpecName: "util") pod "9f8be3bc-66a7-44ee-9eee-61aac695d0b0" (UID: "9f8be3bc-66a7-44ee-9eee-61aac695d0b0"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:41:23 crc kubenswrapper[4773]: I0122 13:41:23.946613 4773 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9f8be3bc-66a7-44ee-9eee-61aac695d0b0-util\") on node \"crc\" DevicePath \"\"" Jan 22 13:41:23 crc kubenswrapper[4773]: I0122 13:41:23.946659 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxdn2\" (UniqueName: \"kubernetes.io/projected/9f8be3bc-66a7-44ee-9eee-61aac695d0b0-kube-api-access-fxdn2\") on node \"crc\" DevicePath \"\"" Jan 22 13:41:23 crc kubenswrapper[4773]: I0122 13:41:23.946671 4773 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9f8be3bc-66a7-44ee-9eee-61aac695d0b0-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:41:24 crc kubenswrapper[4773]: I0122 13:41:24.389259 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5" event={"ID":"9f8be3bc-66a7-44ee-9eee-61aac695d0b0","Type":"ContainerDied","Data":"4aaf669fc753b0e1605058fc707f65c7aa3e2e2317c770c94fac7b8aff6472dc"} Jan 22 13:41:24 crc kubenswrapper[4773]: I0122 13:41:24.389326 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4aaf669fc753b0e1605058fc707f65c7aa3e2e2317c770c94fac7b8aff6472dc" Jan 22 13:41:24 crc kubenswrapper[4773]: I0122 13:41:24.389397 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5" Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.074709 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.075179 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.075228 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.076071 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.076124 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" gracePeriod=600 Jan 22 13:41:34 crc kubenswrapper[4773]: E0122 13:41:34.212748 4773 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.549600 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" exitCode=0 Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.549651 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810"} Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.549697 4773 scope.go:117] "RemoveContainer" containerID="cd11c4ae3e84c7a53f963cff7bf88747afac3c498d4204f8269ad676fea8c32b" Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.550150 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:41:34 crc kubenswrapper[4773]: E0122 13:41:34.550612 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.947373 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-sf2m2"] Jan 22 13:41:34 crc kubenswrapper[4773]: E0122 13:41:34.948226 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f8be3bc-66a7-44ee-9eee-61aac695d0b0" containerName="extract" Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.948249 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f8be3bc-66a7-44ee-9eee-61aac695d0b0" containerName="extract" Jan 22 13:41:34 crc kubenswrapper[4773]: E0122 13:41:34.948266 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f8be3bc-66a7-44ee-9eee-61aac695d0b0" containerName="util" Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.948274 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f8be3bc-66a7-44ee-9eee-61aac695d0b0" containerName="util" Jan 22 13:41:34 crc kubenswrapper[4773]: E0122 13:41:34.948470 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f8be3bc-66a7-44ee-9eee-61aac695d0b0" containerName="pull" Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.948485 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f8be3bc-66a7-44ee-9eee-61aac695d0b0" containerName="pull" Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.948718 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f8be3bc-66a7-44ee-9eee-61aac695d0b0" containerName="extract" Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.949493 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-sf2m2" Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.953593 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.953801 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-6ldfw" Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.954029 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.959489 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-sf2m2"] Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.997184 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc"] Jan 22 13:41:34 crc kubenswrapper[4773]: I0122 13:41:34.998510 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.001772 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-grkwc" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.005389 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.023432 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc"] Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.044992 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq"] Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.046424 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.059204 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slqsr\" (UniqueName: \"kubernetes.io/projected/20327f73-1346-42db-9404-cb8482061e15-kube-api-access-slqsr\") pod \"obo-prometheus-operator-68bc856cb9-sf2m2\" (UID: \"20327f73-1346-42db-9404-cb8482061e15\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-sf2m2" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.083726 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq"] Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.161070 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/49124815-040d-46dd-abaa-7f7dfe7e5d82-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc\" (UID: \"49124815-040d-46dd-abaa-7f7dfe7e5d82\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.161354 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/49124815-040d-46dd-abaa-7f7dfe7e5d82-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc\" (UID: \"49124815-040d-46dd-abaa-7f7dfe7e5d82\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.161513 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eca96eb1-ac66-4fe6-ab49-8d7cd7f343c8-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq\" (UID: \"eca96eb1-ac66-4fe6-ab49-8d7cd7f343c8\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.161628 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slqsr\" (UniqueName: \"kubernetes.io/projected/20327f73-1346-42db-9404-cb8482061e15-kube-api-access-slqsr\") pod \"obo-prometheus-operator-68bc856cb9-sf2m2\" (UID: \"20327f73-1346-42db-9404-cb8482061e15\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-sf2m2" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.161682 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eca96eb1-ac66-4fe6-ab49-8d7cd7f343c8-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq\" (UID: \"eca96eb1-ac66-4fe6-ab49-8d7cd7f343c8\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.176811 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-n2j8w"] Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.178609 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-n2j8w" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.181649 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.182575 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-cfckf" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.195169 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-n2j8w"] Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.201863 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slqsr\" (UniqueName: \"kubernetes.io/projected/20327f73-1346-42db-9404-cb8482061e15-kube-api-access-slqsr\") pod \"obo-prometheus-operator-68bc856cb9-sf2m2\" (UID: \"20327f73-1346-42db-9404-cb8482061e15\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-sf2m2" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.264079 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/f2d527ad-aed8-41c2-95f7-e4399e497cc6-observability-operator-tls\") pod \"observability-operator-59bdc8b94-n2j8w\" (UID: \"f2d527ad-aed8-41c2-95f7-e4399e497cc6\") " pod="openshift-operators/observability-operator-59bdc8b94-n2j8w" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.264221 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8d7fh\" (UniqueName: \"kubernetes.io/projected/f2d527ad-aed8-41c2-95f7-e4399e497cc6-kube-api-access-8d7fh\") pod \"observability-operator-59bdc8b94-n2j8w\" (UID: \"f2d527ad-aed8-41c2-95f7-e4399e497cc6\") " pod="openshift-operators/observability-operator-59bdc8b94-n2j8w" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.264335 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eca96eb1-ac66-4fe6-ab49-8d7cd7f343c8-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq\" (UID: \"eca96eb1-ac66-4fe6-ab49-8d7cd7f343c8\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.264404 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/49124815-040d-46dd-abaa-7f7dfe7e5d82-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc\" (UID: \"49124815-040d-46dd-abaa-7f7dfe7e5d82\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.264479 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/49124815-040d-46dd-abaa-7f7dfe7e5d82-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc\" (UID: \"49124815-040d-46dd-abaa-7f7dfe7e5d82\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.266854 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/eca96eb1-ac66-4fe6-ab49-8d7cd7f343c8-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq\" (UID: \"eca96eb1-ac66-4fe6-ab49-8d7cd7f343c8\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.268302 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/49124815-040d-46dd-abaa-7f7dfe7e5d82-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc\" (UID: \"49124815-040d-46dd-abaa-7f7dfe7e5d82\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.270085 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eca96eb1-ac66-4fe6-ab49-8d7cd7f343c8-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq\" (UID: \"eca96eb1-ac66-4fe6-ab49-8d7cd7f343c8\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.271199 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eca96eb1-ac66-4fe6-ab49-8d7cd7f343c8-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq\" (UID: \"eca96eb1-ac66-4fe6-ab49-8d7cd7f343c8\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.272829 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-sf2m2" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.276175 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/49124815-040d-46dd-abaa-7f7dfe7e5d82-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc\" (UID: \"49124815-040d-46dd-abaa-7f7dfe7e5d82\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.281226 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-s5rtt"] Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.285785 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-s5rtt" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.289176 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-lffzb" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.303667 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-s5rtt"] Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.331847 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.368519 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r996z\" (UniqueName: \"kubernetes.io/projected/3dc143c2-3387-49e7-be22-1a05d37d8fea-kube-api-access-r996z\") pod \"perses-operator-5bf474d74f-s5rtt\" (UID: \"3dc143c2-3387-49e7-be22-1a05d37d8fea\") " pod="openshift-operators/perses-operator-5bf474d74f-s5rtt" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.368876 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/f2d527ad-aed8-41c2-95f7-e4399e497cc6-observability-operator-tls\") pod \"observability-operator-59bdc8b94-n2j8w\" (UID: \"f2d527ad-aed8-41c2-95f7-e4399e497cc6\") " pod="openshift-operators/observability-operator-59bdc8b94-n2j8w" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.368946 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8d7fh\" (UniqueName: \"kubernetes.io/projected/f2d527ad-aed8-41c2-95f7-e4399e497cc6-kube-api-access-8d7fh\") pod \"observability-operator-59bdc8b94-n2j8w\" (UID: \"f2d527ad-aed8-41c2-95f7-e4399e497cc6\") " pod="openshift-operators/observability-operator-59bdc8b94-n2j8w" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.369063 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/3dc143c2-3387-49e7-be22-1a05d37d8fea-openshift-service-ca\") pod \"perses-operator-5bf474d74f-s5rtt\" (UID: \"3dc143c2-3387-49e7-be22-1a05d37d8fea\") " pod="openshift-operators/perses-operator-5bf474d74f-s5rtt" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.370821 4773 util.go:30] "No sandbox for pod can be found. 
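Every entry in this capture uses the standard klog header layout: a severity letter (I/W/E/F), an MMDD date, a wall-clock time with microseconds, the kubelet PID (4773 here), the emitting source file and line, then a structured message. For post-processing a capture like this one, a small parsing sketch; the regexp and field names are this sketch's own, not anything kubelet ships.

    package main

    import (
        "fmt"
        "regexp"
    )

    // klogLine matches the header visible on every entry above:
    // severity letter, MMDD date, HH:MM:SS.micros time, PID,
    // source file:line, then the message.
    var klogLine = regexp.MustCompile(
        `^([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+) ([\w./_-]+:\d+)\] (.*)$`)

    func main() {
        entry := `I0122 13:41:35.597312 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-s5rtt"`
        m := klogLine.FindStringSubmatch(entry)
        if m == nil {
            fmt.Println("not a klog header")
            return
        }
        fmt.Printf("severity=%s date=%s time=%s pid=%s source=%s\nmsg=%s\n",
            m[1], m[2], m[3], m[4], m[5], m[6])
    }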
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.372438 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/f2d527ad-aed8-41c2-95f7-e4399e497cc6-observability-operator-tls\") pod \"observability-operator-59bdc8b94-n2j8w\" (UID: \"f2d527ad-aed8-41c2-95f7-e4399e497cc6\") " pod="openshift-operators/observability-operator-59bdc8b94-n2j8w" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.384750 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8d7fh\" (UniqueName: \"kubernetes.io/projected/f2d527ad-aed8-41c2-95f7-e4399e497cc6-kube-api-access-8d7fh\") pod \"observability-operator-59bdc8b94-n2j8w\" (UID: \"f2d527ad-aed8-41c2-95f7-e4399e497cc6\") " pod="openshift-operators/observability-operator-59bdc8b94-n2j8w" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.472394 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/3dc143c2-3387-49e7-be22-1a05d37d8fea-openshift-service-ca\") pod \"perses-operator-5bf474d74f-s5rtt\" (UID: \"3dc143c2-3387-49e7-be22-1a05d37d8fea\") " pod="openshift-operators/perses-operator-5bf474d74f-s5rtt" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.472960 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r996z\" (UniqueName: \"kubernetes.io/projected/3dc143c2-3387-49e7-be22-1a05d37d8fea-kube-api-access-r996z\") pod \"perses-operator-5bf474d74f-s5rtt\" (UID: \"3dc143c2-3387-49e7-be22-1a05d37d8fea\") " pod="openshift-operators/perses-operator-5bf474d74f-s5rtt" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.474562 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/3dc143c2-3387-49e7-be22-1a05d37d8fea-openshift-service-ca\") pod \"perses-operator-5bf474d74f-s5rtt\" (UID: \"3dc143c2-3387-49e7-be22-1a05d37d8fea\") " pod="openshift-operators/perses-operator-5bf474d74f-s5rtt" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.500739 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r996z\" (UniqueName: \"kubernetes.io/projected/3dc143c2-3387-49e7-be22-1a05d37d8fea-kube-api-access-r996z\") pod \"perses-operator-5bf474d74f-s5rtt\" (UID: \"3dc143c2-3387-49e7-be22-1a05d37d8fea\") " pod="openshift-operators/perses-operator-5bf474d74f-s5rtt" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.501264 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-n2j8w" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.597312 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-s5rtt" Jan 22 13:41:35 crc kubenswrapper[4773]: I0122 13:41:35.939704 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-sf2m2"] Jan 22 13:41:36 crc kubenswrapper[4773]: I0122 13:41:36.088538 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq"] Jan 22 13:41:36 crc kubenswrapper[4773]: I0122 13:41:36.163550 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc"] Jan 22 13:41:36 crc kubenswrapper[4773]: I0122 13:41:36.317198 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-s5rtt"] Jan 22 13:41:36 crc kubenswrapper[4773]: W0122 13:41:36.326791 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dc143c2_3387_49e7_be22_1a05d37d8fea.slice/crio-8c2a40081a6608cc53121cab42e094d180dd93f5b731d4940569b1251ced79c0 WatchSource:0}: Error finding container 8c2a40081a6608cc53121cab42e094d180dd93f5b731d4940569b1251ced79c0: Status 404 returned error can't find the container with id 8c2a40081a6608cc53121cab42e094d180dd93f5b731d4940569b1251ced79c0 Jan 22 13:41:36 crc kubenswrapper[4773]: W0122 13:41:36.328420 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf2d527ad_aed8_41c2_95f7_e4399e497cc6.slice/crio-b27b224743621dd95813c1255e21925b50daa8701a22cd3e6b39e055d3ffeb52 WatchSource:0}: Error finding container b27b224743621dd95813c1255e21925b50daa8701a22cd3e6b39e055d3ffeb52: Status 404 returned error can't find the container with id b27b224743621dd95813c1255e21925b50daa8701a22cd3e6b39e055d3ffeb52 Jan 22 13:41:36 crc kubenswrapper[4773]: I0122 13:41:36.342065 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-n2j8w"] Jan 22 13:41:36 crc kubenswrapper[4773]: I0122 13:41:36.608876 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc" event={"ID":"49124815-040d-46dd-abaa-7f7dfe7e5d82","Type":"ContainerStarted","Data":"454a4c72eeb14b86dabf27b9caa0c0fd753302f47cd6f7d30e0e53a06fd2622f"} Jan 22 13:41:36 crc kubenswrapper[4773]: I0122 13:41:36.618892 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq" event={"ID":"eca96eb1-ac66-4fe6-ab49-8d7cd7f343c8","Type":"ContainerStarted","Data":"a228e286b36c6932fa5eff4f2249114de3b243a76bcacb83c79c95183eb755c2"} Jan 22 13:41:36 crc kubenswrapper[4773]: I0122 13:41:36.623462 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-s5rtt" event={"ID":"3dc143c2-3387-49e7-be22-1a05d37d8fea","Type":"ContainerStarted","Data":"8c2a40081a6608cc53121cab42e094d180dd93f5b731d4940569b1251ced79c0"} Jan 22 13:41:36 crc kubenswrapper[4773]: I0122 13:41:36.641687 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-n2j8w" event={"ID":"f2d527ad-aed8-41c2-95f7-e4399e497cc6","Type":"ContainerStarted","Data":"b27b224743621dd95813c1255e21925b50daa8701a22cd3e6b39e055d3ffeb52"} Jan 22 13:41:36 crc kubenswrapper[4773]: I0122 13:41:36.647482 
4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-sf2m2" event={"ID":"20327f73-1346-42db-9404-cb8482061e15","Type":"ContainerStarted","Data":"312b376f300008d4c8e7da6cfeffe4299fd558bb70c5516e6a13be92f9f2fbfa"} Jan 22 13:41:37 crc kubenswrapper[4773]: I0122 13:41:37.051303 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-qk9s8"] Jan 22 13:41:37 crc kubenswrapper[4773]: I0122 13:41:37.063426 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-cc02-account-create-update-5g6hq"] Jan 22 13:41:37 crc kubenswrapper[4773]: I0122 13:41:37.073887 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-qk9s8"] Jan 22 13:41:37 crc kubenswrapper[4773]: I0122 13:41:37.083314 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-cc02-account-create-update-5g6hq"] Jan 22 13:41:38 crc kubenswrapper[4773]: I0122 13:41:38.669622 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d" path="/var/lib/kubelet/pods/62ba8b5e-cebc-4a35-a91c-5235ce3bfe7d/volumes" Jan 22 13:41:38 crc kubenswrapper[4773]: I0122 13:41:38.670589 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b97e980-5122-434b-8a97-699aa0fc5b28" path="/var/lib/kubelet/pods/9b97e980-5122-434b-8a97-699aa0fc5b28/volumes" Jan 22 13:41:45 crc kubenswrapper[4773]: I0122 13:41:45.039348 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-28mrp"] Jan 22 13:41:45 crc kubenswrapper[4773]: I0122 13:41:45.049151 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-28mrp"] Jan 22 13:41:46 crc kubenswrapper[4773]: I0122 13:41:46.671714 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9b2afff-3572-4d45-a41a-0fc77512da79" path="/var/lib/kubelet/pods/c9b2afff-3572-4d45-a41a-0fc77512da79/volumes" Jan 22 13:41:47 crc kubenswrapper[4773]: I0122 13:41:47.658376 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:41:47 crc kubenswrapper[4773]: E0122 13:41:47.659012 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:41:48 crc kubenswrapper[4773]: I0122 13:41:48.789212 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-sf2m2" event={"ID":"20327f73-1346-42db-9404-cb8482061e15","Type":"ContainerStarted","Data":"6db097ec6cd7f2e2005ef31aae6c948a9768ec05c5a82412d8ca1109190bbea7"} Jan 22 13:41:48 crc kubenswrapper[4773]: I0122 13:41:48.792400 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc" event={"ID":"49124815-040d-46dd-abaa-7f7dfe7e5d82","Type":"ContainerStarted","Data":"c408695260dd5fcd39ce23a4bc0235a110e34a16d49f39b0f2bb5e5a50a520d5"} Jan 22 13:41:48 crc kubenswrapper[4773]: I0122 13:41:48.795125 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq" event={"ID":"eca96eb1-ac66-4fe6-ab49-8d7cd7f343c8","Type":"ContainerStarted","Data":"c4b24db37a016283f2e441c25e2296e1d363603cf4aa044ac91fd0ae3be2e9d5"} Jan 22 13:41:48 crc kubenswrapper[4773]: I0122 13:41:48.797727 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-s5rtt" event={"ID":"3dc143c2-3387-49e7-be22-1a05d37d8fea","Type":"ContainerStarted","Data":"fb1ce78607b01713886a193371845888ebb1c77c619252607f069a9e5d3e20d7"} Jan 22 13:41:48 crc kubenswrapper[4773]: I0122 13:41:48.797873 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5bf474d74f-s5rtt" Jan 22 13:41:48 crc kubenswrapper[4773]: I0122 13:41:48.800773 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-n2j8w" event={"ID":"f2d527ad-aed8-41c2-95f7-e4399e497cc6","Type":"ContainerStarted","Data":"0c9738b491d90e25063c6123422275d82ec77dc6300dd9e49d9116d5b44719bb"} Jan 22 13:41:48 crc kubenswrapper[4773]: I0122 13:41:48.801999 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-59bdc8b94-n2j8w" Jan 22 13:41:48 crc kubenswrapper[4773]: I0122 13:41:48.810022 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-59bdc8b94-n2j8w" Jan 22 13:41:48 crc kubenswrapper[4773]: I0122 13:41:48.826523 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-sf2m2" podStartSLOduration=3.213432103 podStartE2EDuration="14.826500678s" podCreationTimestamp="2026-01-22 13:41:34 +0000 UTC" firstStartedPulling="2026-01-22 13:41:35.994496084 +0000 UTC m=+6403.572611909" lastFinishedPulling="2026-01-22 13:41:47.607564659 +0000 UTC m=+6415.185680484" observedRunningTime="2026-01-22 13:41:48.815846039 +0000 UTC m=+6416.393961874" watchObservedRunningTime="2026-01-22 13:41:48.826500678 +0000 UTC m=+6416.404616503" Jan 22 13:41:48 crc kubenswrapper[4773]: I0122 13:41:48.838916 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-59bdc8b94-n2j8w" podStartSLOduration=2.545380754 podStartE2EDuration="13.838894987s" podCreationTimestamp="2026-01-22 13:41:35 +0000 UTC" firstStartedPulling="2026-01-22 13:41:36.332732271 +0000 UTC m=+6403.910848096" lastFinishedPulling="2026-01-22 13:41:47.626246504 +0000 UTC m=+6415.204362329" observedRunningTime="2026-01-22 13:41:48.833000511 +0000 UTC m=+6416.411116326" watchObservedRunningTime="2026-01-22 13:41:48.838894987 +0000 UTC m=+6416.417010812" Jan 22 13:41:48 crc kubenswrapper[4773]: I0122 13:41:48.884343 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5bf474d74f-s5rtt" podStartSLOduration=2.607365396 podStartE2EDuration="13.884310993s" podCreationTimestamp="2026-01-22 13:41:35 +0000 UTC" firstStartedPulling="2026-01-22 13:41:36.330603871 +0000 UTC m=+6403.908719696" lastFinishedPulling="2026-01-22 13:41:47.607549468 +0000 UTC m=+6415.185665293" observedRunningTime="2026-01-22 13:41:48.855592556 +0000 UTC m=+6416.433708371" watchObservedRunningTime="2026-01-22 13:41:48.884310993 +0000 UTC m=+6416.462426818" Jan 22 13:41:48 crc kubenswrapper[4773]: I0122 13:41:48.895604 4773 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq" podStartSLOduration=3.415826952 podStartE2EDuration="14.89558001s" podCreationTimestamp="2026-01-22 13:41:34 +0000 UTC" firstStartedPulling="2026-01-22 13:41:36.1277731 +0000 UTC m=+6403.705888925" lastFinishedPulling="2026-01-22 13:41:47.607526158 +0000 UTC m=+6415.185641983" observedRunningTime="2026-01-22 13:41:48.889393566 +0000 UTC m=+6416.467509391" watchObservedRunningTime="2026-01-22 13:41:48.89558001 +0000 UTC m=+6416.473695825" Jan 22 13:41:48 crc kubenswrapper[4773]: I0122 13:41:48.965703 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc" podStartSLOduration=5.064927422 podStartE2EDuration="14.96567848s" podCreationTimestamp="2026-01-22 13:41:34 +0000 UTC" firstStartedPulling="2026-01-22 13:41:36.216548695 +0000 UTC m=+6403.794664520" lastFinishedPulling="2026-01-22 13:41:46.117299753 +0000 UTC m=+6413.695415578" observedRunningTime="2026-01-22 13:41:48.908003039 +0000 UTC m=+6416.486118864" watchObservedRunningTime="2026-01-22 13:41:48.96567848 +0000 UTC m=+6416.543794305" Jan 22 13:41:48 crc kubenswrapper[4773]: I0122 13:41:48.995602 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-c8p9v"] Jan 22 13:41:48 crc kubenswrapper[4773]: I0122 13:41:48.997803 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c8p9v" Jan 22 13:41:49 crc kubenswrapper[4773]: I0122 13:41:49.009276 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c8p9v"] Jan 22 13:41:49 crc kubenswrapper[4773]: I0122 13:41:49.163259 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9z8ll\" (UniqueName: \"kubernetes.io/projected/6e29f46a-de39-4cfb-9463-4416ca921dec-kube-api-access-9z8ll\") pod \"redhat-operators-c8p9v\" (UID: \"6e29f46a-de39-4cfb-9463-4416ca921dec\") " pod="openshift-marketplace/redhat-operators-c8p9v" Jan 22 13:41:49 crc kubenswrapper[4773]: I0122 13:41:49.163357 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e29f46a-de39-4cfb-9463-4416ca921dec-catalog-content\") pod \"redhat-operators-c8p9v\" (UID: \"6e29f46a-de39-4cfb-9463-4416ca921dec\") " pod="openshift-marketplace/redhat-operators-c8p9v" Jan 22 13:41:49 crc kubenswrapper[4773]: I0122 13:41:49.163388 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e29f46a-de39-4cfb-9463-4416ca921dec-utilities\") pod \"redhat-operators-c8p9v\" (UID: \"6e29f46a-de39-4cfb-9463-4416ca921dec\") " pod="openshift-marketplace/redhat-operators-c8p9v" Jan 22 13:41:49 crc kubenswrapper[4773]: I0122 13:41:49.265604 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9z8ll\" (UniqueName: \"kubernetes.io/projected/6e29f46a-de39-4cfb-9463-4416ca921dec-kube-api-access-9z8ll\") pod \"redhat-operators-c8p9v\" (UID: \"6e29f46a-de39-4cfb-9463-4416ca921dec\") " pod="openshift-marketplace/redhat-operators-c8p9v" Jan 22 13:41:49 crc kubenswrapper[4773]: I0122 13:41:49.265699 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/6e29f46a-de39-4cfb-9463-4416ca921dec-catalog-content\") pod \"redhat-operators-c8p9v\" (UID: \"6e29f46a-de39-4cfb-9463-4416ca921dec\") " pod="openshift-marketplace/redhat-operators-c8p9v" Jan 22 13:41:49 crc kubenswrapper[4773]: I0122 13:41:49.265727 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e29f46a-de39-4cfb-9463-4416ca921dec-utilities\") pod \"redhat-operators-c8p9v\" (UID: \"6e29f46a-de39-4cfb-9463-4416ca921dec\") " pod="openshift-marketplace/redhat-operators-c8p9v" Jan 22 13:41:49 crc kubenswrapper[4773]: I0122 13:41:49.266469 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e29f46a-de39-4cfb-9463-4416ca921dec-utilities\") pod \"redhat-operators-c8p9v\" (UID: \"6e29f46a-de39-4cfb-9463-4416ca921dec\") " pod="openshift-marketplace/redhat-operators-c8p9v" Jan 22 13:41:49 crc kubenswrapper[4773]: I0122 13:41:49.266478 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e29f46a-de39-4cfb-9463-4416ca921dec-catalog-content\") pod \"redhat-operators-c8p9v\" (UID: \"6e29f46a-de39-4cfb-9463-4416ca921dec\") " pod="openshift-marketplace/redhat-operators-c8p9v" Jan 22 13:41:49 crc kubenswrapper[4773]: I0122 13:41:49.286439 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9z8ll\" (UniqueName: \"kubernetes.io/projected/6e29f46a-de39-4cfb-9463-4416ca921dec-kube-api-access-9z8ll\") pod \"redhat-operators-c8p9v\" (UID: \"6e29f46a-de39-4cfb-9463-4416ca921dec\") " pod="openshift-marketplace/redhat-operators-c8p9v" Jan 22 13:41:49 crc kubenswrapper[4773]: I0122 13:41:49.330671 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-c8p9v" Jan 22 13:41:49 crc kubenswrapper[4773]: I0122 13:41:49.859600 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c8p9v"] Jan 22 13:41:50 crc kubenswrapper[4773]: I0122 13:41:50.856367 4773 generic.go:334] "Generic (PLEG): container finished" podID="6e29f46a-de39-4cfb-9463-4416ca921dec" containerID="85718827acebbe18ae6fbd3c36e17bfffb8fefdcdab7e4b3c183f8135ee1650f" exitCode=0 Jan 22 13:41:50 crc kubenswrapper[4773]: I0122 13:41:50.857055 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c8p9v" event={"ID":"6e29f46a-de39-4cfb-9463-4416ca921dec","Type":"ContainerDied","Data":"85718827acebbe18ae6fbd3c36e17bfffb8fefdcdab7e4b3c183f8135ee1650f"} Jan 22 13:41:50 crc kubenswrapper[4773]: I0122 13:41:50.857199 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c8p9v" event={"ID":"6e29f46a-de39-4cfb-9463-4416ca921dec","Type":"ContainerStarted","Data":"e6b8c470635eb7276f809211b1bdcc68db40f7b0bceb872ac9a01927450a3ec8"} Jan 22 13:41:52 crc kubenswrapper[4773]: I0122 13:41:52.884832 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c8p9v" event={"ID":"6e29f46a-de39-4cfb-9463-4416ca921dec","Type":"ContainerStarted","Data":"750054c78bd169cd2b546eb1d0346c30cd233d3d485faab9d62a943271850d48"} Jan 22 13:41:55 crc kubenswrapper[4773]: I0122 13:41:55.601948 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5bf474d74f-s5rtt" Jan 22 13:41:57 crc kubenswrapper[4773]: I0122 13:41:57.987503 4773 generic.go:334] "Generic (PLEG): container finished" podID="6e29f46a-de39-4cfb-9463-4416ca921dec" containerID="750054c78bd169cd2b546eb1d0346c30cd233d3d485faab9d62a943271850d48" exitCode=0 Jan 22 13:41:57 crc kubenswrapper[4773]: I0122 13:41:57.988039 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c8p9v" event={"ID":"6e29f46a-de39-4cfb-9463-4416ca921dec","Type":"ContainerDied","Data":"750054c78bd169cd2b546eb1d0346c30cd233d3d485faab9d62a943271850d48"} Jan 22 13:41:59 crc kubenswrapper[4773]: I0122 13:41:59.013985 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c8p9v" event={"ID":"6e29f46a-de39-4cfb-9463-4416ca921dec","Type":"ContainerStarted","Data":"88b1bd4555c954349f103c2594a2b4a3cdebe53a418d87822c54b26144104999"} Jan 22 13:41:59 crc kubenswrapper[4773]: I0122 13:41:59.099576 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-c8p9v" podStartSLOduration=3.515518576 podStartE2EDuration="11.099554421s" podCreationTimestamp="2026-01-22 13:41:48 +0000 UTC" firstStartedPulling="2026-01-22 13:41:50.859070095 +0000 UTC m=+6418.437185910" lastFinishedPulling="2026-01-22 13:41:58.44310593 +0000 UTC m=+6426.021221755" observedRunningTime="2026-01-22 13:41:59.094160969 +0000 UTC m=+6426.672276804" watchObservedRunningTime="2026-01-22 13:41:59.099554421 +0000 UTC m=+6426.677670246" Jan 22 13:41:59 crc kubenswrapper[4773]: I0122 13:41:59.331123 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-c8p9v" Jan 22 13:41:59 crc kubenswrapper[4773]: I0122 13:41:59.331187 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-operators-c8p9v" Jan 22 13:41:59 crc kubenswrapper[4773]: I0122 13:41:59.656150 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Jan 22 13:41:59 crc kubenswrapper[4773]: I0122 13:41:59.656650 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="d8cef3c1-4fe1-4d51-a20f-f154a8873fb6" containerName="openstackclient" containerID="cri-o://de5715f01ec7f8bd752a455ceeac7844b9912e726156b8572141c448ebeea943" gracePeriod=2 Jan 22 13:41:59 crc kubenswrapper[4773]: I0122 13:41:59.674770 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Jan 22 13:41:59 crc kubenswrapper[4773]: I0122 13:41:59.781098 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jan 22 13:41:59 crc kubenswrapper[4773]: E0122 13:41:59.800478 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8cef3c1-4fe1-4d51-a20f-f154a8873fb6" containerName="openstackclient" Jan 22 13:41:59 crc kubenswrapper[4773]: I0122 13:41:59.800517 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8cef3c1-4fe1-4d51-a20f-f154a8873fb6" containerName="openstackclient" Jan 22 13:41:59 crc kubenswrapper[4773]: I0122 13:41:59.800758 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8cef3c1-4fe1-4d51-a20f-f154a8873fb6" containerName="openstackclient" Jan 22 13:41:59 crc kubenswrapper[4773]: I0122 13:41:59.801473 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 22 13:41:59 crc kubenswrapper[4773]: I0122 13:41:59.801563 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 13:41:59 crc kubenswrapper[4773]: I0122 13:41:59.842875 4773 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="d8cef3c1-4fe1-4d51-a20f-f154a8873fb6" podUID="f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18" Jan 22 13:41:59 crc kubenswrapper[4773]: I0122 13:41:59.914848 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-openstack-config\") pod \"openstackclient\" (UID: \"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\") " pod="openstack/openstackclient" Jan 22 13:41:59 crc kubenswrapper[4773]: I0122 13:41:59.914902 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6blg\" (UniqueName: \"kubernetes.io/projected/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-kube-api-access-n6blg\") pod \"openstackclient\" (UID: \"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\") " pod="openstack/openstackclient" Jan 22 13:41:59 crc kubenswrapper[4773]: I0122 13:41:59.914965 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-combined-ca-bundle\") pod \"openstackclient\" (UID: \"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\") " pod="openstack/openstackclient" Jan 22 13:41:59 crc kubenswrapper[4773]: I0122 13:41:59.914988 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-openstack-config-secret\") pod \"openstackclient\" (UID: 
\"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\") " pod="openstack/openstackclient" Jan 22 13:42:00 crc kubenswrapper[4773]: I0122 13:42:00.017876 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-openstack-config\") pod \"openstackclient\" (UID: \"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\") " pod="openstack/openstackclient" Jan 22 13:42:00 crc kubenswrapper[4773]: I0122 13:42:00.017923 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6blg\" (UniqueName: \"kubernetes.io/projected/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-kube-api-access-n6blg\") pod \"openstackclient\" (UID: \"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\") " pod="openstack/openstackclient" Jan 22 13:42:00 crc kubenswrapper[4773]: I0122 13:42:00.017972 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-combined-ca-bundle\") pod \"openstackclient\" (UID: \"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\") " pod="openstack/openstackclient" Jan 22 13:42:00 crc kubenswrapper[4773]: I0122 13:42:00.017994 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-openstack-config-secret\") pod \"openstackclient\" (UID: \"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\") " pod="openstack/openstackclient" Jan 22 13:42:00 crc kubenswrapper[4773]: I0122 13:42:00.020734 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-openstack-config\") pod \"openstackclient\" (UID: \"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\") " pod="openstack/openstackclient" Jan 22 13:42:00 crc kubenswrapper[4773]: I0122 13:42:00.024918 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-combined-ca-bundle\") pod \"openstackclient\" (UID: \"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\") " pod="openstack/openstackclient" Jan 22 13:42:00 crc kubenswrapper[4773]: I0122 13:42:00.025829 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-openstack-config-secret\") pod \"openstackclient\" (UID: \"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\") " pod="openstack/openstackclient" Jan 22 13:42:00 crc kubenswrapper[4773]: I0122 13:42:00.048149 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6blg\" (UniqueName: \"kubernetes.io/projected/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-kube-api-access-n6blg\") pod \"openstackclient\" (UID: \"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\") " pod="openstack/openstackclient" Jan 22 13:42:00 crc kubenswrapper[4773]: I0122 13:42:00.067355 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Jan 22 13:42:00 crc kubenswrapper[4773]: I0122 13:42:00.069098 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 22 13:42:00 crc kubenswrapper[4773]: I0122 13:42:00.071716 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-2w4rj" Jan 22 13:42:00 crc kubenswrapper[4773]: I0122 13:42:00.082652 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 22 13:42:00 crc kubenswrapper[4773]: I0122 13:42:00.157855 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 13:42:00 crc kubenswrapper[4773]: I0122 13:42:00.221558 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgn4d\" (UniqueName: \"kubernetes.io/projected/516a794c-7420-49f5-aae8-faca42ece1de-kube-api-access-dgn4d\") pod \"kube-state-metrics-0\" (UID: \"516a794c-7420-49f5-aae8-faca42ece1de\") " pod="openstack/kube-state-metrics-0" Jan 22 13:42:00 crc kubenswrapper[4773]: I0122 13:42:00.325718 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgn4d\" (UniqueName: \"kubernetes.io/projected/516a794c-7420-49f5-aae8-faca42ece1de-kube-api-access-dgn4d\") pod \"kube-state-metrics-0\" (UID: \"516a794c-7420-49f5-aae8-faca42ece1de\") " pod="openstack/kube-state-metrics-0" Jan 22 13:42:00 crc kubenswrapper[4773]: I0122 13:42:00.364222 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgn4d\" (UniqueName: \"kubernetes.io/projected/516a794c-7420-49f5-aae8-faca42ece1de-kube-api-access-dgn4d\") pod \"kube-state-metrics-0\" (UID: \"516a794c-7420-49f5-aae8-faca42ece1de\") " pod="openstack/kube-state-metrics-0" Jan 22 13:42:00 crc kubenswrapper[4773]: I0122 13:42:00.470958 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 22 13:42:00 crc kubenswrapper[4773]: I0122 13:42:00.529530 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-c8p9v" podUID="6e29f46a-de39-4cfb-9463-4416ca921dec" containerName="registry-server" probeResult="failure" output=< Jan 22 13:42:00 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 13:42:00 crc kubenswrapper[4773]: > Jan 22 13:42:00 crc kubenswrapper[4773]: I0122 13:42:00.691268 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:42:00 crc kubenswrapper[4773]: E0122 13:42:00.715508 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.359730 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"] Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.362010 4773 util.go:30] "No sandbox for pod can be found. 
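The startup probe failure just above ("timeout: failed to connect service \":50051\" within 1s") is the marketplace registry-server not yet accepting connections on its gRPC port while the freshly pulled catalog content is still loading; the wording matches the output of a gRPC health-probe binary. A sketch of only the connect-with-deadline part of such a check; the real probe also completes a gRPC health RPC, which this sketch omits.

    package main

    import (
        "fmt"
        "net"
        "time"
    )

    // Minimal model of the failing startup probe above: try to reach
    // the registry-server's port 50051 within 1s. Only the TCP
    // connect-with-deadline step is modeled, since that is what timed
    // out in the log.
    func probe(addr string, timeout time.Duration) error {
        conn, err := net.DialTimeout("tcp", addr, timeout)
        if err != nil {
            return fmt.Errorf("timeout: failed to connect service %q within %s: %w",
                addr, timeout, err)
        }
        return conn.Close()
    }

    func main() {
        if err := probe("127.0.0.1:50051", time.Second); err != nil {
            fmt.Println(err) // unhealthy until the catalog finishes loading
        } else {
            fmt.Println("ready")
        }
    }

The probe recovers on its own: by 13:41:59 the redhat-operators-c8p9v readiness probe is being evaluated, and the startup failure at 13:42:00 is the last one logged for that pod in this window.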
Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.374044 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-vr8kq" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.374261 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.374376 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.374544 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.375607 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.378107 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.470598 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/7599c514-2d2d-4ec6-b004-a4a0d31f032a-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.470682 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/7599c514-2d2d-4ec6-b004-a4a0d31f032a-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.470765 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/7599c514-2d2d-4ec6-b004-a4a0d31f032a-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.470804 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/7599c514-2d2d-4ec6-b004-a4a0d31f032a-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.470832 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdhgn\" (UniqueName: \"kubernetes.io/projected/7599c514-2d2d-4ec6-b004-a4a0d31f032a-kube-api-access-wdhgn\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.470851 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/7599c514-2d2d-4ec6-b004-a4a0d31f032a-config-out\") pod 
\"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.470887 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/7599c514-2d2d-4ec6-b004-a4a0d31f032a-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.493305 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.572414 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/7599c514-2d2d-4ec6-b004-a4a0d31f032a-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.572463 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/7599c514-2d2d-4ec6-b004-a4a0d31f032a-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.572545 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/7599c514-2d2d-4ec6-b004-a4a0d31f032a-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.572585 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/7599c514-2d2d-4ec6-b004-a4a0d31f032a-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.572613 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdhgn\" (UniqueName: \"kubernetes.io/projected/7599c514-2d2d-4ec6-b004-a4a0d31f032a-kube-api-access-wdhgn\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.572629 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/7599c514-2d2d-4ec6-b004-a4a0d31f032a-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.572659 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/7599c514-2d2d-4ec6-b004-a4a0d31f032a-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.573567 4773 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/7599c514-2d2d-4ec6-b004-a4a0d31f032a-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.583235 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/7599c514-2d2d-4ec6-b004-a4a0d31f032a-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.583674 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/7599c514-2d2d-4ec6-b004-a4a0d31f032a-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.585084 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/7599c514-2d2d-4ec6-b004-a4a0d31f032a-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.585153 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/7599c514-2d2d-4ec6-b004-a4a0d31f032a-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.585323 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/7599c514-2d2d-4ec6-b004-a4a0d31f032a-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.630351 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdhgn\" (UniqueName: \"kubernetes.io/projected/7599c514-2d2d-4ec6-b004-a4a0d31f032a-kube-api-access-wdhgn\") pod \"alertmanager-metric-storage-0\" (UID: \"7599c514-2d2d-4ec6-b004-a4a0d31f032a\") " pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.637835 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.640437 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.673020 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.678380 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.681154 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-1" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.682279 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-v4vkr" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.682357 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-2" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.682405 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.683202 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.693625 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.695493 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.701413 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.777855 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.778229 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/545b3013-82c2-48cb-ad02-8062e2f57b76-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.778387 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/545b3013-82c2-48cb-ad02-8062e2f57b76-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.778516 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlzqp\" (UniqueName: \"kubernetes.io/projected/545b3013-82c2-48cb-ad02-8062e2f57b76-kube-api-access-mlzqp\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.778696 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/545b3013-82c2-48cb-ad02-8062e2f57b76-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.778891 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/545b3013-82c2-48cb-ad02-8062e2f57b76-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.779047 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/545b3013-82c2-48cb-ad02-8062e2f57b76-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.779161 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/545b3013-82c2-48cb-ad02-8062e2f57b76-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc 
kubenswrapper[4773]: I0122 13:42:01.779381 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/545b3013-82c2-48cb-ad02-8062e2f57b76-config\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.779511 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/545b3013-82c2-48cb-ad02-8062e2f57b76-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.841965 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.884542 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/545b3013-82c2-48cb-ad02-8062e2f57b76-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.884815 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/545b3013-82c2-48cb-ad02-8062e2f57b76-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.884842 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/545b3013-82c2-48cb-ad02-8062e2f57b76-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.884920 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/545b3013-82c2-48cb-ad02-8062e2f57b76-config\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.884952 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/545b3013-82c2-48cb-ad02-8062e2f57b76-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.885035 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.885075 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: 
\"kubernetes.io/secret/545b3013-82c2-48cb-ad02-8062e2f57b76-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.885110 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/545b3013-82c2-48cb-ad02-8062e2f57b76-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.885131 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlzqp\" (UniqueName: \"kubernetes.io/projected/545b3013-82c2-48cb-ad02-8062e2f57b76-kube-api-access-mlzqp\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.885178 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/545b3013-82c2-48cb-ad02-8062e2f57b76-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.885323 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/545b3013-82c2-48cb-ad02-8062e2f57b76-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.885783 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/545b3013-82c2-48cb-ad02-8062e2f57b76-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.891947 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/545b3013-82c2-48cb-ad02-8062e2f57b76-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.892843 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/545b3013-82c2-48cb-ad02-8062e2f57b76-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.893070 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/545b3013-82c2-48cb-ad02-8062e2f57b76-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc 
kubenswrapper[4773]: I0122 13:42:01.894540 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/545b3013-82c2-48cb-ad02-8062e2f57b76-config\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.896587 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/545b3013-82c2-48cb-ad02-8062e2f57b76-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.900122 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/545b3013-82c2-48cb-ad02-8062e2f57b76-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.910147 4773 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.910419 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/c36ab6b209fa7af719b8ad778c856d635653c7c542e81a4f3464c494fa9744d8/globalmount\"" pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:01 crc kubenswrapper[4773]: I0122 13:42:01.923007 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlzqp\" (UniqueName: \"kubernetes.io/projected/545b3013-82c2-48cb-ad02-8062e2f57b76-kube-api-access-mlzqp\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:02 crc kubenswrapper[4773]: I0122 13:42:02.051355 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\") pod \"prometheus-metric-storage-0\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:02 crc kubenswrapper[4773]: I0122 13:42:02.101608 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"516a794c-7420-49f5-aae8-faca42ece1de","Type":"ContainerStarted","Data":"e8dff7816072c2af3a2e5fa0e4e6d2176ef74aa6285ace52ce272e01d8581a0c"} Jan 22 13:42:02 crc kubenswrapper[4773]: I0122 13:42:02.107874 4773 generic.go:334] "Generic (PLEG): container finished" podID="d8cef3c1-4fe1-4d51-a20f-f154a8873fb6" containerID="de5715f01ec7f8bd752a455ceeac7844b9912e726156b8572141c448ebeea943" exitCode=137 Jan 22 13:42:02 crc kubenswrapper[4773]: I0122 13:42:02.109886 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18","Type":"ContainerStarted","Data":"7af2297ccbd7246b86ed94ef67f6048feeaa22420c3824ad7808d17c2704987c"} 
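The csi_attacher.go:380 entry above records the kubelet skipping the MountDevice (NodeStageVolume) step for pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e because the kubevirt.io.hostpath-provisioner plugin does not advertise the STAGE_UNSTAGE_VOLUME node capability, so the volume proceeds directly to the SetUp (NodePublishVolume) call logged just after. Below is a minimal Go sketch of that capability probe, assuming the standard CSI spec bindings (github.com/container-storage-interface/spec/lib/go/csi) and a hypothetical node socket path; it illustrates the CSI contract the log reflects, not the kubelet's actual implementation.

    package main

    import (
    	"context"
    	"fmt"
    	"log"

    	csi "github.com/container-storage-interface/spec/lib/go/csi"
    	"google.golang.org/grpc"
    )

    func main() {
    	// Assumption: the plugin's node socket path; real drivers register under
    	// /var/lib/kubelet/plugins/<driver-name>/ on the host.
    	conn, err := grpc.Dial("unix:///var/lib/kubelet/plugins/csi-hostpath/csi.sock", grpc.WithInsecure())
    	if err != nil {
    		log.Fatal(err)
    	}
    	defer conn.Close()

    	node := csi.NewNodeClient(conn)
    	resp, err := node.NodeGetCapabilities(context.Background(), &csi.NodeGetCapabilitiesRequest{})
    	if err != nil {
    		log.Fatal(err)
    	}

    	staged := false
    	for _, c := range resp.GetCapabilities() {
    		// STAGE_UNSTAGE_VOLUME tells the kubelet it must call NodeStageVolume
    		// (logged as MountVolume.MountDevice) before NodePublishVolume; when
    		// absent, the kubelet skips straight to SetUp, as in the log above.
    		if c.GetRpc().GetType() == csi.NodeServiceCapability_RPC_STAGE_UNSTAGE_VOLUME {
    			staged = true
    		}
    	}
    	fmt.Println("needs NodeStageVolume:", staged)
    }

Even when staging is skipped, the kubelet still reports "MountVolume.MountDevice succeeded" with the computed global mount path, which is why the log shows both that message and the later MountVolume.SetUp success for the same PVC.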
Jan 22 13:42:02 crc kubenswrapper[4773]: I0122 13:42:02.330753 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:02 crc kubenswrapper[4773]: I0122 13:42:02.413305 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Jan 22 13:42:02 crc kubenswrapper[4773]: W0122 13:42:02.427218 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7599c514_2d2d_4ec6_b004_a4a0d31f032a.slice/crio-185eb5caf20a0441455ace0049d73ce2473be9e7cd94949a9982888106e376f8 WatchSource:0}: Error finding container 185eb5caf20a0441455ace0049d73ce2473be9e7cd94949a9982888106e376f8: Status 404 returned error can't find the container with id 185eb5caf20a0441455ace0049d73ce2473be9e7cd94949a9982888106e376f8 Jan 22 13:42:02 crc kubenswrapper[4773]: I0122 13:42:02.759235 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 13:42:02 crc kubenswrapper[4773]: I0122 13:42:02.783336 4773 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="d8cef3c1-4fe1-4d51-a20f-f154a8873fb6" podUID="f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18" Jan 22 13:42:02 crc kubenswrapper[4773]: I0122 13:42:02.817127 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-openstack-config\") pod \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\" (UID: \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\") " Jan 22 13:42:02 crc kubenswrapper[4773]: I0122 13:42:02.817201 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdc86\" (UniqueName: \"kubernetes.io/projected/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-kube-api-access-mdc86\") pod \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\" (UID: \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\") " Jan 22 13:42:02 crc kubenswrapper[4773]: I0122 13:42:02.817389 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-openstack-config-secret\") pod \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\" (UID: \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\") " Jan 22 13:42:02 crc kubenswrapper[4773]: I0122 13:42:02.817502 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-combined-ca-bundle\") pod \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\" (UID: \"d8cef3c1-4fe1-4d51-a20f-f154a8873fb6\") " Jan 22 13:42:02 crc kubenswrapper[4773]: I0122 13:42:02.839341 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-kube-api-access-mdc86" (OuterVolumeSpecName: "kube-api-access-mdc86") pod "d8cef3c1-4fe1-4d51-a20f-f154a8873fb6" (UID: "d8cef3c1-4fe1-4d51-a20f-f154a8873fb6"). InnerVolumeSpecName "kube-api-access-mdc86". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:42:02 crc kubenswrapper[4773]: I0122 13:42:02.855822 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "d8cef3c1-4fe1-4d51-a20f-f154a8873fb6" (UID: "d8cef3c1-4fe1-4d51-a20f-f154a8873fb6"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:42:02 crc kubenswrapper[4773]: I0122 13:42:02.874164 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d8cef3c1-4fe1-4d51-a20f-f154a8873fb6" (UID: "d8cef3c1-4fe1-4d51-a20f-f154a8873fb6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:42:02 crc kubenswrapper[4773]: I0122 13:42:02.921300 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:02 crc kubenswrapper[4773]: I0122 13:42:02.921335 4773 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:02 crc kubenswrapper[4773]: I0122 13:42:02.921349 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdc86\" (UniqueName: \"kubernetes.io/projected/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-kube-api-access-mdc86\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:02 crc kubenswrapper[4773]: I0122 13:42:02.945470 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "d8cef3c1-4fe1-4d51-a20f-f154a8873fb6" (UID: "d8cef3c1-4fe1-4d51-a20f-f154a8873fb6"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:42:03 crc kubenswrapper[4773]: I0122 13:42:03.023397 4773 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:03 crc kubenswrapper[4773]: W0122 13:42:03.068672 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod545b3013_82c2_48cb_ad02_8062e2f57b76.slice/crio-db51c88cdae328b82c205e6a1f976d11ca892bd544f9b40d34016b24be9c1192 WatchSource:0}: Error finding container db51c88cdae328b82c205e6a1f976d11ca892bd544f9b40d34016b24be9c1192: Status 404 returned error can't find the container with id db51c88cdae328b82c205e6a1f976d11ca892bd544f9b40d34016b24be9c1192 Jan 22 13:42:03 crc kubenswrapper[4773]: I0122 13:42:03.075147 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 22 13:42:03 crc kubenswrapper[4773]: I0122 13:42:03.136034 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18","Type":"ContainerStarted","Data":"bfe89036546c5365b8ccdda9cae590ffa17bd65b1d2dfd6cc5fd321ed0d1ccb6"} Jan 22 13:42:03 crc kubenswrapper[4773]: I0122 13:42:03.141401 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"7599c514-2d2d-4ec6-b004-a4a0d31f032a","Type":"ContainerStarted","Data":"185eb5caf20a0441455ace0049d73ce2473be9e7cd94949a9982888106e376f8"} Jan 22 13:42:03 crc kubenswrapper[4773]: I0122 13:42:03.144087 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"545b3013-82c2-48cb-ad02-8062e2f57b76","Type":"ContainerStarted","Data":"db51c88cdae328b82c205e6a1f976d11ca892bd544f9b40d34016b24be9c1192"} Jan 22 13:42:03 crc kubenswrapper[4773]: I0122 13:42:03.150369 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"516a794c-7420-49f5-aae8-faca42ece1de","Type":"ContainerStarted","Data":"9f5cf96d4e14559de4d5fffcc35a9791fdfc5327143a4388a952a110d8dfed1c"} Jan 22 13:42:03 crc kubenswrapper[4773]: I0122 13:42:03.150547 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jan 22 13:42:03 crc kubenswrapper[4773]: I0122 13:42:03.163497 4773 scope.go:117] "RemoveContainer" containerID="de5715f01ec7f8bd752a455ceeac7844b9912e726156b8572141c448ebeea943" Jan 22 13:42:03 crc kubenswrapper[4773]: I0122 13:42:03.163734 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 22 13:42:03 crc kubenswrapper[4773]: I0122 13:42:03.173969 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=4.173948185 podStartE2EDuration="4.173948185s" podCreationTimestamp="2026-01-22 13:41:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:42:03.168698427 +0000 UTC m=+6430.746814272" watchObservedRunningTime="2026-01-22 13:42:03.173948185 +0000 UTC m=+6430.752064010" Jan 22 13:42:03 crc kubenswrapper[4773]: I0122 13:42:03.195533 4773 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="d8cef3c1-4fe1-4d51-a20f-f154a8873fb6" podUID="f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18" Jan 22 13:42:03 crc kubenswrapper[4773]: I0122 13:42:03.217213 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.775435014 podStartE2EDuration="3.21719177s" podCreationTimestamp="2026-01-22 13:42:00 +0000 UTC" firstStartedPulling="2026-01-22 13:42:01.824068565 +0000 UTC m=+6429.402184400" lastFinishedPulling="2026-01-22 13:42:02.265825331 +0000 UTC m=+6429.843941156" observedRunningTime="2026-01-22 13:42:03.189906533 +0000 UTC m=+6430.768022368" watchObservedRunningTime="2026-01-22 13:42:03.21719177 +0000 UTC m=+6430.795307595" Jan 22 13:42:04 crc kubenswrapper[4773]: I0122 13:42:04.671805 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8cef3c1-4fe1-4d51-a20f-f154a8873fb6" path="/var/lib/kubelet/pods/d8cef3c1-4fe1-4d51-a20f-f154a8873fb6/volumes" Jan 22 13:42:09 crc kubenswrapper[4773]: I0122 13:42:09.251377 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"545b3013-82c2-48cb-ad02-8062e2f57b76","Type":"ContainerStarted","Data":"f8e6e079473187b0d19029371694a9e29bf6c58638755f2924c9fd09d9138b23"} Jan 22 13:42:09 crc kubenswrapper[4773]: I0122 13:42:09.254632 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"7599c514-2d2d-4ec6-b004-a4a0d31f032a","Type":"ContainerStarted","Data":"33e015de5123c460f9e5390e573d9b5794616531fc86f44a445ecbb778a6ec70"} Jan 22 13:42:10 crc kubenswrapper[4773]: I0122 13:42:10.376087 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-c8p9v" podUID="6e29f46a-de39-4cfb-9463-4416ca921dec" containerName="registry-server" probeResult="failure" output=< Jan 22 13:42:10 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 13:42:10 crc kubenswrapper[4773]: > Jan 22 13:42:10 crc kubenswrapper[4773]: I0122 13:42:10.476928 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 22 13:42:11 crc kubenswrapper[4773]: I0122 13:42:11.113472 4773 scope.go:117] "RemoveContainer" containerID="9b145b11eaac8d65487065181d030a20f7dce060a5f5e3777bdf9dce74db5563" Jan 22 13:42:11 crc kubenswrapper[4773]: I0122 13:42:11.147251 4773 scope.go:117] "RemoveContainer" containerID="35c674aa52aae1bacd2dfccb0916d5113b2326330883dc5a604491992a877d02" Jan 22 13:42:11 crc kubenswrapper[4773]: I0122 13:42:11.193741 4773 scope.go:117] "RemoveContainer" containerID="3ae0c7e257abcb0b792730b80e4a2f8be22c0c3bf49d85e5cb04050ecf9c9564" Jan 22 13:42:11 crc 
kubenswrapper[4773]: I0122 13:42:11.270499 4773 scope.go:117] "RemoveContainer" containerID="ea438151a539c61f22762cad63a59b35a1944363ef55a1538c7a355ac608c126" Jan 22 13:42:11 crc kubenswrapper[4773]: I0122 13:42:11.295145 4773 scope.go:117] "RemoveContainer" containerID="b56b44e8d048d16059f1360402019d5475e4ac29f89c86328c17b75cd157b1ff" Jan 22 13:42:11 crc kubenswrapper[4773]: I0122 13:42:11.352341 4773 scope.go:117] "RemoveContainer" containerID="a7c0e14c9ba87fda15ef654a70a77c273bc712bb65e7901fbff0eac3d92a7fcb" Jan 22 13:42:11 crc kubenswrapper[4773]: I0122 13:42:11.417936 4773 scope.go:117] "RemoveContainer" containerID="834a5a5e9606d2c9fa65118e06ee2eb3185115cf3698022c460cccb1d05a0a16" Jan 22 13:42:13 crc kubenswrapper[4773]: I0122 13:42:13.659025 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:42:13 crc kubenswrapper[4773]: E0122 13:42:13.659774 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:42:16 crc kubenswrapper[4773]: I0122 13:42:16.347260 4773 generic.go:334] "Generic (PLEG): container finished" podID="7599c514-2d2d-4ec6-b004-a4a0d31f032a" containerID="33e015de5123c460f9e5390e573d9b5794616531fc86f44a445ecbb778a6ec70" exitCode=0 Jan 22 13:42:16 crc kubenswrapper[4773]: I0122 13:42:16.347398 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"7599c514-2d2d-4ec6-b004-a4a0d31f032a","Type":"ContainerDied","Data":"33e015de5123c460f9e5390e573d9b5794616531fc86f44a445ecbb778a6ec70"} Jan 22 13:42:16 crc kubenswrapper[4773]: I0122 13:42:16.351443 4773 generic.go:334] "Generic (PLEG): container finished" podID="545b3013-82c2-48cb-ad02-8062e2f57b76" containerID="f8e6e079473187b0d19029371694a9e29bf6c58638755f2924c9fd09d9138b23" exitCode=0 Jan 22 13:42:16 crc kubenswrapper[4773]: I0122 13:42:16.351502 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"545b3013-82c2-48cb-ad02-8062e2f57b76","Type":"ContainerDied","Data":"f8e6e079473187b0d19029371694a9e29bf6c58638755f2924c9fd09d9138b23"} Jan 22 13:42:16 crc kubenswrapper[4773]: I0122 13:42:16.352272 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 13:42:17 crc kubenswrapper[4773]: I0122 13:42:17.055744 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-tlcxh"] Jan 22 13:42:17 crc kubenswrapper[4773]: I0122 13:42:17.066125 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-tlcxh"] Jan 22 13:42:17 crc kubenswrapper[4773]: I0122 13:42:17.077197 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-70b4-account-create-update-vwxwn"] Jan 22 13:42:17 crc kubenswrapper[4773]: I0122 13:42:17.090280 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-70b4-account-create-update-vwxwn"] Jan 22 13:42:18 crc kubenswrapper[4773]: I0122 13:42:18.685750 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55fea942-2605-41d2-bb57-d93c99d5c4bb" 
path="/var/lib/kubelet/pods/55fea942-2605-41d2-bb57-d93c99d5c4bb/volumes" Jan 22 13:42:18 crc kubenswrapper[4773]: I0122 13:42:18.686949 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98ea57e8-c88e-49c3-b559-60c51c38e4cc" path="/var/lib/kubelet/pods/98ea57e8-c88e-49c3-b559-60c51c38e4cc/volumes" Jan 22 13:42:19 crc kubenswrapper[4773]: I0122 13:42:19.380397 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"7599c514-2d2d-4ec6-b004-a4a0d31f032a","Type":"ContainerStarted","Data":"9e7afb763ecd0b0142e5e3d4ad48d3595fcd2ec05098a1d34f9a6dd666e83cf4"} Jan 22 13:42:19 crc kubenswrapper[4773]: I0122 13:42:19.403656 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-c8p9v" Jan 22 13:42:19 crc kubenswrapper[4773]: I0122 13:42:19.461050 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-c8p9v" Jan 22 13:42:20 crc kubenswrapper[4773]: I0122 13:42:20.131098 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c8p9v"] Jan 22 13:42:21 crc kubenswrapper[4773]: I0122 13:42:21.398010 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-c8p9v" podUID="6e29f46a-de39-4cfb-9463-4416ca921dec" containerName="registry-server" containerID="cri-o://88b1bd4555c954349f103c2594a2b4a3cdebe53a418d87822c54b26144104999" gracePeriod=2 Jan 22 13:42:23 crc kubenswrapper[4773]: I0122 13:42:23.491709 4773 generic.go:334] "Generic (PLEG): container finished" podID="6e29f46a-de39-4cfb-9463-4416ca921dec" containerID="88b1bd4555c954349f103c2594a2b4a3cdebe53a418d87822c54b26144104999" exitCode=0 Jan 22 13:42:23 crc kubenswrapper[4773]: I0122 13:42:23.492021 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c8p9v" event={"ID":"6e29f46a-de39-4cfb-9463-4416ca921dec","Type":"ContainerDied","Data":"88b1bd4555c954349f103c2594a2b4a3cdebe53a418d87822c54b26144104999"} Jan 22 13:42:23 crc kubenswrapper[4773]: I0122 13:42:23.552544 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"7599c514-2d2d-4ec6-b004-a4a0d31f032a","Type":"ContainerStarted","Data":"306233f2c9f2aba3ed31d583b8f38969afacb01d2e690fc3cd4ce74b6a14c0bd"} Jan 22 13:42:23 crc kubenswrapper[4773]: I0122 13:42:23.554267 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:23 crc kubenswrapper[4773]: I0122 13:42:23.560880 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0" Jan 22 13:42:23 crc kubenswrapper[4773]: I0122 13:42:23.595699 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=6.1149512 podStartE2EDuration="22.595662723s" podCreationTimestamp="2026-01-22 13:42:01 +0000 UTC" firstStartedPulling="2026-01-22 13:42:02.433343389 +0000 UTC m=+6430.011459214" lastFinishedPulling="2026-01-22 13:42:18.914054912 +0000 UTC m=+6446.492170737" observedRunningTime="2026-01-22 13:42:23.594672435 +0000 UTC m=+6451.172788260" watchObservedRunningTime="2026-01-22 13:42:23.595662723 +0000 UTC m=+6451.173778548" Jan 22 13:42:24 crc kubenswrapper[4773]: I0122 13:42:24.047151 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/placement-db-sync-tmksn"] Jan 22 13:42:24 crc kubenswrapper[4773]: I0122 13:42:24.061310 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-tmksn"] Jan 22 13:42:24 crc kubenswrapper[4773]: I0122 13:42:24.671991 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57ca37d9-91fa-4049-b3e0-d90b1b12ec11" path="/var/lib/kubelet/pods/57ca37d9-91fa-4049-b3e0-d90b1b12ec11/volumes" Jan 22 13:42:27 crc kubenswrapper[4773]: I0122 13:42:27.169371 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c8p9v" Jan 22 13:42:27 crc kubenswrapper[4773]: I0122 13:42:27.224264 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9z8ll\" (UniqueName: \"kubernetes.io/projected/6e29f46a-de39-4cfb-9463-4416ca921dec-kube-api-access-9z8ll\") pod \"6e29f46a-de39-4cfb-9463-4416ca921dec\" (UID: \"6e29f46a-de39-4cfb-9463-4416ca921dec\") " Jan 22 13:42:27 crc kubenswrapper[4773]: I0122 13:42:27.224378 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e29f46a-de39-4cfb-9463-4416ca921dec-catalog-content\") pod \"6e29f46a-de39-4cfb-9463-4416ca921dec\" (UID: \"6e29f46a-de39-4cfb-9463-4416ca921dec\") " Jan 22 13:42:27 crc kubenswrapper[4773]: I0122 13:42:27.224403 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e29f46a-de39-4cfb-9463-4416ca921dec-utilities\") pod \"6e29f46a-de39-4cfb-9463-4416ca921dec\" (UID: \"6e29f46a-de39-4cfb-9463-4416ca921dec\") " Jan 22 13:42:27 crc kubenswrapper[4773]: I0122 13:42:27.225080 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e29f46a-de39-4cfb-9463-4416ca921dec-utilities" (OuterVolumeSpecName: "utilities") pod "6e29f46a-de39-4cfb-9463-4416ca921dec" (UID: "6e29f46a-de39-4cfb-9463-4416ca921dec"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:42:27 crc kubenswrapper[4773]: I0122 13:42:27.232742 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e29f46a-de39-4cfb-9463-4416ca921dec-kube-api-access-9z8ll" (OuterVolumeSpecName: "kube-api-access-9z8ll") pod "6e29f46a-de39-4cfb-9463-4416ca921dec" (UID: "6e29f46a-de39-4cfb-9463-4416ca921dec"). InnerVolumeSpecName "kube-api-access-9z8ll". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:42:27 crc kubenswrapper[4773]: I0122 13:42:27.326708 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9z8ll\" (UniqueName: \"kubernetes.io/projected/6e29f46a-de39-4cfb-9463-4416ca921dec-kube-api-access-9z8ll\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:27 crc kubenswrapper[4773]: I0122 13:42:27.326748 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e29f46a-de39-4cfb-9463-4416ca921dec-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:27 crc kubenswrapper[4773]: I0122 13:42:27.599234 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c8p9v" event={"ID":"6e29f46a-de39-4cfb-9463-4416ca921dec","Type":"ContainerDied","Data":"e6b8c470635eb7276f809211b1bdcc68db40f7b0bceb872ac9a01927450a3ec8"} Jan 22 13:42:27 crc kubenswrapper[4773]: I0122 13:42:27.599666 4773 scope.go:117] "RemoveContainer" containerID="88b1bd4555c954349f103c2594a2b4a3cdebe53a418d87822c54b26144104999" Jan 22 13:42:27 crc kubenswrapper[4773]: I0122 13:42:27.599982 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c8p9v" Jan 22 13:42:27 crc kubenswrapper[4773]: I0122 13:42:27.626061 4773 scope.go:117] "RemoveContainer" containerID="750054c78bd169cd2b546eb1d0346c30cd233d3d485faab9d62a943271850d48" Jan 22 13:42:27 crc kubenswrapper[4773]: I0122 13:42:27.655199 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e29f46a-de39-4cfb-9463-4416ca921dec-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6e29f46a-de39-4cfb-9463-4416ca921dec" (UID: "6e29f46a-de39-4cfb-9463-4416ca921dec"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:42:27 crc kubenswrapper[4773]: I0122 13:42:27.656836 4773 scope.go:117] "RemoveContainer" containerID="85718827acebbe18ae6fbd3c36e17bfffb8fefdcdab7e4b3c183f8135ee1650f" Jan 22 13:42:27 crc kubenswrapper[4773]: I0122 13:42:27.740277 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e29f46a-de39-4cfb-9463-4416ca921dec-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:27 crc kubenswrapper[4773]: I0122 13:42:27.935980 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c8p9v"] Jan 22 13:42:27 crc kubenswrapper[4773]: I0122 13:42:27.944186 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-c8p9v"] Jan 22 13:42:28 crc kubenswrapper[4773]: I0122 13:42:28.660197 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:42:28 crc kubenswrapper[4773]: E0122 13:42:28.660819 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:42:28 crc kubenswrapper[4773]: I0122 13:42:28.677615 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e29f46a-de39-4cfb-9463-4416ca921dec" path="/var/lib/kubelet/pods/6e29f46a-de39-4cfb-9463-4416ca921dec/volumes" Jan 22 13:42:30 crc kubenswrapper[4773]: I0122 13:42:30.629515 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"545b3013-82c2-48cb-ad02-8062e2f57b76","Type":"ContainerStarted","Data":"8fef60b28ac53a59a0bc377ef486a2c2edf0114831f2aa7b7f2a69825a9c2394"} Jan 22 13:42:34 crc kubenswrapper[4773]: I0122 13:42:34.679479 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"545b3013-82c2-48cb-ad02-8062e2f57b76","Type":"ContainerStarted","Data":"9a32de87b916ae730fb8a07996f023b40fde68e948d36806d8900880b176d9ee"} Jan 22 13:42:38 crc kubenswrapper[4773]: I0122 13:42:38.723729 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"545b3013-82c2-48cb-ad02-8062e2f57b76","Type":"ContainerStarted","Data":"761031b4d2bf830637a50039021b1d21ba46475910015fc41603c892f8972efd"} Jan 22 13:42:38 crc kubenswrapper[4773]: I0122 13:42:38.781934 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=3.854646437 podStartE2EDuration="38.781912295s" podCreationTimestamp="2026-01-22 13:42:00 +0000 UTC" firstStartedPulling="2026-01-22 13:42:03.071012372 +0000 UTC m=+6430.649128197" lastFinishedPulling="2026-01-22 13:42:37.99827823 +0000 UTC m=+6465.576394055" observedRunningTime="2026-01-22 13:42:38.770229816 +0000 UTC m=+6466.348345701" watchObservedRunningTime="2026-01-22 13:42:38.781912295 +0000 UTC m=+6466.360028130" Jan 22 13:42:40 crc kubenswrapper[4773]: I0122 13:42:40.658803 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:42:40 crc kubenswrapper[4773]: 
E0122 13:42:40.659470 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:42:42 crc kubenswrapper[4773]: I0122 13:42:42.331414 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:44 crc kubenswrapper[4773]: I0122 13:42:44.804526 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 22 13:42:44 crc kubenswrapper[4773]: E0122 13:42:44.805477 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e29f46a-de39-4cfb-9463-4416ca921dec" containerName="extract-content" Jan 22 13:42:44 crc kubenswrapper[4773]: I0122 13:42:44.805727 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e29f46a-de39-4cfb-9463-4416ca921dec" containerName="extract-content" Jan 22 13:42:44 crc kubenswrapper[4773]: E0122 13:42:44.805794 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e29f46a-de39-4cfb-9463-4416ca921dec" containerName="extract-utilities" Jan 22 13:42:44 crc kubenswrapper[4773]: I0122 13:42:44.805807 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e29f46a-de39-4cfb-9463-4416ca921dec" containerName="extract-utilities" Jan 22 13:42:44 crc kubenswrapper[4773]: E0122 13:42:44.805829 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e29f46a-de39-4cfb-9463-4416ca921dec" containerName="registry-server" Jan 22 13:42:44 crc kubenswrapper[4773]: I0122 13:42:44.805842 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e29f46a-de39-4cfb-9463-4416ca921dec" containerName="registry-server" Jan 22 13:42:44 crc kubenswrapper[4773]: I0122 13:42:44.806231 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e29f46a-de39-4cfb-9463-4416ca921dec" containerName="registry-server" Jan 22 13:42:44 crc kubenswrapper[4773]: I0122 13:42:44.809900 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 13:42:44 crc kubenswrapper[4773]: I0122 13:42:44.814796 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 22 13:42:44 crc kubenswrapper[4773]: I0122 13:42:44.814843 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 22 13:42:44 crc kubenswrapper[4773]: I0122 13:42:44.819756 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 13:42:44 crc kubenswrapper[4773]: I0122 13:42:44.973220 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-scripts\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:44 crc kubenswrapper[4773]: I0122 13:42:44.973270 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-run-httpd\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:44 crc kubenswrapper[4773]: I0122 13:42:44.973327 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:44 crc kubenswrapper[4773]: I0122 13:42:44.973430 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:44 crc kubenswrapper[4773]: I0122 13:42:44.973474 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-config-data\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:44 crc kubenswrapper[4773]: I0122 13:42:44.973609 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-log-httpd\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:44 crc kubenswrapper[4773]: I0122 13:42:44.973733 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkbdp\" (UniqueName: \"kubernetes.io/projected/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-kube-api-access-gkbdp\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:45 crc kubenswrapper[4773]: I0122 13:42:45.076341 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-scripts\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:45 crc kubenswrapper[4773]: I0122 13:42:45.076396 4773 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-run-httpd\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:45 crc kubenswrapper[4773]: I0122 13:42:45.076428 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:45 crc kubenswrapper[4773]: I0122 13:42:45.076474 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:45 crc kubenswrapper[4773]: I0122 13:42:45.076504 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-config-data\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:45 crc kubenswrapper[4773]: I0122 13:42:45.076580 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-log-httpd\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:45 crc kubenswrapper[4773]: I0122 13:42:45.076676 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkbdp\" (UniqueName: \"kubernetes.io/projected/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-kube-api-access-gkbdp\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:45 crc kubenswrapper[4773]: I0122 13:42:45.077115 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-run-httpd\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:45 crc kubenswrapper[4773]: I0122 13:42:45.077116 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-log-httpd\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:45 crc kubenswrapper[4773]: I0122 13:42:45.083393 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:45 crc kubenswrapper[4773]: I0122 13:42:45.083911 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:45 crc kubenswrapper[4773]: I0122 13:42:45.084419 4773 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-scripts\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:45 crc kubenswrapper[4773]: I0122 13:42:45.088405 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-config-data\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:45 crc kubenswrapper[4773]: I0122 13:42:45.095091 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkbdp\" (UniqueName: \"kubernetes.io/projected/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-kube-api-access-gkbdp\") pod \"ceilometer-0\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " pod="openstack/ceilometer-0" Jan 22 13:42:45 crc kubenswrapper[4773]: I0122 13:42:45.149182 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 13:42:45 crc kubenswrapper[4773]: I0122 13:42:45.642857 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 13:42:45 crc kubenswrapper[4773]: I0122 13:42:45.797343 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f0beac6-9234-46e0-86bc-e61e4dbbaf90","Type":"ContainerStarted","Data":"62ee0a93ed2d63e767b09e4ca0372fc714435ecb3179175ce220999cc60e478c"} Jan 22 13:42:47 crc kubenswrapper[4773]: I0122 13:42:47.331808 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:47 crc kubenswrapper[4773]: I0122 13:42:47.333915 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:47 crc kubenswrapper[4773]: I0122 13:42:47.823343 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f0beac6-9234-46e0-86bc-e61e4dbbaf90","Type":"ContainerStarted","Data":"c3017f645beb4cd384c4256392efdb0739ab8defb529c311f13071d42d19c8e6"} Jan 22 13:42:47 crc kubenswrapper[4773]: I0122 13:42:47.825201 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:48 crc kubenswrapper[4773]: I0122 13:42:48.841569 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f0beac6-9234-46e0-86bc-e61e4dbbaf90","Type":"ContainerStarted","Data":"e82a5f1cbe3bf65f2c64731a61d61ed08be213641d8ea23411bf337c11e175ae"} Jan 22 13:42:49 crc kubenswrapper[4773]: I0122 13:42:49.778070 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Jan 22 13:42:49 crc kubenswrapper[4773]: I0122 13:42:49.778613 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18" containerName="openstackclient" containerID="cri-o://bfe89036546c5365b8ccdda9cae590ffa17bd65b1d2dfd6cc5fd321ed0d1ccb6" gracePeriod=2 Jan 22 13:42:49 crc kubenswrapper[4773]: I0122 13:42:49.781837 4773 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18" podUID="88b4759c-3fdf-4b34-9420-03eb7c3dd19d" Jan 22 13:42:49 crc kubenswrapper[4773]: I0122 13:42:49.795573 4773 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Jan 22 13:42:49 crc kubenswrapper[4773]: I0122 13:42:49.806019 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jan 22 13:42:49 crc kubenswrapper[4773]: E0122 13:42:49.819015 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18" containerName="openstackclient" Jan 22 13:42:49 crc kubenswrapper[4773]: I0122 13:42:49.819061 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18" containerName="openstackclient" Jan 22 13:42:49 crc kubenswrapper[4773]: I0122 13:42:49.819401 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18" containerName="openstackclient" Jan 22 13:42:49 crc kubenswrapper[4773]: I0122 13:42:49.820237 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 13:42:49 crc kubenswrapper[4773]: I0122 13:42:49.846718 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 22 13:42:49 crc kubenswrapper[4773]: I0122 13:42:49.857796 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f0beac6-9234-46e0-86bc-e61e4dbbaf90","Type":"ContainerStarted","Data":"f58559c136e66b7439b156f2dadf87302528a7311b7601b294fb4376c40541a8"} Jan 22 13:42:49 crc kubenswrapper[4773]: I0122 13:42:49.890001 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/88b4759c-3fdf-4b34-9420-03eb7c3dd19d-openstack-config-secret\") pod \"openstackclient\" (UID: \"88b4759c-3fdf-4b34-9420-03eb7c3dd19d\") " pod="openstack/openstackclient" Jan 22 13:42:49 crc kubenswrapper[4773]: I0122 13:42:49.890096 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvfjg\" (UniqueName: \"kubernetes.io/projected/88b4759c-3fdf-4b34-9420-03eb7c3dd19d-kube-api-access-pvfjg\") pod \"openstackclient\" (UID: \"88b4759c-3fdf-4b34-9420-03eb7c3dd19d\") " pod="openstack/openstackclient" Jan 22 13:42:49 crc kubenswrapper[4773]: I0122 13:42:49.890155 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88b4759c-3fdf-4b34-9420-03eb7c3dd19d-combined-ca-bundle\") pod \"openstackclient\" (UID: \"88b4759c-3fdf-4b34-9420-03eb7c3dd19d\") " pod="openstack/openstackclient" Jan 22 13:42:49 crc kubenswrapper[4773]: I0122 13:42:49.890352 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/88b4759c-3fdf-4b34-9420-03eb7c3dd19d-openstack-config\") pod \"openstackclient\" (UID: \"88b4759c-3fdf-4b34-9420-03eb7c3dd19d\") " pod="openstack/openstackclient" Jan 22 13:42:49 crc kubenswrapper[4773]: I0122 13:42:49.992924 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvfjg\" (UniqueName: \"kubernetes.io/projected/88b4759c-3fdf-4b34-9420-03eb7c3dd19d-kube-api-access-pvfjg\") pod \"openstackclient\" (UID: \"88b4759c-3fdf-4b34-9420-03eb7c3dd19d\") " pod="openstack/openstackclient" Jan 22 13:42:49 crc kubenswrapper[4773]: I0122 13:42:49.992992 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88b4759c-3fdf-4b34-9420-03eb7c3dd19d-combined-ca-bundle\") pod \"openstackclient\" (UID: \"88b4759c-3fdf-4b34-9420-03eb7c3dd19d\") " pod="openstack/openstackclient" Jan 22 13:42:49 crc kubenswrapper[4773]: I0122 13:42:49.993126 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/88b4759c-3fdf-4b34-9420-03eb7c3dd19d-openstack-config\") pod \"openstackclient\" (UID: \"88b4759c-3fdf-4b34-9420-03eb7c3dd19d\") " pod="openstack/openstackclient" Jan 22 13:42:49 crc kubenswrapper[4773]: I0122 13:42:49.993250 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/88b4759c-3fdf-4b34-9420-03eb7c3dd19d-openstack-config-secret\") pod \"openstackclient\" (UID: \"88b4759c-3fdf-4b34-9420-03eb7c3dd19d\") " pod="openstack/openstackclient" Jan 22 13:42:49 crc kubenswrapper[4773]: I0122 13:42:49.995099 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/88b4759c-3fdf-4b34-9420-03eb7c3dd19d-openstack-config\") pod \"openstackclient\" (UID: \"88b4759c-3fdf-4b34-9420-03eb7c3dd19d\") " pod="openstack/openstackclient" Jan 22 13:42:50 crc kubenswrapper[4773]: I0122 13:42:50.008976 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/88b4759c-3fdf-4b34-9420-03eb7c3dd19d-openstack-config-secret\") pod \"openstackclient\" (UID: \"88b4759c-3fdf-4b34-9420-03eb7c3dd19d\") " pod="openstack/openstackclient" Jan 22 13:42:50 crc kubenswrapper[4773]: I0122 13:42:50.015928 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88b4759c-3fdf-4b34-9420-03eb7c3dd19d-combined-ca-bundle\") pod \"openstackclient\" (UID: \"88b4759c-3fdf-4b34-9420-03eb7c3dd19d\") " pod="openstack/openstackclient" Jan 22 13:42:50 crc kubenswrapper[4773]: I0122 13:42:50.023143 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvfjg\" (UniqueName: \"kubernetes.io/projected/88b4759c-3fdf-4b34-9420-03eb7c3dd19d-kube-api-access-pvfjg\") pod \"openstackclient\" (UID: \"88b4759c-3fdf-4b34-9420-03eb7c3dd19d\") " pod="openstack/openstackclient" Jan 22 13:42:50 crc kubenswrapper[4773]: I0122 13:42:50.158237 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 22 13:42:50 crc kubenswrapper[4773]: I0122 13:42:50.756271 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 22 13:42:50 crc kubenswrapper[4773]: W0122 13:42:50.765583 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod88b4759c_3fdf_4b34_9420_03eb7c3dd19d.slice/crio-288a8dc624e74ae3b9623830ced8482e6b769bd955c0dc548b684311c62203b6 WatchSource:0}: Error finding container 288a8dc624e74ae3b9623830ced8482e6b769bd955c0dc548b684311c62203b6: Status 404 returned error can't find the container with id 288a8dc624e74ae3b9623830ced8482e6b769bd955c0dc548b684311c62203b6 Jan 22 13:42:50 crc kubenswrapper[4773]: I0122 13:42:50.873723 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"88b4759c-3fdf-4b34-9420-03eb7c3dd19d","Type":"ContainerStarted","Data":"288a8dc624e74ae3b9623830ced8482e6b769bd955c0dc548b684311c62203b6"} Jan 22 13:42:51 crc kubenswrapper[4773]: I0122 13:42:51.148738 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 22 13:42:51 crc kubenswrapper[4773]: I0122 13:42:51.149364 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="545b3013-82c2-48cb-ad02-8062e2f57b76" containerName="prometheus" containerID="cri-o://8fef60b28ac53a59a0bc377ef486a2c2edf0114831f2aa7b7f2a69825a9c2394" gracePeriod=600 Jan 22 13:42:51 crc kubenswrapper[4773]: I0122 13:42:51.149482 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="545b3013-82c2-48cb-ad02-8062e2f57b76" containerName="thanos-sidecar" containerID="cri-o://761031b4d2bf830637a50039021b1d21ba46475910015fc41603c892f8972efd" gracePeriod=600 Jan 22 13:42:51 crc kubenswrapper[4773]: I0122 13:42:51.149476 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="545b3013-82c2-48cb-ad02-8062e2f57b76" containerName="config-reloader" containerID="cri-o://9a32de87b916ae730fb8a07996f023b40fde68e948d36806d8900880b176d9ee" gracePeriod=600 Jan 22 13:42:51 crc kubenswrapper[4773]: I0122 13:42:51.912453 4773 generic.go:334] "Generic (PLEG): container finished" podID="545b3013-82c2-48cb-ad02-8062e2f57b76" containerID="761031b4d2bf830637a50039021b1d21ba46475910015fc41603c892f8972efd" exitCode=0 Jan 22 13:42:51 crc kubenswrapper[4773]: I0122 13:42:51.912706 4773 generic.go:334] "Generic (PLEG): container finished" podID="545b3013-82c2-48cb-ad02-8062e2f57b76" containerID="9a32de87b916ae730fb8a07996f023b40fde68e948d36806d8900880b176d9ee" exitCode=0 Jan 22 13:42:51 crc kubenswrapper[4773]: I0122 13:42:51.912716 4773 generic.go:334] "Generic (PLEG): container finished" podID="545b3013-82c2-48cb-ad02-8062e2f57b76" containerID="8fef60b28ac53a59a0bc377ef486a2c2edf0114831f2aa7b7f2a69825a9c2394" exitCode=0 Jan 22 13:42:51 crc kubenswrapper[4773]: I0122 13:42:51.912767 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"545b3013-82c2-48cb-ad02-8062e2f57b76","Type":"ContainerDied","Data":"761031b4d2bf830637a50039021b1d21ba46475910015fc41603c892f8972efd"} Jan 22 13:42:51 crc kubenswrapper[4773]: I0122 13:42:51.912805 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" 
event={"ID":"545b3013-82c2-48cb-ad02-8062e2f57b76","Type":"ContainerDied","Data":"9a32de87b916ae730fb8a07996f023b40fde68e948d36806d8900880b176d9ee"} Jan 22 13:42:51 crc kubenswrapper[4773]: I0122 13:42:51.912815 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"545b3013-82c2-48cb-ad02-8062e2f57b76","Type":"ContainerDied","Data":"8fef60b28ac53a59a0bc377ef486a2c2edf0114831f2aa7b7f2a69825a9c2394"} Jan 22 13:42:51 crc kubenswrapper[4773]: I0122 13:42:51.919744 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f0beac6-9234-46e0-86bc-e61e4dbbaf90","Type":"ContainerStarted","Data":"42e42b2767b492994eaf126461a0003eb8374425f4557be77418f0b845cda307"} Jan 22 13:42:51 crc kubenswrapper[4773]: I0122 13:42:51.920783 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 22 13:42:51 crc kubenswrapper[4773]: I0122 13:42:51.922530 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"88b4759c-3fdf-4b34-9420-03eb7c3dd19d","Type":"ContainerStarted","Data":"dcfabd0995c3bf2f5b0fcf73f6b951220d88c981b1797bb1451fc7ed5a129f35"} Jan 22 13:42:51 crc kubenswrapper[4773]: I0122 13:42:51.934558 4773 generic.go:334] "Generic (PLEG): container finished" podID="f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18" containerID="bfe89036546c5365b8ccdda9cae590ffa17bd65b1d2dfd6cc5fd321ed0d1ccb6" exitCode=137 Jan 22 13:42:51 crc kubenswrapper[4773]: I0122 13:42:51.971350 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.942215677 podStartE2EDuration="7.971328213s" podCreationTimestamp="2026-01-22 13:42:44 +0000 UTC" firstStartedPulling="2026-01-22 13:42:45.647132217 +0000 UTC m=+6473.225248042" lastFinishedPulling="2026-01-22 13:42:50.676244753 +0000 UTC m=+6478.254360578" observedRunningTime="2026-01-22 13:42:51.95022136 +0000 UTC m=+6479.528337185" watchObservedRunningTime="2026-01-22 13:42:51.971328213 +0000 UTC m=+6479.549444038" Jan 22 13:42:51 crc kubenswrapper[4773]: I0122 13:42:51.981868 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.981847349 podStartE2EDuration="2.981847349s" podCreationTimestamp="2026-01-22 13:42:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:42:51.970444218 +0000 UTC m=+6479.548560043" watchObservedRunningTime="2026-01-22 13:42:51.981847349 +0000 UTC m=+6479.559963174" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.172778 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.245991 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-combined-ca-bundle\") pod \"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\" (UID: \"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\") " Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.246099 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-openstack-config\") pod \"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\" (UID: \"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\") " Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.246133 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-openstack-config-secret\") pod \"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\" (UID: \"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\") " Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.246301 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6blg\" (UniqueName: \"kubernetes.io/projected/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-kube-api-access-n6blg\") pod \"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\" (UID: \"f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18\") " Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.255694 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-kube-api-access-n6blg" (OuterVolumeSpecName: "kube-api-access-n6blg") pod "f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18" (UID: "f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18"). InnerVolumeSpecName "kube-api-access-n6blg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.266545 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.279619 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18" (UID: "f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.304907 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18" (UID: "f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.331674 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18" (UID: "f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.348563 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlzqp\" (UniqueName: \"kubernetes.io/projected/545b3013-82c2-48cb-ad02-8062e2f57b76-kube-api-access-mlzqp\") pod \"545b3013-82c2-48cb-ad02-8062e2f57b76\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.348613 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/545b3013-82c2-48cb-ad02-8062e2f57b76-tls-assets\") pod \"545b3013-82c2-48cb-ad02-8062e2f57b76\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.348661 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/545b3013-82c2-48cb-ad02-8062e2f57b76-prometheus-metric-storage-rulefiles-0\") pod \"545b3013-82c2-48cb-ad02-8062e2f57b76\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.348703 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/545b3013-82c2-48cb-ad02-8062e2f57b76-config-out\") pod \"545b3013-82c2-48cb-ad02-8062e2f57b76\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.348723 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/545b3013-82c2-48cb-ad02-8062e2f57b76-prometheus-metric-storage-rulefiles-2\") pod \"545b3013-82c2-48cb-ad02-8062e2f57b76\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.348753 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/545b3013-82c2-48cb-ad02-8062e2f57b76-web-config\") pod \"545b3013-82c2-48cb-ad02-8062e2f57b76\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.348779 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/545b3013-82c2-48cb-ad02-8062e2f57b76-config\") pod \"545b3013-82c2-48cb-ad02-8062e2f57b76\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.348828 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/545b3013-82c2-48cb-ad02-8062e2f57b76-thanos-prometheus-http-client-file\") pod \"545b3013-82c2-48cb-ad02-8062e2f57b76\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.348876 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/545b3013-82c2-48cb-ad02-8062e2f57b76-prometheus-metric-storage-rulefiles-1\") pod \"545b3013-82c2-48cb-ad02-8062e2f57b76\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.349006 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\") pod \"545b3013-82c2-48cb-ad02-8062e2f57b76\" (UID: \"545b3013-82c2-48cb-ad02-8062e2f57b76\") " Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.349531 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/545b3013-82c2-48cb-ad02-8062e2f57b76-prometheus-metric-storage-rulefiles-2" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-2") pod "545b3013-82c2-48cb-ad02-8062e2f57b76" (UID: "545b3013-82c2-48cb-ad02-8062e2f57b76"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-2". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.349790 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6blg\" (UniqueName: \"kubernetes.io/projected/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-kube-api-access-n6blg\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.349808 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.349818 4773 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.349827 4773 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.349838 4773 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/545b3013-82c2-48cb-ad02-8062e2f57b76-prometheus-metric-storage-rulefiles-2\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.351825 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/545b3013-82c2-48cb-ad02-8062e2f57b76-kube-api-access-mlzqp" (OuterVolumeSpecName: "kube-api-access-mlzqp") pod "545b3013-82c2-48cb-ad02-8062e2f57b76" (UID: "545b3013-82c2-48cb-ad02-8062e2f57b76"). InnerVolumeSpecName "kube-api-access-mlzqp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.354119 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/545b3013-82c2-48cb-ad02-8062e2f57b76-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "545b3013-82c2-48cb-ad02-8062e2f57b76" (UID: "545b3013-82c2-48cb-ad02-8062e2f57b76"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.354163 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/545b3013-82c2-48cb-ad02-8062e2f57b76-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "545b3013-82c2-48cb-ad02-8062e2f57b76" (UID: "545b3013-82c2-48cb-ad02-8062e2f57b76"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.354188 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/545b3013-82c2-48cb-ad02-8062e2f57b76-prometheus-metric-storage-rulefiles-1" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-1") pod "545b3013-82c2-48cb-ad02-8062e2f57b76" (UID: "545b3013-82c2-48cb-ad02-8062e2f57b76"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.355397 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/545b3013-82c2-48cb-ad02-8062e2f57b76-config" (OuterVolumeSpecName: "config") pod "545b3013-82c2-48cb-ad02-8062e2f57b76" (UID: "545b3013-82c2-48cb-ad02-8062e2f57b76"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.355888 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/545b3013-82c2-48cb-ad02-8062e2f57b76-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "545b3013-82c2-48cb-ad02-8062e2f57b76" (UID: "545b3013-82c2-48cb-ad02-8062e2f57b76"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.357258 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/545b3013-82c2-48cb-ad02-8062e2f57b76-config-out" (OuterVolumeSpecName: "config-out") pod "545b3013-82c2-48cb-ad02-8062e2f57b76" (UID: "545b3013-82c2-48cb-ad02-8062e2f57b76"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.429699 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "545b3013-82c2-48cb-ad02-8062e2f57b76" (UID: "545b3013-82c2-48cb-ad02-8062e2f57b76"). InnerVolumeSpecName "pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.432610 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/545b3013-82c2-48cb-ad02-8062e2f57b76-web-config" (OuterVolumeSpecName: "web-config") pod "545b3013-82c2-48cb-ad02-8062e2f57b76" (UID: "545b3013-82c2-48cb-ad02-8062e2f57b76"). InnerVolumeSpecName "web-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.452733 4773 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/545b3013-82c2-48cb-ad02-8062e2f57b76-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.452950 4773 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/545b3013-82c2-48cb-ad02-8062e2f57b76-prometheus-metric-storage-rulefiles-1\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.453104 4773 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\") on node \"crc\" " Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.453192 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlzqp\" (UniqueName: \"kubernetes.io/projected/545b3013-82c2-48cb-ad02-8062e2f57b76-kube-api-access-mlzqp\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.453259 4773 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/545b3013-82c2-48cb-ad02-8062e2f57b76-tls-assets\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.453343 4773 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/545b3013-82c2-48cb-ad02-8062e2f57b76-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.453403 4773 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/545b3013-82c2-48cb-ad02-8062e2f57b76-config-out\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.453459 4773 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/545b3013-82c2-48cb-ad02-8062e2f57b76-web-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.453515 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/545b3013-82c2-48cb-ad02-8062e2f57b76-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.480574 4773 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.480758 4773 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e") on node "crc" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.556167 4773 reconciler_common.go:293] "Volume detached for volume \"pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\") on node \"crc\" DevicePath \"\"" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.672398 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18" path="/var/lib/kubelet/pods/f109a5c0-b59c-4a1e-b9bc-0a1bb014ea18/volumes" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.945800 4773 scope.go:117] "RemoveContainer" containerID="bfe89036546c5365b8ccdda9cae590ffa17bd65b1d2dfd6cc5fd321ed0d1ccb6" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.945880 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.950947 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"545b3013-82c2-48cb-ad02-8062e2f57b76","Type":"ContainerDied","Data":"db51c88cdae328b82c205e6a1f976d11ca892bd544f9b40d34016b24be9c1192"} Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.952215 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.980001 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.989057 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 22 13:42:52 crc kubenswrapper[4773]: I0122 13:42:52.989118 4773 scope.go:117] "RemoveContainer" containerID="761031b4d2bf830637a50039021b1d21ba46475910015fc41603c892f8972efd" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.044504 4773 scope.go:117] "RemoveContainer" containerID="9a32de87b916ae730fb8a07996f023b40fde68e948d36806d8900880b176d9ee" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.046351 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 22 13:42:53 crc kubenswrapper[4773]: E0122 13:42:53.046884 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="545b3013-82c2-48cb-ad02-8062e2f57b76" containerName="config-reloader" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.046897 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="545b3013-82c2-48cb-ad02-8062e2f57b76" containerName="config-reloader" Jan 22 13:42:53 crc kubenswrapper[4773]: E0122 13:42:53.046910 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="545b3013-82c2-48cb-ad02-8062e2f57b76" containerName="init-config-reloader" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.046917 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="545b3013-82c2-48cb-ad02-8062e2f57b76" containerName="init-config-reloader" Jan 22 13:42:53 crc kubenswrapper[4773]: E0122 13:42:53.046933 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="545b3013-82c2-48cb-ad02-8062e2f57b76" 
containerName="prometheus" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.046940 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="545b3013-82c2-48cb-ad02-8062e2f57b76" containerName="prometheus" Jan 22 13:42:53 crc kubenswrapper[4773]: E0122 13:42:53.046957 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="545b3013-82c2-48cb-ad02-8062e2f57b76" containerName="thanos-sidecar" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.046963 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="545b3013-82c2-48cb-ad02-8062e2f57b76" containerName="thanos-sidecar" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.047149 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="545b3013-82c2-48cb-ad02-8062e2f57b76" containerName="config-reloader" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.047160 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="545b3013-82c2-48cb-ad02-8062e2f57b76" containerName="thanos-sidecar" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.047171 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="545b3013-82c2-48cb-ad02-8062e2f57b76" containerName="prometheus" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.049086 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.055440 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.055461 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-1" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.055654 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-2" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.055834 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.057540 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.057659 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.058215 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.067185 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-v4vkr" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.069513 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.085331 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.112665 4773 scope.go:117] "RemoveContainer" containerID="8fef60b28ac53a59a0bc377ef486a2c2edf0114831f2aa7b7f2a69825a9c2394" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.145120 4773 scope.go:117] "RemoveContainer" 
containerID="f8e6e079473187b0d19029371694a9e29bf6c58638755f2924c9fd09d9138b23" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.172825 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.172877 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.172906 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.172956 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.172999 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.173033 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.173088 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jr4vt\" (UniqueName: \"kubernetes.io/projected/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-kube-api-access-jr4vt\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.173112 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-thanos-prometheus-http-client-file\") pod 
\"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.173141 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-config\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.173157 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.173175 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.173197 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.173225 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.275528 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.275604 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.275639 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc 
kubenswrapper[4773]: I0122 13:42:53.275712 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jr4vt\" (UniqueName: \"kubernetes.io/projected/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-kube-api-access-jr4vt\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.275742 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.275775 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-config\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.275791 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.275808 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.275838 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.275871 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.275919 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.275976 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: 
\"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.276002 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.279578 4773 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.279610 4773 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/c36ab6b209fa7af719b8ad778c856d635653c7c542e81a4f3464c494fa9744d8/globalmount\"" pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.281225 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.283402 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.283633 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-1" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.283808 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.283815 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.283996 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.284127 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-2" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.284269 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.289420 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.289771 4773 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.289987 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.292418 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.294075 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-config\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.294485 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.295864 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.298270 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.301264 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.304938 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.305018 4773 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-jr4vt\" (UniqueName: \"kubernetes.io/projected/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-kube-api-access-jr4vt\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.311999 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.342580 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c063c7e3-2701-460e-ae6b-6e381b097e0e\") pod \"prometheus-metric-storage-0\" (UID: \"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a\") " pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.393952 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-v4vkr" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.403153 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.892261 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Jan 22 13:42:53 crc kubenswrapper[4773]: W0122 13:42:53.909145 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4aef4bbd_0a65_4d1f_9846_b18a4dc4bb9a.slice/crio-686ea075b1e5a75b08bb85c8857d3af710eb1106af68ffb1ee45d20fb667d900 WatchSource:0}: Error finding container 686ea075b1e5a75b08bb85c8857d3af710eb1106af68ffb1ee45d20fb667d900: Status 404 returned error can't find the container with id 686ea075b1e5a75b08bb85c8857d3af710eb1106af68ffb1ee45d20fb667d900 Jan 22 13:42:53 crc kubenswrapper[4773]: I0122 13:42:53.965116 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a","Type":"ContainerStarted","Data":"686ea075b1e5a75b08bb85c8857d3af710eb1106af68ffb1ee45d20fb667d900"} Jan 22 13:42:54 crc kubenswrapper[4773]: E0122 13:42:54.387226 4773 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod545b3013_82c2_48cb_ad02_8062e2f57b76.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf109a5c0_b59c_4a1e_b9bc_0a1bb014ea18.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod545b3013_82c2_48cb_ad02_8062e2f57b76.slice/crio-db51c88cdae328b82c205e6a1f976d11ca892bd544f9b40d34016b24be9c1192\": RecentStats: unable to find data in memory cache]" Jan 22 13:42:54 crc kubenswrapper[4773]: I0122 13:42:54.676560 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="545b3013-82c2-48cb-ad02-8062e2f57b76" path="/var/lib/kubelet/pods/545b3013-82c2-48cb-ad02-8062e2f57b76/volumes" Jan 22 13:42:55 crc kubenswrapper[4773]: I0122 13:42:55.658650 4773 scope.go:117] "RemoveContainer" 
containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:42:55 crc kubenswrapper[4773]: E0122 13:42:55.658984 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.456356 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-gwgqx"] Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.458015 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-gwgqx" Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.464644 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-gwgqx"] Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.558101 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmc9m\" (UniqueName: \"kubernetes.io/projected/768bc147-1241-4613-a1c0-13b218690eb8-kube-api-access-zmc9m\") pod \"aodh-db-create-gwgqx\" (UID: \"768bc147-1241-4613-a1c0-13b218690eb8\") " pod="openstack/aodh-db-create-gwgqx" Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.558538 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/768bc147-1241-4613-a1c0-13b218690eb8-operator-scripts\") pod \"aodh-db-create-gwgqx\" (UID: \"768bc147-1241-4613-a1c0-13b218690eb8\") " pod="openstack/aodh-db-create-gwgqx" Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.655681 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-fc02-account-create-update-l6hb2"] Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.657159 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-fc02-account-create-update-l6hb2" Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.659429 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret" Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.660488 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/768bc147-1241-4613-a1c0-13b218690eb8-operator-scripts\") pod \"aodh-db-create-gwgqx\" (UID: \"768bc147-1241-4613-a1c0-13b218690eb8\") " pod="openstack/aodh-db-create-gwgqx" Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.660604 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmc9m\" (UniqueName: \"kubernetes.io/projected/768bc147-1241-4613-a1c0-13b218690eb8-kube-api-access-zmc9m\") pod \"aodh-db-create-gwgqx\" (UID: \"768bc147-1241-4613-a1c0-13b218690eb8\") " pod="openstack/aodh-db-create-gwgqx" Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.661670 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/768bc147-1241-4613-a1c0-13b218690eb8-operator-scripts\") pod \"aodh-db-create-gwgqx\" (UID: \"768bc147-1241-4613-a1c0-13b218690eb8\") " pod="openstack/aodh-db-create-gwgqx" Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.689780 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmc9m\" (UniqueName: \"kubernetes.io/projected/768bc147-1241-4613-a1c0-13b218690eb8-kube-api-access-zmc9m\") pod \"aodh-db-create-gwgqx\" (UID: \"768bc147-1241-4613-a1c0-13b218690eb8\") " pod="openstack/aodh-db-create-gwgqx" Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.689862 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-fc02-account-create-update-l6hb2"] Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.763140 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc8s5\" (UniqueName: \"kubernetes.io/projected/476de395-9692-4d5d-9283-91f71b6d049b-kube-api-access-jc8s5\") pod \"aodh-fc02-account-create-update-l6hb2\" (UID: \"476de395-9692-4d5d-9283-91f71b6d049b\") " pod="openstack/aodh-fc02-account-create-update-l6hb2" Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.764509 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/476de395-9692-4d5d-9283-91f71b6d049b-operator-scripts\") pod \"aodh-fc02-account-create-update-l6hb2\" (UID: \"476de395-9692-4d5d-9283-91f71b6d049b\") " pod="openstack/aodh-fc02-account-create-update-l6hb2" Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.777171 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-gwgqx" Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.866417 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc8s5\" (UniqueName: \"kubernetes.io/projected/476de395-9692-4d5d-9283-91f71b6d049b-kube-api-access-jc8s5\") pod \"aodh-fc02-account-create-update-l6hb2\" (UID: \"476de395-9692-4d5d-9283-91f71b6d049b\") " pod="openstack/aodh-fc02-account-create-update-l6hb2" Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.866541 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/476de395-9692-4d5d-9283-91f71b6d049b-operator-scripts\") pod \"aodh-fc02-account-create-update-l6hb2\" (UID: \"476de395-9692-4d5d-9283-91f71b6d049b\") " pod="openstack/aodh-fc02-account-create-update-l6hb2" Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.867763 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/476de395-9692-4d5d-9283-91f71b6d049b-operator-scripts\") pod \"aodh-fc02-account-create-update-l6hb2\" (UID: \"476de395-9692-4d5d-9283-91f71b6d049b\") " pod="openstack/aodh-fc02-account-create-update-l6hb2" Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.900311 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jc8s5\" (UniqueName: \"kubernetes.io/projected/476de395-9692-4d5d-9283-91f71b6d049b-kube-api-access-jc8s5\") pod \"aodh-fc02-account-create-update-l6hb2\" (UID: \"476de395-9692-4d5d-9283-91f71b6d049b\") " pod="openstack/aodh-fc02-account-create-update-l6hb2" Jan 22 13:42:56 crc kubenswrapper[4773]: I0122 13:42:56.985601 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-fc02-account-create-update-l6hb2" Jan 22 13:42:57 crc kubenswrapper[4773]: I0122 13:42:57.434065 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-gwgqx"] Jan 22 13:42:57 crc kubenswrapper[4773]: I0122 13:42:57.518434 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-fc02-account-create-update-l6hb2"] Jan 22 13:42:57 crc kubenswrapper[4773]: W0122 13:42:57.603727 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod476de395_9692_4d5d_9283_91f71b6d049b.slice/crio-8892b816a7c270f7f80296a70f461f304f264b19878532eb93307994cbf45092 WatchSource:0}: Error finding container 8892b816a7c270f7f80296a70f461f304f264b19878532eb93307994cbf45092: Status 404 returned error can't find the container with id 8892b816a7c270f7f80296a70f461f304f264b19878532eb93307994cbf45092 Jan 22 13:42:58 crc kubenswrapper[4773]: I0122 13:42:58.039983 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-fc02-account-create-update-l6hb2" event={"ID":"476de395-9692-4d5d-9283-91f71b6d049b","Type":"ContainerStarted","Data":"36741715a434ecd8c59a9ec8c0735d450ec657fd242d2e1b0d998bcddf22a548"} Jan 22 13:42:58 crc kubenswrapper[4773]: I0122 13:42:58.040299 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-fc02-account-create-update-l6hb2" event={"ID":"476de395-9692-4d5d-9283-91f71b6d049b","Type":"ContainerStarted","Data":"8892b816a7c270f7f80296a70f461f304f264b19878532eb93307994cbf45092"} Jan 22 13:42:58 crc kubenswrapper[4773]: I0122 13:42:58.042964 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-gwgqx" event={"ID":"768bc147-1241-4613-a1c0-13b218690eb8","Type":"ContainerStarted","Data":"d5983520ccfe71aeeef3f25b4ee0de79de89ac13a1aaac17deaa2426eda22d76"} Jan 22 13:42:58 crc kubenswrapper[4773]: I0122 13:42:58.043007 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-gwgqx" event={"ID":"768bc147-1241-4613-a1c0-13b218690eb8","Type":"ContainerStarted","Data":"53a31423025b7d5c44ffbab25cd67f355f47994f4e159b4ba6b885eca2deb174"} Jan 22 13:42:58 crc kubenswrapper[4773]: I0122 13:42:58.069432 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-fc02-account-create-update-l6hb2" podStartSLOduration=2.069410784 podStartE2EDuration="2.069410784s" podCreationTimestamp="2026-01-22 13:42:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:42:58.056476281 +0000 UTC m=+6485.634592116" watchObservedRunningTime="2026-01-22 13:42:58.069410784 +0000 UTC m=+6485.647526609" Jan 22 13:42:58 crc kubenswrapper[4773]: I0122 13:42:58.080955 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-create-gwgqx" podStartSLOduration=2.080929188 podStartE2EDuration="2.080929188s" podCreationTimestamp="2026-01-22 13:42:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:42:58.073956492 +0000 UTC m=+6485.652072337" watchObservedRunningTime="2026-01-22 13:42:58.080929188 +0000 UTC m=+6485.659045013" Jan 22 13:42:59 crc kubenswrapper[4773]: I0122 13:42:59.056996 4773 generic.go:334] "Generic (PLEG): container finished" podID="476de395-9692-4d5d-9283-91f71b6d049b" 
containerID="36741715a434ecd8c59a9ec8c0735d450ec657fd242d2e1b0d998bcddf22a548" exitCode=0 Jan 22 13:42:59 crc kubenswrapper[4773]: I0122 13:42:59.057435 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-fc02-account-create-update-l6hb2" event={"ID":"476de395-9692-4d5d-9283-91f71b6d049b","Type":"ContainerDied","Data":"36741715a434ecd8c59a9ec8c0735d450ec657fd242d2e1b0d998bcddf22a548"} Jan 22 13:42:59 crc kubenswrapper[4773]: I0122 13:42:59.067096 4773 generic.go:334] "Generic (PLEG): container finished" podID="768bc147-1241-4613-a1c0-13b218690eb8" containerID="d5983520ccfe71aeeef3f25b4ee0de79de89ac13a1aaac17deaa2426eda22d76" exitCode=0 Jan 22 13:42:59 crc kubenswrapper[4773]: I0122 13:42:59.067194 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-gwgqx" event={"ID":"768bc147-1241-4613-a1c0-13b218690eb8","Type":"ContainerDied","Data":"d5983520ccfe71aeeef3f25b4ee0de79de89ac13a1aaac17deaa2426eda22d76"} Jan 22 13:42:59 crc kubenswrapper[4773]: I0122 13:42:59.070725 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a","Type":"ContainerStarted","Data":"85cb47dba2ad045a992259b2b5b5fde72f6f5d7b02657df23372d0ae8428da05"} Jan 22 13:43:00 crc kubenswrapper[4773]: I0122 13:43:00.538202 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-gwgqx" Jan 22 13:43:00 crc kubenswrapper[4773]: I0122 13:43:00.559203 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-fc02-account-create-update-l6hb2" Jan 22 13:43:00 crc kubenswrapper[4773]: I0122 13:43:00.667168 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jc8s5\" (UniqueName: \"kubernetes.io/projected/476de395-9692-4d5d-9283-91f71b6d049b-kube-api-access-jc8s5\") pod \"476de395-9692-4d5d-9283-91f71b6d049b\" (UID: \"476de395-9692-4d5d-9283-91f71b6d049b\") " Jan 22 13:43:00 crc kubenswrapper[4773]: I0122 13:43:00.667238 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmc9m\" (UniqueName: \"kubernetes.io/projected/768bc147-1241-4613-a1c0-13b218690eb8-kube-api-access-zmc9m\") pod \"768bc147-1241-4613-a1c0-13b218690eb8\" (UID: \"768bc147-1241-4613-a1c0-13b218690eb8\") " Jan 22 13:43:00 crc kubenswrapper[4773]: I0122 13:43:00.667420 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/476de395-9692-4d5d-9283-91f71b6d049b-operator-scripts\") pod \"476de395-9692-4d5d-9283-91f71b6d049b\" (UID: \"476de395-9692-4d5d-9283-91f71b6d049b\") " Jan 22 13:43:00 crc kubenswrapper[4773]: I0122 13:43:00.667473 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/768bc147-1241-4613-a1c0-13b218690eb8-operator-scripts\") pod \"768bc147-1241-4613-a1c0-13b218690eb8\" (UID: \"768bc147-1241-4613-a1c0-13b218690eb8\") " Jan 22 13:43:00 crc kubenswrapper[4773]: I0122 13:43:00.668464 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/476de395-9692-4d5d-9283-91f71b6d049b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "476de395-9692-4d5d-9283-91f71b6d049b" (UID: "476de395-9692-4d5d-9283-91f71b6d049b"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:43:00 crc kubenswrapper[4773]: I0122 13:43:00.668013 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/768bc147-1241-4613-a1c0-13b218690eb8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "768bc147-1241-4613-a1c0-13b218690eb8" (UID: "768bc147-1241-4613-a1c0-13b218690eb8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:43:00 crc kubenswrapper[4773]: I0122 13:43:00.690197 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/476de395-9692-4d5d-9283-91f71b6d049b-kube-api-access-jc8s5" (OuterVolumeSpecName: "kube-api-access-jc8s5") pod "476de395-9692-4d5d-9283-91f71b6d049b" (UID: "476de395-9692-4d5d-9283-91f71b6d049b"). InnerVolumeSpecName "kube-api-access-jc8s5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:43:00 crc kubenswrapper[4773]: I0122 13:43:00.691045 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/768bc147-1241-4613-a1c0-13b218690eb8-kube-api-access-zmc9m" (OuterVolumeSpecName: "kube-api-access-zmc9m") pod "768bc147-1241-4613-a1c0-13b218690eb8" (UID: "768bc147-1241-4613-a1c0-13b218690eb8"). InnerVolumeSpecName "kube-api-access-zmc9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:43:00 crc kubenswrapper[4773]: I0122 13:43:00.772804 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jc8s5\" (UniqueName: \"kubernetes.io/projected/476de395-9692-4d5d-9283-91f71b6d049b-kube-api-access-jc8s5\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:00 crc kubenswrapper[4773]: I0122 13:43:00.772842 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmc9m\" (UniqueName: \"kubernetes.io/projected/768bc147-1241-4613-a1c0-13b218690eb8-kube-api-access-zmc9m\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:00 crc kubenswrapper[4773]: I0122 13:43:00.772857 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/476de395-9692-4d5d-9283-91f71b6d049b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:00 crc kubenswrapper[4773]: I0122 13:43:00.772869 4773 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/768bc147-1241-4613-a1c0-13b218690eb8-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:01 crc kubenswrapper[4773]: I0122 13:43:01.097407 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-gwgqx" Jan 22 13:43:01 crc kubenswrapper[4773]: I0122 13:43:01.097408 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-gwgqx" event={"ID":"768bc147-1241-4613-a1c0-13b218690eb8","Type":"ContainerDied","Data":"53a31423025b7d5c44ffbab25cd67f355f47994f4e159b4ba6b885eca2deb174"} Jan 22 13:43:01 crc kubenswrapper[4773]: I0122 13:43:01.097531 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53a31423025b7d5c44ffbab25cd67f355f47994f4e159b4ba6b885eca2deb174" Jan 22 13:43:01 crc kubenswrapper[4773]: I0122 13:43:01.099817 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-fc02-account-create-update-l6hb2" event={"ID":"476de395-9692-4d5d-9283-91f71b6d049b","Type":"ContainerDied","Data":"8892b816a7c270f7f80296a70f461f304f264b19878532eb93307994cbf45092"} Jan 22 13:43:01 crc kubenswrapper[4773]: I0122 13:43:01.099873 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8892b816a7c270f7f80296a70f461f304f264b19878532eb93307994cbf45092" Jan 22 13:43:01 crc kubenswrapper[4773]: I0122 13:43:01.099955 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-fc02-account-create-update-l6hb2" Jan 22 13:43:01 crc kubenswrapper[4773]: I0122 13:43:01.913259 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-sn4wr"] Jan 22 13:43:01 crc kubenswrapper[4773]: E0122 13:43:01.913844 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="768bc147-1241-4613-a1c0-13b218690eb8" containerName="mariadb-database-create" Jan 22 13:43:01 crc kubenswrapper[4773]: I0122 13:43:01.913863 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="768bc147-1241-4613-a1c0-13b218690eb8" containerName="mariadb-database-create" Jan 22 13:43:01 crc kubenswrapper[4773]: E0122 13:43:01.913898 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="476de395-9692-4d5d-9283-91f71b6d049b" containerName="mariadb-account-create-update" Jan 22 13:43:01 crc kubenswrapper[4773]: I0122 13:43:01.913908 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="476de395-9692-4d5d-9283-91f71b6d049b" containerName="mariadb-account-create-update" Jan 22 13:43:01 crc kubenswrapper[4773]: I0122 13:43:01.914165 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="768bc147-1241-4613-a1c0-13b218690eb8" containerName="mariadb-database-create" Jan 22 13:43:01 crc kubenswrapper[4773]: I0122 13:43:01.914206 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="476de395-9692-4d5d-9283-91f71b6d049b" containerName="mariadb-account-create-update" Jan 22 13:43:01 crc kubenswrapper[4773]: I0122 13:43:01.915126 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-sn4wr" Jan 22 13:43:01 crc kubenswrapper[4773]: I0122 13:43:01.918010 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-dtqmr" Jan 22 13:43:01 crc kubenswrapper[4773]: I0122 13:43:01.918034 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Jan 22 13:43:01 crc kubenswrapper[4773]: I0122 13:43:01.918788 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 22 13:43:01 crc kubenswrapper[4773]: I0122 13:43:01.919977 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Jan 22 13:43:01 crc kubenswrapper[4773]: I0122 13:43:01.928176 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-sn4wr"] Jan 22 13:43:02 crc kubenswrapper[4773]: I0122 13:43:02.022967 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-config-data\") pod \"aodh-db-sync-sn4wr\" (UID: \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\") " pod="openstack/aodh-db-sync-sn4wr" Jan 22 13:43:02 crc kubenswrapper[4773]: I0122 13:43:02.023012 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-scripts\") pod \"aodh-db-sync-sn4wr\" (UID: \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\") " pod="openstack/aodh-db-sync-sn4wr" Jan 22 13:43:02 crc kubenswrapper[4773]: I0122 13:43:02.023136 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58fh7\" (UniqueName: \"kubernetes.io/projected/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-kube-api-access-58fh7\") pod \"aodh-db-sync-sn4wr\" (UID: \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\") " pod="openstack/aodh-db-sync-sn4wr" Jan 22 13:43:02 crc kubenswrapper[4773]: I0122 13:43:02.023205 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-combined-ca-bundle\") pod \"aodh-db-sync-sn4wr\" (UID: \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\") " pod="openstack/aodh-db-sync-sn4wr" Jan 22 13:43:02 crc kubenswrapper[4773]: I0122 13:43:02.125771 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58fh7\" (UniqueName: \"kubernetes.io/projected/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-kube-api-access-58fh7\") pod \"aodh-db-sync-sn4wr\" (UID: \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\") " pod="openstack/aodh-db-sync-sn4wr" Jan 22 13:43:02 crc kubenswrapper[4773]: I0122 13:43:02.125903 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-combined-ca-bundle\") pod \"aodh-db-sync-sn4wr\" (UID: \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\") " pod="openstack/aodh-db-sync-sn4wr" Jan 22 13:43:02 crc kubenswrapper[4773]: I0122 13:43:02.126002 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-config-data\") pod \"aodh-db-sync-sn4wr\" (UID: \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\") " pod="openstack/aodh-db-sync-sn4wr" Jan 22 13:43:02 crc 
kubenswrapper[4773]: I0122 13:43:02.126033 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-scripts\") pod \"aodh-db-sync-sn4wr\" (UID: \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\") " pod="openstack/aodh-db-sync-sn4wr" Jan 22 13:43:02 crc kubenswrapper[4773]: I0122 13:43:02.130033 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-combined-ca-bundle\") pod \"aodh-db-sync-sn4wr\" (UID: \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\") " pod="openstack/aodh-db-sync-sn4wr" Jan 22 13:43:02 crc kubenswrapper[4773]: I0122 13:43:02.130908 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-config-data\") pod \"aodh-db-sync-sn4wr\" (UID: \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\") " pod="openstack/aodh-db-sync-sn4wr" Jan 22 13:43:02 crc kubenswrapper[4773]: I0122 13:43:02.131117 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-scripts\") pod \"aodh-db-sync-sn4wr\" (UID: \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\") " pod="openstack/aodh-db-sync-sn4wr" Jan 22 13:43:02 crc kubenswrapper[4773]: I0122 13:43:02.144592 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58fh7\" (UniqueName: \"kubernetes.io/projected/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-kube-api-access-58fh7\") pod \"aodh-db-sync-sn4wr\" (UID: \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\") " pod="openstack/aodh-db-sync-sn4wr" Jan 22 13:43:02 crc kubenswrapper[4773]: I0122 13:43:02.240655 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-sn4wr" Jan 22 13:43:02 crc kubenswrapper[4773]: I0122 13:43:02.760686 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-sn4wr"] Jan 22 13:43:03 crc kubenswrapper[4773]: I0122 13:43:03.119846 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-sn4wr" event={"ID":"5e072ba5-f7d6-4378-9f71-b283a1a37bc0","Type":"ContainerStarted","Data":"92af9f1d103688b0a0bc86d50434c6d92e1d7668e2e6a5dc68a94a98c25914a8"} Jan 22 13:43:04 crc kubenswrapper[4773]: E0122 13:43:04.675374 4773 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod545b3013_82c2_48cb_ad02_8062e2f57b76.slice/crio-db51c88cdae328b82c205e6a1f976d11ca892bd544f9b40d34016b24be9c1192\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod545b3013_82c2_48cb_ad02_8062e2f57b76.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf109a5c0_b59c_4a1e_b9bc_0a1bb014ea18.slice\": RecentStats: unable to find data in memory cache]" Jan 22 13:43:06 crc kubenswrapper[4773]: I0122 13:43:06.157651 4773 generic.go:334] "Generic (PLEG): container finished" podID="4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a" containerID="85cb47dba2ad045a992259b2b5b5fde72f6f5d7b02657df23372d0ae8428da05" exitCode=0 Jan 22 13:43:06 crc kubenswrapper[4773]: I0122 13:43:06.157752 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a","Type":"ContainerDied","Data":"85cb47dba2ad045a992259b2b5b5fde72f6f5d7b02657df23372d0ae8428da05"} Jan 22 13:43:06 crc kubenswrapper[4773]: I0122 13:43:06.658653 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:43:06 crc kubenswrapper[4773]: E0122 13:43:06.659452 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:43:07 crc kubenswrapper[4773]: I0122 13:43:07.169172 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-sn4wr" event={"ID":"5e072ba5-f7d6-4378-9f71-b283a1a37bc0","Type":"ContainerStarted","Data":"d7acd7d4a3ee3f37fc2c39f7160bcfba6fae907b5c51a1f62eb3369393100de5"} Jan 22 13:43:07 crc kubenswrapper[4773]: I0122 13:43:07.174463 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a","Type":"ContainerStarted","Data":"d9c511870a56900efe8df9206b374a66987c1fe1d6de00e7c4c5dd7e20602ee3"} Jan 22 13:43:07 crc kubenswrapper[4773]: I0122 13:43:07.191347 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-sn4wr" podStartSLOduration=2.576583209 podStartE2EDuration="6.191324624s" podCreationTimestamp="2026-01-22 13:43:01 +0000 UTC" firstStartedPulling="2026-01-22 13:43:02.764060212 +0000 UTC m=+6490.342176037" lastFinishedPulling="2026-01-22 13:43:06.378801627 +0000 UTC 
m=+6493.956917452" observedRunningTime="2026-01-22 13:43:07.187919718 +0000 UTC m=+6494.766035593" watchObservedRunningTime="2026-01-22 13:43:07.191324624 +0000 UTC m=+6494.769440459" Jan 22 13:43:09 crc kubenswrapper[4773]: I0122 13:43:09.222327 4773 generic.go:334] "Generic (PLEG): container finished" podID="5e072ba5-f7d6-4378-9f71-b283a1a37bc0" containerID="d7acd7d4a3ee3f37fc2c39f7160bcfba6fae907b5c51a1f62eb3369393100de5" exitCode=0 Jan 22 13:43:09 crc kubenswrapper[4773]: I0122 13:43:09.222403 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-sn4wr" event={"ID":"5e072ba5-f7d6-4378-9f71-b283a1a37bc0","Type":"ContainerDied","Data":"d7acd7d4a3ee3f37fc2c39f7160bcfba6fae907b5c51a1f62eb3369393100de5"} Jan 22 13:43:10 crc kubenswrapper[4773]: I0122 13:43:10.657529 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-sn4wr" Jan 22 13:43:10 crc kubenswrapper[4773]: I0122 13:43:10.732760 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-config-data\") pod \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\" (UID: \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\") " Jan 22 13:43:10 crc kubenswrapper[4773]: I0122 13:43:10.732876 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-scripts\") pod \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\" (UID: \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\") " Jan 22 13:43:10 crc kubenswrapper[4773]: I0122 13:43:10.732991 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-combined-ca-bundle\") pod \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\" (UID: \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\") " Jan 22 13:43:10 crc kubenswrapper[4773]: I0122 13:43:10.733061 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58fh7\" (UniqueName: \"kubernetes.io/projected/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-kube-api-access-58fh7\") pod \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\" (UID: \"5e072ba5-f7d6-4378-9f71-b283a1a37bc0\") " Jan 22 13:43:10 crc kubenswrapper[4773]: I0122 13:43:10.742299 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-scripts" (OuterVolumeSpecName: "scripts") pod "5e072ba5-f7d6-4378-9f71-b283a1a37bc0" (UID: "5e072ba5-f7d6-4378-9f71-b283a1a37bc0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:43:10 crc kubenswrapper[4773]: I0122 13:43:10.742473 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-kube-api-access-58fh7" (OuterVolumeSpecName: "kube-api-access-58fh7") pod "5e072ba5-f7d6-4378-9f71-b283a1a37bc0" (UID: "5e072ba5-f7d6-4378-9f71-b283a1a37bc0"). InnerVolumeSpecName "kube-api-access-58fh7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:43:10 crc kubenswrapper[4773]: I0122 13:43:10.770506 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5e072ba5-f7d6-4378-9f71-b283a1a37bc0" (UID: "5e072ba5-f7d6-4378-9f71-b283a1a37bc0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:43:10 crc kubenswrapper[4773]: I0122 13:43:10.777800 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-config-data" (OuterVolumeSpecName: "config-data") pod "5e072ba5-f7d6-4378-9f71-b283a1a37bc0" (UID: "5e072ba5-f7d6-4378-9f71-b283a1a37bc0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:43:10 crc kubenswrapper[4773]: I0122 13:43:10.835577 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58fh7\" (UniqueName: \"kubernetes.io/projected/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-kube-api-access-58fh7\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:10 crc kubenswrapper[4773]: I0122 13:43:10.835620 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:10 crc kubenswrapper[4773]: I0122 13:43:10.835629 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:10 crc kubenswrapper[4773]: I0122 13:43:10.835654 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e072ba5-f7d6-4378-9f71-b283a1a37bc0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:11 crc kubenswrapper[4773]: I0122 13:43:11.257182 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-sn4wr" event={"ID":"5e072ba5-f7d6-4378-9f71-b283a1a37bc0","Type":"ContainerDied","Data":"92af9f1d103688b0a0bc86d50434c6d92e1d7668e2e6a5dc68a94a98c25914a8"} Jan 22 13:43:11 crc kubenswrapper[4773]: I0122 13:43:11.257228 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="92af9f1d103688b0a0bc86d50434c6d92e1d7668e2e6a5dc68a94a98c25914a8" Jan 22 13:43:11 crc kubenswrapper[4773]: I0122 13:43:11.257327 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-sn4wr" Jan 22 13:43:11 crc kubenswrapper[4773]: I0122 13:43:11.559402 4773 scope.go:117] "RemoveContainer" containerID="1192aad70877cc6f305e6f5eaab1aaef6d7d514ac3e5c98a96c80ac4978a8be3" Jan 22 13:43:11 crc kubenswrapper[4773]: I0122 13:43:11.624511 4773 scope.go:117] "RemoveContainer" containerID="36faddcba4325b02c2eb374cc80a53b512107b7bd78e35cce56b7b647853cd9b" Jan 22 13:43:11 crc kubenswrapper[4773]: I0122 13:43:11.670833 4773 scope.go:117] "RemoveContainer" containerID="329c449ce303338e0507c2f42208035d62e3137c28fbea301712edfb46222a26" Jan 22 13:43:11 crc kubenswrapper[4773]: I0122 13:43:11.752161 4773 scope.go:117] "RemoveContainer" containerID="f023786d20596fa62037b57318b35bac9a95bfb1c95b50e36480b32abb111f9b" Jan 22 13:43:11 crc kubenswrapper[4773]: I0122 13:43:11.998419 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Jan 22 13:43:11 crc kubenswrapper[4773]: E0122 13:43:11.999135 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e072ba5-f7d6-4378-9f71-b283a1a37bc0" containerName="aodh-db-sync" Jan 22 13:43:11 crc kubenswrapper[4773]: I0122 13:43:11.999156 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e072ba5-f7d6-4378-9f71-b283a1a37bc0" containerName="aodh-db-sync" Jan 22 13:43:11 crc kubenswrapper[4773]: I0122 13:43:11.999537 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e072ba5-f7d6-4378-9f71-b283a1a37bc0" containerName="aodh-db-sync" Jan 22 13:43:12 crc kubenswrapper[4773]: I0122 13:43:12.002226 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Jan 22 13:43:12 crc kubenswrapper[4773]: I0122 13:43:12.004576 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-dtqmr" Jan 22 13:43:12 crc kubenswrapper[4773]: I0122 13:43:12.005065 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Jan 22 13:43:12 crc kubenswrapper[4773]: I0122 13:43:12.005161 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Jan 22 13:43:12 crc kubenswrapper[4773]: I0122 13:43:12.006842 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Jan 22 13:43:12 crc kubenswrapper[4773]: I0122 13:43:12.066458 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/269fea45-fd6d-4fd7-a896-da9f0d41d928-config-data\") pod \"aodh-0\" (UID: \"269fea45-fd6d-4fd7-a896-da9f0d41d928\") " pod="openstack/aodh-0" Jan 22 13:43:12 crc kubenswrapper[4773]: I0122 13:43:12.066577 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/269fea45-fd6d-4fd7-a896-da9f0d41d928-combined-ca-bundle\") pod \"aodh-0\" (UID: \"269fea45-fd6d-4fd7-a896-da9f0d41d928\") " pod="openstack/aodh-0" Jan 22 13:43:12 crc kubenswrapper[4773]: I0122 13:43:12.066681 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w924p\" (UniqueName: \"kubernetes.io/projected/269fea45-fd6d-4fd7-a896-da9f0d41d928-kube-api-access-w924p\") pod \"aodh-0\" (UID: \"269fea45-fd6d-4fd7-a896-da9f0d41d928\") " pod="openstack/aodh-0" Jan 22 13:43:12 crc kubenswrapper[4773]: I0122 13:43:12.066825 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/269fea45-fd6d-4fd7-a896-da9f0d41d928-scripts\") pod \"aodh-0\" (UID: \"269fea45-fd6d-4fd7-a896-da9f0d41d928\") " pod="openstack/aodh-0" Jan 22 13:43:12 crc kubenswrapper[4773]: I0122 13:43:12.168971 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/269fea45-fd6d-4fd7-a896-da9f0d41d928-config-data\") pod \"aodh-0\" (UID: \"269fea45-fd6d-4fd7-a896-da9f0d41d928\") " pod="openstack/aodh-0" Jan 22 13:43:12 crc kubenswrapper[4773]: I0122 13:43:12.169656 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/269fea45-fd6d-4fd7-a896-da9f0d41d928-combined-ca-bundle\") pod \"aodh-0\" (UID: \"269fea45-fd6d-4fd7-a896-da9f0d41d928\") " pod="openstack/aodh-0" Jan 22 13:43:12 crc kubenswrapper[4773]: I0122 13:43:12.169845 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w924p\" (UniqueName: \"kubernetes.io/projected/269fea45-fd6d-4fd7-a896-da9f0d41d928-kube-api-access-w924p\") pod \"aodh-0\" (UID: \"269fea45-fd6d-4fd7-a896-da9f0d41d928\") " pod="openstack/aodh-0" Jan 22 13:43:12 crc kubenswrapper[4773]: I0122 13:43:12.170196 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/269fea45-fd6d-4fd7-a896-da9f0d41d928-scripts\") pod \"aodh-0\" (UID: \"269fea45-fd6d-4fd7-a896-da9f0d41d928\") " pod="openstack/aodh-0" Jan 22 13:43:12 crc kubenswrapper[4773]: I0122 13:43:12.183464 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/269fea45-fd6d-4fd7-a896-da9f0d41d928-scripts\") pod \"aodh-0\" (UID: \"269fea45-fd6d-4fd7-a896-da9f0d41d928\") " pod="openstack/aodh-0" Jan 22 13:43:12 crc kubenswrapper[4773]: I0122 13:43:12.190855 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/269fea45-fd6d-4fd7-a896-da9f0d41d928-combined-ca-bundle\") pod \"aodh-0\" (UID: \"269fea45-fd6d-4fd7-a896-da9f0d41d928\") " pod="openstack/aodh-0" Jan 22 13:43:12 crc kubenswrapper[4773]: I0122 13:43:12.201427 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/269fea45-fd6d-4fd7-a896-da9f0d41d928-config-data\") pod \"aodh-0\" (UID: \"269fea45-fd6d-4fd7-a896-da9f0d41d928\") " pod="openstack/aodh-0" Jan 22 13:43:12 crc kubenswrapper[4773]: I0122 13:43:12.214227 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w924p\" (UniqueName: \"kubernetes.io/projected/269fea45-fd6d-4fd7-a896-da9f0d41d928-kube-api-access-w924p\") pod \"aodh-0\" (UID: \"269fea45-fd6d-4fd7-a896-da9f0d41d928\") " pod="openstack/aodh-0" Jan 22 13:43:12 crc kubenswrapper[4773]: I0122 13:43:12.331656 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Jan 22 13:43:12 crc kubenswrapper[4773]: I0122 13:43:12.908502 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Jan 22 13:43:13 crc kubenswrapper[4773]: I0122 13:43:13.284546 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a","Type":"ContainerStarted","Data":"cbc138b83a58f4985bd04131b8ddc5394e2107435443d6a8fe76b55b9a47d949"} Jan 22 13:43:13 crc kubenswrapper[4773]: I0122 13:43:13.284597 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a","Type":"ContainerStarted","Data":"a1693d46026d8ab4fb2d271d9023f0a861ad9efadb473d2a3b961a455b6f065d"} Jan 22 13:43:13 crc kubenswrapper[4773]: I0122 13:43:13.286346 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"269fea45-fd6d-4fd7-a896-da9f0d41d928","Type":"ContainerStarted","Data":"e337df4d5dd015bdbf9718f8428107cbc59b55d58231464fdd0219d210c608ef"} Jan 22 13:43:13 crc kubenswrapper[4773]: I0122 13:43:13.314453 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=21.314427868 podStartE2EDuration="21.314427868s" podCreationTimestamp="2026-01-22 13:42:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:43:13.314336216 +0000 UTC m=+6500.892452031" watchObservedRunningTime="2026-01-22 13:43:13.314427868 +0000 UTC m=+6500.892543693" Jan 22 13:43:13 crc kubenswrapper[4773]: I0122 13:43:13.404442 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Jan 22 13:43:14 crc kubenswrapper[4773]: I0122 13:43:14.078082 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 13:43:14 crc kubenswrapper[4773]: I0122 13:43:14.079303 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerName="sg-core" containerID="cri-o://f58559c136e66b7439b156f2dadf87302528a7311b7601b294fb4376c40541a8" gracePeriod=30 Jan 22 13:43:14 crc kubenswrapper[4773]: I0122 13:43:14.079416 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerName="proxy-httpd" containerID="cri-o://42e42b2767b492994eaf126461a0003eb8374425f4557be77418f0b845cda307" gracePeriod=30 Jan 22 13:43:14 crc kubenswrapper[4773]: I0122 13:43:14.079315 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerName="ceilometer-notification-agent" containerID="cri-o://e82a5f1cbe3bf65f2c64731a61d61ed08be213641d8ea23411bf337c11e175ae" gracePeriod=30 Jan 22 13:43:14 crc kubenswrapper[4773]: I0122 13:43:14.079572 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerName="ceilometer-central-agent" containerID="cri-o://c3017f645beb4cd384c4256392efdb0739ab8defb529c311f13071d42d19c8e6" gracePeriod=30 Jan 22 13:43:14 crc kubenswrapper[4773]: I0122 13:43:14.182096 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" 
podUID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.1.151:3000/\": read tcp 10.217.0.2:40028->10.217.1.151:3000: read: connection reset by peer" Jan 22 13:43:14 crc kubenswrapper[4773]: I0122 13:43:14.298819 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"269fea45-fd6d-4fd7-a896-da9f0d41d928","Type":"ContainerStarted","Data":"89878cec347e959ef683eff8e250d97f3f9277bead8e98879fc6d3d96fa6513a"} Jan 22 13:43:14 crc kubenswrapper[4773]: I0122 13:43:14.302337 4773 generic.go:334] "Generic (PLEG): container finished" podID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerID="f58559c136e66b7439b156f2dadf87302528a7311b7601b294fb4376c40541a8" exitCode=2 Jan 22 13:43:14 crc kubenswrapper[4773]: I0122 13:43:14.302408 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f0beac6-9234-46e0-86bc-e61e4dbbaf90","Type":"ContainerDied","Data":"f58559c136e66b7439b156f2dadf87302528a7311b7601b294fb4376c40541a8"} Jan 22 13:43:14 crc kubenswrapper[4773]: E0122 13:43:14.950127 4773 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf109a5c0_b59c_4a1e_b9bc_0a1bb014ea18.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod545b3013_82c2_48cb_ad02_8062e2f57b76.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod545b3013_82c2_48cb_ad02_8062e2f57b76.slice/crio-db51c88cdae328b82c205e6a1f976d11ca892bd544f9b40d34016b24be9c1192\": RecentStats: unable to find data in memory cache]" Jan 22 13:43:15 crc kubenswrapper[4773]: I0122 13:43:15.149545 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.1.151:3000/\": dial tcp 10.217.1.151:3000: connect: connection refused" Jan 22 13:43:15 crc kubenswrapper[4773]: I0122 13:43:15.323959 4773 generic.go:334] "Generic (PLEG): container finished" podID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerID="42e42b2767b492994eaf126461a0003eb8374425f4557be77418f0b845cda307" exitCode=0 Jan 22 13:43:15 crc kubenswrapper[4773]: I0122 13:43:15.323996 4773 generic.go:334] "Generic (PLEG): container finished" podID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerID="c3017f645beb4cd384c4256392efdb0739ab8defb529c311f13071d42d19c8e6" exitCode=0 Jan 22 13:43:15 crc kubenswrapper[4773]: I0122 13:43:15.324054 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f0beac6-9234-46e0-86bc-e61e4dbbaf90","Type":"ContainerDied","Data":"42e42b2767b492994eaf126461a0003eb8374425f4557be77418f0b845cda307"} Jan 22 13:43:15 crc kubenswrapper[4773]: I0122 13:43:15.324081 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f0beac6-9234-46e0-86bc-e61e4dbbaf90","Type":"ContainerDied","Data":"c3017f645beb4cd384c4256392efdb0739ab8defb529c311f13071d42d19c8e6"} Jan 22 13:43:15 crc kubenswrapper[4773]: I0122 13:43:15.328125 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"269fea45-fd6d-4fd7-a896-da9f0d41d928","Type":"ContainerStarted","Data":"f5a7903526ca67dcdf751c97aa257c3a8920389a78e152cc6d8f98c7f0a3c7b2"} Jan 22 13:43:15 
crc kubenswrapper[4773]: I0122 13:43:15.665352 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"] Jan 22 13:43:16 crc kubenswrapper[4773]: I0122 13:43:16.365508 4773 generic.go:334] "Generic (PLEG): container finished" podID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerID="e82a5f1cbe3bf65f2c64731a61d61ed08be213641d8ea23411bf337c11e175ae" exitCode=0 Jan 22 13:43:16 crc kubenswrapper[4773]: I0122 13:43:16.365591 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f0beac6-9234-46e0-86bc-e61e4dbbaf90","Type":"ContainerDied","Data":"e82a5f1cbe3bf65f2c64731a61d61ed08be213641d8ea23411bf337c11e175ae"} Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.173055 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.204217 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-log-httpd\") pod \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.204441 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-config-data\") pod \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.204972 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-sg-core-conf-yaml\") pod \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.205083 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-scripts\") pod \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.204986 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3f0beac6-9234-46e0-86bc-e61e4dbbaf90" (UID: "3f0beac6-9234-46e0-86bc-e61e4dbbaf90"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.205180 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-run-httpd\") pod \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.205210 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gkbdp\" (UniqueName: \"kubernetes.io/projected/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-kube-api-access-gkbdp\") pod \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.205258 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-combined-ca-bundle\") pod \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\" (UID: \"3f0beac6-9234-46e0-86bc-e61e4dbbaf90\") " Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.205709 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3f0beac6-9234-46e0-86bc-e61e4dbbaf90" (UID: "3f0beac6-9234-46e0-86bc-e61e4dbbaf90"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.206567 4773 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.206593 4773 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.212463 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-scripts" (OuterVolumeSpecName: "scripts") pod "3f0beac6-9234-46e0-86bc-e61e4dbbaf90" (UID: "3f0beac6-9234-46e0-86bc-e61e4dbbaf90"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.216623 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-kube-api-access-gkbdp" (OuterVolumeSpecName: "kube-api-access-gkbdp") pod "3f0beac6-9234-46e0-86bc-e61e4dbbaf90" (UID: "3f0beac6-9234-46e0-86bc-e61e4dbbaf90"). InnerVolumeSpecName "kube-api-access-gkbdp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.273040 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3f0beac6-9234-46e0-86bc-e61e4dbbaf90" (UID: "3f0beac6-9234-46e0-86bc-e61e4dbbaf90"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.309026 4773 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.309061 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.309073 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gkbdp\" (UniqueName: \"kubernetes.io/projected/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-kube-api-access-gkbdp\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.328320 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3f0beac6-9234-46e0-86bc-e61e4dbbaf90" (UID: "3f0beac6-9234-46e0-86bc-e61e4dbbaf90"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.383096 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.383076 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f0beac6-9234-46e0-86bc-e61e4dbbaf90","Type":"ContainerDied","Data":"62ee0a93ed2d63e767b09e4ca0372fc714435ecb3179175ce220999cc60e478c"} Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.383181 4773 scope.go:117] "RemoveContainer" containerID="42e42b2767b492994eaf126461a0003eb8374425f4557be77418f0b845cda307" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.386090 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-config-data" (OuterVolumeSpecName: "config-data") pod "3f0beac6-9234-46e0-86bc-e61e4dbbaf90" (UID: "3f0beac6-9234-46e0-86bc-e61e4dbbaf90"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.387017 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"269fea45-fd6d-4fd7-a896-da9f0d41d928","Type":"ContainerStarted","Data":"c953f2b447b207bccfea73fabaaeb9924bf75d9089a16af9549f7a45e1660f21"} Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.411209 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.411258 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f0beac6-9234-46e0-86bc-e61e4dbbaf90-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.495394 4773 scope.go:117] "RemoveContainer" containerID="f58559c136e66b7439b156f2dadf87302528a7311b7601b294fb4376c40541a8" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.528466 4773 scope.go:117] "RemoveContainer" containerID="e82a5f1cbe3bf65f2c64731a61d61ed08be213641d8ea23411bf337c11e175ae" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.550510 4773 scope.go:117] "RemoveContainer" containerID="c3017f645beb4cd384c4256392efdb0739ab8defb529c311f13071d42d19c8e6" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.658601 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:43:17 crc kubenswrapper[4773]: E0122 13:43:17.658978 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.740198 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.759340 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.770051 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 22 13:43:17 crc kubenswrapper[4773]: E0122 13:43:17.770573 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerName="ceilometer-notification-agent" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.770595 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerName="ceilometer-notification-agent" Jan 22 13:43:17 crc kubenswrapper[4773]: E0122 13:43:17.770617 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerName="sg-core" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.770627 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerName="sg-core" Jan 22 13:43:17 crc kubenswrapper[4773]: E0122 13:43:17.770643 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerName="ceilometer-central-agent" Jan 22 
13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.770652 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerName="ceilometer-central-agent" Jan 22 13:43:17 crc kubenswrapper[4773]: E0122 13:43:17.770660 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerName="proxy-httpd" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.770666 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerName="proxy-httpd" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.770855 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerName="ceilometer-notification-agent" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.770872 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerName="sg-core" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.770880 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerName="ceilometer-central-agent" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.770896 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" containerName="proxy-httpd" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.772962 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.776789 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.782718 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.798755 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.823630 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4c9pb\" (UniqueName: \"kubernetes.io/projected/e5052b30-d1ac-4771-9ace-f47e3511c4dc-kube-api-access-4c9pb\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.823683 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.823729 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-config-data\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.824034 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-scripts\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 
22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.824121 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.824195 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5052b30-d1ac-4771-9ace-f47e3511c4dc-log-httpd\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.824510 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5052b30-d1ac-4771-9ace-f47e3511c4dc-run-httpd\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.927118 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-config-data\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.927260 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-scripts\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.927326 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.927381 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5052b30-d1ac-4771-9ace-f47e3511c4dc-log-httpd\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.927543 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5052b30-d1ac-4771-9ace-f47e3511c4dc-run-httpd\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.927701 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4c9pb\" (UniqueName: \"kubernetes.io/projected/e5052b30-d1ac-4771-9ace-f47e3511c4dc-kube-api-access-4c9pb\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.927746 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " 
pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.928145 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5052b30-d1ac-4771-9ace-f47e3511c4dc-log-httpd\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.928427 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5052b30-d1ac-4771-9ace-f47e3511c4dc-run-httpd\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.932382 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.934691 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.940375 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-scripts\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.941248 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-config-data\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 22 13:43:17 crc kubenswrapper[4773]: I0122 13:43:17.942972 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4c9pb\" (UniqueName: \"kubernetes.io/projected/e5052b30-d1ac-4771-9ace-f47e3511c4dc-kube-api-access-4c9pb\") pod \"ceilometer-0\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " pod="openstack/ceilometer-0" Jan 22 13:43:18 crc kubenswrapper[4773]: I0122 13:43:18.101791 4773 util.go:30] "No sandbox for pod can be found. 
Jan 22 13:43:18 crc kubenswrapper[4773]: I0122 13:43:18.368414 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 13:43:18 crc kubenswrapper[4773]: I0122 13:43:18.670491 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f0beac6-9234-46e0-86bc-e61e4dbbaf90" path="/var/lib/kubelet/pods/3f0beac6-9234-46e0-86bc-e61e4dbbaf90/volumes"
Jan 22 13:43:19 crc kubenswrapper[4773]: I0122 13:43:19.420460 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"269fea45-fd6d-4fd7-a896-da9f0d41d928","Type":"ContainerStarted","Data":"c61e7897787a79e6ec8186f04a436b0fda0f9af844de00b6add5e145cd051b76"}
Jan 22 13:43:19 crc kubenswrapper[4773]: I0122 13:43:19.420696 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerName="aodh-evaluator" containerID="cri-o://f5a7903526ca67dcdf751c97aa257c3a8920389a78e152cc6d8f98c7f0a3c7b2" gracePeriod=30
Jan 22 13:43:19 crc kubenswrapper[4773]: I0122 13:43:19.420653 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerName="aodh-listener" containerID="cri-o://c61e7897787a79e6ec8186f04a436b0fda0f9af844de00b6add5e145cd051b76" gracePeriod=30
Jan 22 13:43:19 crc kubenswrapper[4773]: I0122 13:43:19.420689 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerName="aodh-notifier" containerID="cri-o://c953f2b447b207bccfea73fabaaeb9924bf75d9089a16af9549f7a45e1660f21" gracePeriod=30
Jan 22 13:43:19 crc kubenswrapper[4773]: I0122 13:43:19.421331 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerName="aodh-api" containerID="cri-o://89878cec347e959ef683eff8e250d97f3f9277bead8e98879fc6d3d96fa6513a" gracePeriod=30
Jan 22 13:43:19 crc kubenswrapper[4773]: I0122 13:43:19.458710 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=2.225744925 podStartE2EDuration="8.458688016s" podCreationTimestamp="2026-01-22 13:43:11 +0000 UTC" firstStartedPulling="2026-01-22 13:43:12.915228468 +0000 UTC m=+6500.493344293" lastFinishedPulling="2026-01-22 13:43:19.148171519 +0000 UTC m=+6506.726287384" observedRunningTime="2026-01-22 13:43:19.455132606 +0000 UTC m=+6507.033248441" watchObservedRunningTime="2026-01-22 13:43:19.458688016 +0000 UTC m=+6507.036803831"
Jan 22 13:43:19 crc kubenswrapper[4773]: I0122 13:43:19.600601 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 13:43:20 crc kubenswrapper[4773]: I0122 13:43:20.434009 4773 generic.go:334] "Generic (PLEG): container finished" podID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerID="c953f2b447b207bccfea73fabaaeb9924bf75d9089a16af9549f7a45e1660f21" exitCode=0
Jan 22 13:43:20 crc kubenswrapper[4773]: I0122 13:43:20.434517 4773 generic.go:334] "Generic (PLEG): container finished" podID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerID="f5a7903526ca67dcdf751c97aa257c3a8920389a78e152cc6d8f98c7f0a3c7b2" exitCode=0
Jan 22 13:43:20 crc kubenswrapper[4773]: I0122 13:43:20.434527 4773 generic.go:334] "Generic (PLEG): container finished" podID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerID="89878cec347e959ef683eff8e250d97f3f9277bead8e98879fc6d3d96fa6513a" exitCode=0
Jan 22 13:43:20 crc kubenswrapper[4773]: I0122 13:43:20.434103 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"269fea45-fd6d-4fd7-a896-da9f0d41d928","Type":"ContainerDied","Data":"c953f2b447b207bccfea73fabaaeb9924bf75d9089a16af9549f7a45e1660f21"}
Jan 22 13:43:20 crc kubenswrapper[4773]: I0122 13:43:20.434614 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"269fea45-fd6d-4fd7-a896-da9f0d41d928","Type":"ContainerDied","Data":"f5a7903526ca67dcdf751c97aa257c3a8920389a78e152cc6d8f98c7f0a3c7b2"}
Jan 22 13:43:20 crc kubenswrapper[4773]: I0122 13:43:20.434629 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"269fea45-fd6d-4fd7-a896-da9f0d41d928","Type":"ContainerDied","Data":"89878cec347e959ef683eff8e250d97f3f9277bead8e98879fc6d3d96fa6513a"}
Jan 22 13:43:20 crc kubenswrapper[4773]: I0122 13:43:20.436694 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e5052b30-d1ac-4771-9ace-f47e3511c4dc","Type":"ContainerStarted","Data":"77432cd6c8c191e1a98d927f8ab804b2b9f499f85ea60b2a961ecd37cad27baf"}
Jan 22 13:43:20 crc kubenswrapper[4773]: I0122 13:43:20.436740 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e5052b30-d1ac-4771-9ace-f47e3511c4dc","Type":"ContainerStarted","Data":"212fc303a044e46afa89337782bc90cb930221299a66adfa10f6eae8a10b366a"}
Jan 22 13:43:21 crc kubenswrapper[4773]: I0122 13:43:21.447724 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e5052b30-d1ac-4771-9ace-f47e3511c4dc","Type":"ContainerStarted","Data":"38e2c667f1e375ad0350083179daefa503842bc8d2ea85e0ab517c332721fedd"}
Jan 22 13:43:22 crc kubenswrapper[4773]: I0122 13:43:22.468093 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e5052b30-d1ac-4771-9ace-f47e3511c4dc","Type":"ContainerStarted","Data":"149ef151825849b2436fc57c06f137e70601816f5f4d8b016c41982e33aff19a"}
Jan 22 13:43:23 crc kubenswrapper[4773]: I0122 13:43:23.404447 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0"
Jan 22 13:43:23 crc kubenswrapper[4773]: I0122 13:43:23.409564 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0"
Jan 22 13:43:23 crc kubenswrapper[4773]: I0122 13:43:23.483522 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0"
Jan 22 13:43:24 crc kubenswrapper[4773]: I0122 13:43:24.048358 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-b3c5-account-create-update-q77bn"]
Jan 22 13:43:24 crc kubenswrapper[4773]: I0122 13:43:24.059739 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-b3c5-account-create-update-q77bn"]
Jan 22 13:43:24 crc kubenswrapper[4773]: I0122 13:43:24.492373 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e5052b30-d1ac-4771-9ace-f47e3511c4dc","Type":"ContainerStarted","Data":"ea94ad0597400326bd82aba45aa78ecd0c6b58825fff0092127c8424810d14b1"}
Jan 22 13:43:24 crc kubenswrapper[4773]: I0122 13:43:24.492685 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e5052b30-d1ac-4771-9ace-f47e3511c4dc" containerName="ceilometer-central-agent" containerID="cri-o://77432cd6c8c191e1a98d927f8ab804b2b9f499f85ea60b2a961ecd37cad27baf" gracePeriod=30
Jan 22 13:43:24 crc kubenswrapper[4773]: I0122 13:43:24.492705 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e5052b30-d1ac-4771-9ace-f47e3511c4dc" containerName="sg-core" containerID="cri-o://149ef151825849b2436fc57c06f137e70601816f5f4d8b016c41982e33aff19a" gracePeriod=30
Jan 22 13:43:24 crc kubenswrapper[4773]: I0122 13:43:24.492752 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e5052b30-d1ac-4771-9ace-f47e3511c4dc" containerName="ceilometer-notification-agent" containerID="cri-o://38e2c667f1e375ad0350083179daefa503842bc8d2ea85e0ab517c332721fedd" gracePeriod=30
Jan 22 13:43:24 crc kubenswrapper[4773]: I0122 13:43:24.492711 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e5052b30-d1ac-4771-9ace-f47e3511c4dc" containerName="proxy-httpd" containerID="cri-o://ea94ad0597400326bd82aba45aa78ecd0c6b58825fff0092127c8424810d14b1" gracePeriod=30
Jan 22 13:43:24 crc kubenswrapper[4773]: I0122 13:43:24.530265 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.732466447 podStartE2EDuration="7.530237307s" podCreationTimestamp="2026-01-22 13:43:17 +0000 UTC" firstStartedPulling="2026-01-22 13:43:19.604608477 +0000 UTC m=+6507.182724312" lastFinishedPulling="2026-01-22 13:43:23.402379347 +0000 UTC m=+6510.980495172" observedRunningTime="2026-01-22 13:43:24.511935502 +0000 UTC m=+6512.090051357" watchObservedRunningTime="2026-01-22 13:43:24.530237307 +0000 UTC m=+6512.108353132"
Jan 22 13:43:24 crc kubenswrapper[4773]: I0122 13:43:24.670723 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="454aa928-a6e0-4eaf-ab53-170bbed3d372" path="/var/lib/kubelet/pods/454aa928-a6e0-4eaf-ab53-170bbed3d372/volumes"
Jan 22 13:43:25 crc kubenswrapper[4773]: I0122 13:43:25.042419 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-f139-account-create-update-ngwbq"]
Jan 22 13:43:25 crc kubenswrapper[4773]: I0122 13:43:25.074713 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-9j8t9"]
Jan 22 13:43:25 crc kubenswrapper[4773]: I0122 13:43:25.086657 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-f139-account-create-update-ngwbq"]
Jan 22 13:43:25 crc kubenswrapper[4773]: I0122 13:43:25.097670 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-pd7zj"]
Jan 22 13:43:25 crc kubenswrapper[4773]: I0122 13:43:25.106571 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-c795l"]
Jan 22 13:43:25 crc kubenswrapper[4773]: I0122 13:43:25.114590 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-e9ca-account-create-update-8wgdg"]
Jan 22 13:43:25 crc kubenswrapper[4773]: I0122 13:43:25.122868 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-pd7zj"]
Jan 22 13:43:25 crc kubenswrapper[4773]: I0122 13:43:25.132382 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-c795l"]
Jan 22 13:43:25 crc kubenswrapper[4773]: I0122 13:43:25.141444 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-9j8t9"]
pods=["openstack/nova-api-db-create-9j8t9"] Jan 22 13:43:25 crc kubenswrapper[4773]: I0122 13:43:25.151026 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-e9ca-account-create-update-8wgdg"] Jan 22 13:43:25 crc kubenswrapper[4773]: E0122 13:43:25.278734 4773 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod545b3013_82c2_48cb_ad02_8062e2f57b76.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod545b3013_82c2_48cb_ad02_8062e2f57b76.slice/crio-db51c88cdae328b82c205e6a1f976d11ca892bd544f9b40d34016b24be9c1192\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf109a5c0_b59c_4a1e_b9bc_0a1bb014ea18.slice\": RecentStats: unable to find data in memory cache]" Jan 22 13:43:25 crc kubenswrapper[4773]: I0122 13:43:25.505729 4773 generic.go:334] "Generic (PLEG): container finished" podID="e5052b30-d1ac-4771-9ace-f47e3511c4dc" containerID="ea94ad0597400326bd82aba45aa78ecd0c6b58825fff0092127c8424810d14b1" exitCode=0 Jan 22 13:43:25 crc kubenswrapper[4773]: I0122 13:43:25.505773 4773 generic.go:334] "Generic (PLEG): container finished" podID="e5052b30-d1ac-4771-9ace-f47e3511c4dc" containerID="149ef151825849b2436fc57c06f137e70601816f5f4d8b016c41982e33aff19a" exitCode=2 Jan 22 13:43:25 crc kubenswrapper[4773]: I0122 13:43:25.505783 4773 generic.go:334] "Generic (PLEG): container finished" podID="e5052b30-d1ac-4771-9ace-f47e3511c4dc" containerID="38e2c667f1e375ad0350083179daefa503842bc8d2ea85e0ab517c332721fedd" exitCode=0 Jan 22 13:43:25 crc kubenswrapper[4773]: I0122 13:43:25.505791 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e5052b30-d1ac-4771-9ace-f47e3511c4dc","Type":"ContainerDied","Data":"ea94ad0597400326bd82aba45aa78ecd0c6b58825fff0092127c8424810d14b1"} Jan 22 13:43:25 crc kubenswrapper[4773]: I0122 13:43:25.505835 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e5052b30-d1ac-4771-9ace-f47e3511c4dc","Type":"ContainerDied","Data":"149ef151825849b2436fc57c06f137e70601816f5f4d8b016c41982e33aff19a"} Jan 22 13:43:25 crc kubenswrapper[4773]: I0122 13:43:25.505863 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e5052b30-d1ac-4771-9ace-f47e3511c4dc","Type":"ContainerDied","Data":"38e2c667f1e375ad0350083179daefa503842bc8d2ea85e0ab517c332721fedd"} Jan 22 13:43:26 crc kubenswrapper[4773]: I0122 13:43:26.694736 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f323d06-9ccd-47eb-9cc2-1195d6d87e75" path="/var/lib/kubelet/pods/1f323d06-9ccd-47eb-9cc2-1195d6d87e75/volumes" Jan 22 13:43:26 crc kubenswrapper[4773]: I0122 13:43:26.698438 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="317ed68c-c01c-46b2-b8d3-ba753fb1e963" path="/var/lib/kubelet/pods/317ed68c-c01c-46b2-b8d3-ba753fb1e963/volumes" Jan 22 13:43:26 crc kubenswrapper[4773]: I0122 13:43:26.709836 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45217af6-253f-424f-b146-2f26a66fa9df" path="/var/lib/kubelet/pods/45217af6-253f-424f-b146-2f26a66fa9df/volumes" Jan 22 13:43:26 crc kubenswrapper[4773]: I0122 13:43:26.713755 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b75d7951-484e-4300-980f-1d6fd90fafda" 
path="/var/lib/kubelet/pods/b75d7951-484e-4300-980f-1d6fd90fafda/volumes" Jan 22 13:43:26 crc kubenswrapper[4773]: I0122 13:43:26.721697 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c08aea5e-4b5a-4973-8307-71c38dec2718" path="/var/lib/kubelet/pods/c08aea5e-4b5a-4973-8307-71c38dec2718/volumes" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.332592 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.467531 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4c9pb\" (UniqueName: \"kubernetes.io/projected/e5052b30-d1ac-4771-9ace-f47e3511c4dc-kube-api-access-4c9pb\") pod \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.467651 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5052b30-d1ac-4771-9ace-f47e3511c4dc-log-httpd\") pod \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.467706 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-scripts\") pod \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.467764 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-sg-core-conf-yaml\") pod \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.467807 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-combined-ca-bundle\") pod \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.467828 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5052b30-d1ac-4771-9ace-f47e3511c4dc-run-httpd\") pod \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.467929 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-config-data\") pod \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\" (UID: \"e5052b30-d1ac-4771-9ace-f47e3511c4dc\") " Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.469549 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5052b30-d1ac-4771-9ace-f47e3511c4dc-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e5052b30-d1ac-4771-9ace-f47e3511c4dc" (UID: "e5052b30-d1ac-4771-9ace-f47e3511c4dc"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.469868 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5052b30-d1ac-4771-9ace-f47e3511c4dc-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e5052b30-d1ac-4771-9ace-f47e3511c4dc" (UID: "e5052b30-d1ac-4771-9ace-f47e3511c4dc"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.477501 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-scripts" (OuterVolumeSpecName: "scripts") pod "e5052b30-d1ac-4771-9ace-f47e3511c4dc" (UID: "e5052b30-d1ac-4771-9ace-f47e3511c4dc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.477592 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5052b30-d1ac-4771-9ace-f47e3511c4dc-kube-api-access-4c9pb" (OuterVolumeSpecName: "kube-api-access-4c9pb") pod "e5052b30-d1ac-4771-9ace-f47e3511c4dc" (UID: "e5052b30-d1ac-4771-9ace-f47e3511c4dc"). InnerVolumeSpecName "kube-api-access-4c9pb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.506720 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e5052b30-d1ac-4771-9ace-f47e3511c4dc" (UID: "e5052b30-d1ac-4771-9ace-f47e3511c4dc"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.543842 4773 generic.go:334] "Generic (PLEG): container finished" podID="e5052b30-d1ac-4771-9ace-f47e3511c4dc" containerID="77432cd6c8c191e1a98d927f8ab804b2b9f499f85ea60b2a961ecd37cad27baf" exitCode=0 Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.543944 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.543978 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e5052b30-d1ac-4771-9ace-f47e3511c4dc","Type":"ContainerDied","Data":"77432cd6c8c191e1a98d927f8ab804b2b9f499f85ea60b2a961ecd37cad27baf"} Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.544269 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e5052b30-d1ac-4771-9ace-f47e3511c4dc","Type":"ContainerDied","Data":"212fc303a044e46afa89337782bc90cb930221299a66adfa10f6eae8a10b366a"} Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.544345 4773 scope.go:117] "RemoveContainer" containerID="ea94ad0597400326bd82aba45aa78ecd0c6b58825fff0092127c8424810d14b1" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.565470 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e5052b30-d1ac-4771-9ace-f47e3511c4dc" (UID: "e5052b30-d1ac-4771-9ace-f47e3511c4dc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.571155 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4c9pb\" (UniqueName: \"kubernetes.io/projected/e5052b30-d1ac-4771-9ace-f47e3511c4dc-kube-api-access-4c9pb\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.571193 4773 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5052b30-d1ac-4771-9ace-f47e3511c4dc-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.571206 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.571217 4773 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.571226 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.571234 4773 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5052b30-d1ac-4771-9ace-f47e3511c4dc-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.591782 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-config-data" (OuterVolumeSpecName: "config-data") pod "e5052b30-d1ac-4771-9ace-f47e3511c4dc" (UID: "e5052b30-d1ac-4771-9ace-f47e3511c4dc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.621484 4773 scope.go:117] "RemoveContainer" containerID="149ef151825849b2436fc57c06f137e70601816f5f4d8b016c41982e33aff19a" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.644639 4773 scope.go:117] "RemoveContainer" containerID="38e2c667f1e375ad0350083179daefa503842bc8d2ea85e0ab517c332721fedd" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.665683 4773 scope.go:117] "RemoveContainer" containerID="77432cd6c8c191e1a98d927f8ab804b2b9f499f85ea60b2a961ecd37cad27baf" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.673458 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5052b30-d1ac-4771-9ace-f47e3511c4dc-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.688748 4773 scope.go:117] "RemoveContainer" containerID="ea94ad0597400326bd82aba45aa78ecd0c6b58825fff0092127c8424810d14b1" Jan 22 13:43:28 crc kubenswrapper[4773]: E0122 13:43:28.689195 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea94ad0597400326bd82aba45aa78ecd0c6b58825fff0092127c8424810d14b1\": container with ID starting with ea94ad0597400326bd82aba45aa78ecd0c6b58825fff0092127c8424810d14b1 not found: ID does not exist" containerID="ea94ad0597400326bd82aba45aa78ecd0c6b58825fff0092127c8424810d14b1" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.689234 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea94ad0597400326bd82aba45aa78ecd0c6b58825fff0092127c8424810d14b1"} err="failed to get container status \"ea94ad0597400326bd82aba45aa78ecd0c6b58825fff0092127c8424810d14b1\": rpc error: code = NotFound desc = could not find container \"ea94ad0597400326bd82aba45aa78ecd0c6b58825fff0092127c8424810d14b1\": container with ID starting with ea94ad0597400326bd82aba45aa78ecd0c6b58825fff0092127c8424810d14b1 not found: ID does not exist" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.689257 4773 scope.go:117] "RemoveContainer" containerID="149ef151825849b2436fc57c06f137e70601816f5f4d8b016c41982e33aff19a" Jan 22 13:43:28 crc kubenswrapper[4773]: E0122 13:43:28.689722 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"149ef151825849b2436fc57c06f137e70601816f5f4d8b016c41982e33aff19a\": container with ID starting with 149ef151825849b2436fc57c06f137e70601816f5f4d8b016c41982e33aff19a not found: ID does not exist" containerID="149ef151825849b2436fc57c06f137e70601816f5f4d8b016c41982e33aff19a" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.689766 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"149ef151825849b2436fc57c06f137e70601816f5f4d8b016c41982e33aff19a"} err="failed to get container status \"149ef151825849b2436fc57c06f137e70601816f5f4d8b016c41982e33aff19a\": rpc error: code = NotFound desc = could not find container \"149ef151825849b2436fc57c06f137e70601816f5f4d8b016c41982e33aff19a\": container with ID starting with 149ef151825849b2436fc57c06f137e70601816f5f4d8b016c41982e33aff19a not found: ID does not exist" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.689800 4773 scope.go:117] "RemoveContainer" containerID="38e2c667f1e375ad0350083179daefa503842bc8d2ea85e0ab517c332721fedd" Jan 22 13:43:28 crc kubenswrapper[4773]: E0122 
13:43:28.690127 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38e2c667f1e375ad0350083179daefa503842bc8d2ea85e0ab517c332721fedd\": container with ID starting with 38e2c667f1e375ad0350083179daefa503842bc8d2ea85e0ab517c332721fedd not found: ID does not exist" containerID="38e2c667f1e375ad0350083179daefa503842bc8d2ea85e0ab517c332721fedd" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.690170 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38e2c667f1e375ad0350083179daefa503842bc8d2ea85e0ab517c332721fedd"} err="failed to get container status \"38e2c667f1e375ad0350083179daefa503842bc8d2ea85e0ab517c332721fedd\": rpc error: code = NotFound desc = could not find container \"38e2c667f1e375ad0350083179daefa503842bc8d2ea85e0ab517c332721fedd\": container with ID starting with 38e2c667f1e375ad0350083179daefa503842bc8d2ea85e0ab517c332721fedd not found: ID does not exist" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.690200 4773 scope.go:117] "RemoveContainer" containerID="77432cd6c8c191e1a98d927f8ab804b2b9f499f85ea60b2a961ecd37cad27baf" Jan 22 13:43:28 crc kubenswrapper[4773]: E0122 13:43:28.690490 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77432cd6c8c191e1a98d927f8ab804b2b9f499f85ea60b2a961ecd37cad27baf\": container with ID starting with 77432cd6c8c191e1a98d927f8ab804b2b9f499f85ea60b2a961ecd37cad27baf not found: ID does not exist" containerID="77432cd6c8c191e1a98d927f8ab804b2b9f499f85ea60b2a961ecd37cad27baf" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.690512 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77432cd6c8c191e1a98d927f8ab804b2b9f499f85ea60b2a961ecd37cad27baf"} err="failed to get container status \"77432cd6c8c191e1a98d927f8ab804b2b9f499f85ea60b2a961ecd37cad27baf\": rpc error: code = NotFound desc = could not find container \"77432cd6c8c191e1a98d927f8ab804b2b9f499f85ea60b2a961ecd37cad27baf\": container with ID starting with 77432cd6c8c191e1a98d927f8ab804b2b9f499f85ea60b2a961ecd37cad27baf not found: ID does not exist" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.889945 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.913755 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.939376 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 22 13:43:28 crc kubenswrapper[4773]: E0122 13:43:28.940053 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5052b30-d1ac-4771-9ace-f47e3511c4dc" containerName="ceilometer-central-agent" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.940079 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5052b30-d1ac-4771-9ace-f47e3511c4dc" containerName="ceilometer-central-agent" Jan 22 13:43:28 crc kubenswrapper[4773]: E0122 13:43:28.940109 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5052b30-d1ac-4771-9ace-f47e3511c4dc" containerName="proxy-httpd" Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.940118 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5052b30-d1ac-4771-9ace-f47e3511c4dc" containerName="proxy-httpd" Jan 22 13:43:28 crc kubenswrapper[4773]: E0122 
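
NOTE: The scope.go:117 "RemoveContainer" entries followed by E-level "ContainerStatus from runtime service failed ... NotFound" and "DeleteContainer returned error" above show the kubelet re-issuing removals for containers CRI-O has already deleted; the NotFound is benign and cleanup proceeds. A sketch of that idempotent-delete pattern (errNotFound and the runtime type here are invented for the example; the real code inspects the gRPC NotFound status code):

    package main

    import (
        "errors"
        "fmt"
    )

    var errNotFound = errors.New("container not found")

    type runtime struct{ containers map[string]bool }

    func (r *runtime) Remove(id string) error {
        if !r.containers[id] {
            return errNotFound
        }
        delete(r.containers, id)
        return nil
    }

    // removeIdempotent treats "already gone" as success, so a second
    // removal of the same ID is logged as NotFound and then ignored.
    func removeIdempotent(r *runtime, id string) error {
        if err := r.Remove(id); err != nil && !errors.Is(err, errNotFound) {
            return err
        }
        return nil
    }

    func main() {
        r := &runtime{containers: map[string]bool{"ea94ad05": true}}
        fmt.Println(removeIdempotent(r, "ea94ad05")) // <nil>
        fmt.Println(removeIdempotent(r, "ea94ad05")) // <nil> again: NotFound swallowed
    }
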
Jan 22 13:43:28 crc kubenswrapper[4773]: E0122 13:43:28.940181 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5052b30-d1ac-4771-9ace-f47e3511c4dc" containerName="sg-core"
Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.940189 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5052b30-d1ac-4771-9ace-f47e3511c4dc" containerName="sg-core"
Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.940472 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5052b30-d1ac-4771-9ace-f47e3511c4dc" containerName="sg-core"
Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.940508 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5052b30-d1ac-4771-9ace-f47e3511c4dc" containerName="ceilometer-central-agent"
Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.940529 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5052b30-d1ac-4771-9ace-f47e3511c4dc" containerName="proxy-httpd"
Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.940561 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5052b30-d1ac-4771-9ace-f47e3511c4dc" containerName="ceilometer-notification-agent"
Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.943351 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.946260 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.948337 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.965194 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.983490 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/07d47c0f-c8b6-4850-a950-692906949bdd-log-httpd\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.983649 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/07d47c0f-c8b6-4850-a950-692906949bdd-run-httpd\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.983717 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nznvb\" (UniqueName: \"kubernetes.io/projected/07d47c0f-c8b6-4850-a950-692906949bdd-kube-api-access-nznvb\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.983810 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.983996 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-scripts\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.984078 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-config-data\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:28 crc kubenswrapper[4773]: I0122 13:43:28.984178 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:29 crc kubenswrapper[4773]: I0122 13:43:29.087765 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/07d47c0f-c8b6-4850-a950-692906949bdd-run-httpd\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:29 crc kubenswrapper[4773]: I0122 13:43:29.087852 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nznvb\" (UniqueName: \"kubernetes.io/projected/07d47c0f-c8b6-4850-a950-692906949bdd-kube-api-access-nznvb\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:29 crc kubenswrapper[4773]: I0122 13:43:29.087941 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:29 crc kubenswrapper[4773]: I0122 13:43:29.087992 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-scripts\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:29 crc kubenswrapper[4773]: I0122 13:43:29.088022 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-config-data\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:29 crc kubenswrapper[4773]: I0122 13:43:29.088065 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:29 crc kubenswrapper[4773]: I0122 13:43:29.088136 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/07d47c0f-c8b6-4850-a950-692906949bdd-log-httpd\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:29 crc kubenswrapper[4773]: I0122 13:43:29.088408 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/07d47c0f-c8b6-4850-a950-692906949bdd-run-httpd\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:29 crc kubenswrapper[4773]: I0122 13:43:29.088629 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/07d47c0f-c8b6-4850-a950-692906949bdd-log-httpd\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:29 crc kubenswrapper[4773]: I0122 13:43:29.093229 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:29 crc kubenswrapper[4773]: I0122 13:43:29.093275 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:29 crc kubenswrapper[4773]: I0122 13:43:29.095363 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-scripts\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:29 crc kubenswrapper[4773]: I0122 13:43:29.096559 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-config-data\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:29 crc kubenswrapper[4773]: I0122 13:43:29.108459 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nznvb\" (UniqueName: \"kubernetes.io/projected/07d47c0f-c8b6-4850-a950-692906949bdd-kube-api-access-nznvb\") pod \"ceilometer-0\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " pod="openstack/ceilometer-0"
Jan 22 13:43:29 crc kubenswrapper[4773]: I0122 13:43:29.287661 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 22 13:43:29 crc kubenswrapper[4773]: I0122 13:43:29.760583 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 22 13:43:29 crc kubenswrapper[4773]: W0122 13:43:29.774659 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07d47c0f_c8b6_4850_a950_692906949bdd.slice/crio-4064579bc3be456fc7a6e54289002a223ca01739b5f2c3637334c94a64200eeb WatchSource:0}: Error finding container 4064579bc3be456fc7a6e54289002a223ca01739b5f2c3637334c94a64200eeb: Status 404 returned error can't find the container with id 4064579bc3be456fc7a6e54289002a223ca01739b5f2c3637334c94a64200eeb
Jan 22 13:43:30 crc kubenswrapper[4773]: I0122 13:43:30.573982 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"07d47c0f-c8b6-4850-a950-692906949bdd","Type":"ContainerStarted","Data":"2db2434d68aeb15c856d58768f25a6bf5dd6e609d79b76abc9b6a13aeaf07158"}
Jan 22 13:43:30 crc kubenswrapper[4773]: I0122 13:43:30.574686 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"07d47c0f-c8b6-4850-a950-692906949bdd","Type":"ContainerStarted","Data":"4064579bc3be456fc7a6e54289002a223ca01739b5f2c3637334c94a64200eeb"}
Jan 22 13:43:30 crc kubenswrapper[4773]: I0122 13:43:30.677962 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810"
Jan 22 13:43:30 crc kubenswrapper[4773]: E0122 13:43:30.678337 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:43:30 crc kubenswrapper[4773]: I0122 13:43:30.703062 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5052b30-d1ac-4771-9ace-f47e3511c4dc" path="/var/lib/kubelet/pods/e5052b30-d1ac-4771-9ace-f47e3511c4dc/volumes"
Jan 22 13:43:31 crc kubenswrapper[4773]: I0122 13:43:31.585987 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"07d47c0f-c8b6-4850-a950-692906949bdd","Type":"ContainerStarted","Data":"5196ef9045bc2fee474875509a25dd3bfceceb9c30c1df98a41666c3e138001e"}
Jan 22 13:43:32 crc kubenswrapper[4773]: I0122 13:43:32.596651 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"07d47c0f-c8b6-4850-a950-692906949bdd","Type":"ContainerStarted","Data":"fdff99889a56b0b9d67065673a29bbece2e044cd61c51318b56efdef625126cf"}
Jan 22 13:43:33 crc kubenswrapper[4773]: I0122 13:43:33.612177 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"07d47c0f-c8b6-4850-a950-692906949bdd","Type":"ContainerStarted","Data":"d02b99b5e05e1d6452897d62a49b07ecaedcfcc3164fe335b7f82d835f4e5d25"}
Jan 22 13:43:33 crc kubenswrapper[4773]: I0122 13:43:33.612801 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Jan 22 13:43:34 crc kubenswrapper[4773]: I0122 13:43:34.037556 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.717773171 podStartE2EDuration="6.037530786s" podCreationTimestamp="2026-01-22 13:43:28 +0000 UTC" firstStartedPulling="2026-01-22 13:43:29.781693112 +0000 UTC m=+6517.359808947" lastFinishedPulling="2026-01-22 13:43:33.101450737 +0000 UTC m=+6520.679566562" observedRunningTime="2026-01-22 13:43:33.631881455 +0000 UTC m=+6521.209997290" watchObservedRunningTime="2026-01-22 13:43:34.037530786 +0000 UTC m=+6521.615646611"
Jan 22 13:43:34 crc kubenswrapper[4773]: I0122 13:43:34.041155 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-zw5ls"]
Jan 22 13:43:34 crc kubenswrapper[4773]: I0122 13:43:34.054127 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-zw5ls"]
Jan 22 13:43:34 crc kubenswrapper[4773]: I0122 13:43:34.680634 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14f6d50c-c34c-438d-9e1a-994b1948f410" path="/var/lib/kubelet/pods/14f6d50c-c34c-438d-9e1a-994b1948f410/volumes"
Jan 22 13:43:35 crc kubenswrapper[4773]: E0122 13:43:35.591026 4773 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod545b3013_82c2_48cb_ad02_8062e2f57b76.slice/crio-db51c88cdae328b82c205e6a1f976d11ca892bd544f9b40d34016b24be9c1192\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf109a5c0_b59c_4a1e_b9bc_0a1bb014ea18.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod545b3013_82c2_48cb_ad02_8062e2f57b76.slice\": RecentStats: unable to find data in memory cache]"
Jan 22 13:43:41 crc kubenswrapper[4773]: I0122 13:43:41.657894 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810"
Jan 22 13:43:41 crc kubenswrapper[4773]: E0122 13:43:41.658700 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:43:45 crc kubenswrapper[4773]: E0122 13:43:45.872607 4773 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod545b3013_82c2_48cb_ad02_8062e2f57b76.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf109a5c0_b59c_4a1e_b9bc_0a1bb014ea18.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod545b3013_82c2_48cb_ad02_8062e2f57b76.slice/crio-db51c88cdae328b82c205e6a1f976d11ca892bd544f9b40d34016b24be9c1192\": RecentStats: unable to find data in memory cache]"
Jan 22 13:43:49 crc kubenswrapper[4773]: I0122 13:43:49.831360 4773 generic.go:334] "Generic (PLEG): container finished" podID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerID="c61e7897787a79e6ec8186f04a436b0fda0f9af844de00b6add5e145cd051b76" exitCode=137
Jan 22 13:43:49 crc kubenswrapper[4773]: I0122 13:43:49.831407 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"269fea45-fd6d-4fd7-a896-da9f0d41d928","Type":"ContainerDied","Data":"c61e7897787a79e6ec8186f04a436b0fda0f9af844de00b6add5e145cd051b76"}
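
NOTE: aodh-listener (c61e7897...) was sent its kill at 13:43:19.420653 with gracePeriod=30 and is reported finished at 13:43:49.831360, roughly 30.4s later, with exitCode=137 = 128 + 9 (SIGKILL): consistent with a SIGTERM that went unanswered for the full grace period, followed by a hard kill. A minimal sketch of that SIGTERM-then-SIGKILL pattern (illustrative only, Unix-specific, not kubelet's code):

    package main

    import (
        "os/exec"
        "syscall"
        "time"
    )

    // killWithGrace sends SIGTERM, waits up to grace, then SIGKILLs.
    // A process that dies to SIGKILL exits with status 128+9 = 137.
    func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
        done := make(chan error, 1)
        go func() { done <- cmd.Wait() }()
        cmd.Process.Signal(syscall.SIGTERM)
        select {
        case <-done: // exited within the grace period
        case <-time.After(grace):
            cmd.Process.Kill() // SIGKILL after the grace period expires
            <-done
        }
    }

    func main() {
        cmd := exec.Command("sh", "-c", `trap "" TERM; sleep 300`) // ignores SIGTERM
        cmd.Start()
        killWithGrace(cmd, 2*time.Second)
    }
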
event={"ID":"269fea45-fd6d-4fd7-a896-da9f0d41d928","Type":"ContainerDied","Data":"c61e7897787a79e6ec8186f04a436b0fda0f9af844de00b6add5e145cd051b76"} Jan 22 13:43:49 crc kubenswrapper[4773]: I0122 13:43:49.936247 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.048883 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/269fea45-fd6d-4fd7-a896-da9f0d41d928-config-data\") pod \"269fea45-fd6d-4fd7-a896-da9f0d41d928\" (UID: \"269fea45-fd6d-4fd7-a896-da9f0d41d928\") " Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.048955 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/269fea45-fd6d-4fd7-a896-da9f0d41d928-scripts\") pod \"269fea45-fd6d-4fd7-a896-da9f0d41d928\" (UID: \"269fea45-fd6d-4fd7-a896-da9f0d41d928\") " Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.048980 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/269fea45-fd6d-4fd7-a896-da9f0d41d928-combined-ca-bundle\") pod \"269fea45-fd6d-4fd7-a896-da9f0d41d928\" (UID: \"269fea45-fd6d-4fd7-a896-da9f0d41d928\") " Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.049040 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w924p\" (UniqueName: \"kubernetes.io/projected/269fea45-fd6d-4fd7-a896-da9f0d41d928-kube-api-access-w924p\") pod \"269fea45-fd6d-4fd7-a896-da9f0d41d928\" (UID: \"269fea45-fd6d-4fd7-a896-da9f0d41d928\") " Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.060326 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/269fea45-fd6d-4fd7-a896-da9f0d41d928-kube-api-access-w924p" (OuterVolumeSpecName: "kube-api-access-w924p") pod "269fea45-fd6d-4fd7-a896-da9f0d41d928" (UID: "269fea45-fd6d-4fd7-a896-da9f0d41d928"). InnerVolumeSpecName "kube-api-access-w924p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.071575 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/269fea45-fd6d-4fd7-a896-da9f0d41d928-scripts" (OuterVolumeSpecName: "scripts") pod "269fea45-fd6d-4fd7-a896-da9f0d41d928" (UID: "269fea45-fd6d-4fd7-a896-da9f0d41d928"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.151810 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/269fea45-fd6d-4fd7-a896-da9f0d41d928-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.152145 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w924p\" (UniqueName: \"kubernetes.io/projected/269fea45-fd6d-4fd7-a896-da9f0d41d928-kube-api-access-w924p\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.197535 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/269fea45-fd6d-4fd7-a896-da9f0d41d928-config-data" (OuterVolumeSpecName: "config-data") pod "269fea45-fd6d-4fd7-a896-da9f0d41d928" (UID: "269fea45-fd6d-4fd7-a896-da9f0d41d928"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.209479 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/269fea45-fd6d-4fd7-a896-da9f0d41d928-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "269fea45-fd6d-4fd7-a896-da9f0d41d928" (UID: "269fea45-fd6d-4fd7-a896-da9f0d41d928"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.254833 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/269fea45-fd6d-4fd7-a896-da9f0d41d928-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.255175 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/269fea45-fd6d-4fd7-a896-da9f0d41d928-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.852162 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"269fea45-fd6d-4fd7-a896-da9f0d41d928","Type":"ContainerDied","Data":"e337df4d5dd015bdbf9718f8428107cbc59b55d58231464fdd0219d210c608ef"} Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.852809 4773 scope.go:117] "RemoveContainer" containerID="c61e7897787a79e6ec8186f04a436b0fda0f9af844de00b6add5e145cd051b76" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.852384 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.936238 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"] Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.947496 4773 scope.go:117] "RemoveContainer" containerID="c953f2b447b207bccfea73fabaaeb9924bf75d9089a16af9549f7a45e1660f21" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.963250 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-0"] Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.971537 4773 scope.go:117] "RemoveContainer" containerID="f5a7903526ca67dcdf751c97aa257c3a8920389a78e152cc6d8f98c7f0a3c7b2" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.983429 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Jan 22 13:43:50 crc kubenswrapper[4773]: E0122 13:43:50.983916 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerName="aodh-listener" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.983934 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerName="aodh-listener" Jan 22 13:43:50 crc kubenswrapper[4773]: E0122 13:43:50.983950 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerName="aodh-api" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.983956 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerName="aodh-api" Jan 22 13:43:50 crc kubenswrapper[4773]: E0122 13:43:50.983990 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerName="aodh-evaluator" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.983996 4773 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerName="aodh-evaluator" Jan 22 13:43:50 crc kubenswrapper[4773]: E0122 13:43:50.984022 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerName="aodh-notifier" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.984027 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerName="aodh-notifier" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.984228 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerName="aodh-listener" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.984245 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerName="aodh-notifier" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.984253 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerName="aodh-evaluator" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.984269 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="269fea45-fd6d-4fd7-a896-da9f0d41d928" containerName="aodh-api" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.989163 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.995157 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.995369 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-public-svc" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.995186 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-dtqmr" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.995722 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-internal-svc" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.996574 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Jan 22 13:43:50 crc kubenswrapper[4773]: I0122 13:43:50.999149 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.006637 4773 scope.go:117] "RemoveContainer" containerID="89878cec347e959ef683eff8e250d97f3f9277bead8e98879fc6d3d96fa6513a" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.077913 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/26a329e1-f0b6-46bb-98c5-6c094855f241-internal-tls-certs\") pod \"aodh-0\" (UID: \"26a329e1-f0b6-46bb-98c5-6c094855f241\") " pod="openstack/aodh-0" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.079532 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26a329e1-f0b6-46bb-98c5-6c094855f241-scripts\") pod \"aodh-0\" (UID: \"26a329e1-f0b6-46bb-98c5-6c094855f241\") " pod="openstack/aodh-0" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.079745 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28f5k\" (UniqueName: 
\"kubernetes.io/projected/26a329e1-f0b6-46bb-98c5-6c094855f241-kube-api-access-28f5k\") pod \"aodh-0\" (UID: \"26a329e1-f0b6-46bb-98c5-6c094855f241\") " pod="openstack/aodh-0" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.079930 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26a329e1-f0b6-46bb-98c5-6c094855f241-combined-ca-bundle\") pod \"aodh-0\" (UID: \"26a329e1-f0b6-46bb-98c5-6c094855f241\") " pod="openstack/aodh-0" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.080057 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/26a329e1-f0b6-46bb-98c5-6c094855f241-public-tls-certs\") pod \"aodh-0\" (UID: \"26a329e1-f0b6-46bb-98c5-6c094855f241\") " pod="openstack/aodh-0" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.080137 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26a329e1-f0b6-46bb-98c5-6c094855f241-config-data\") pod \"aodh-0\" (UID: \"26a329e1-f0b6-46bb-98c5-6c094855f241\") " pod="openstack/aodh-0" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.182701 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26a329e1-f0b6-46bb-98c5-6c094855f241-combined-ca-bundle\") pod \"aodh-0\" (UID: \"26a329e1-f0b6-46bb-98c5-6c094855f241\") " pod="openstack/aodh-0" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.182824 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/26a329e1-f0b6-46bb-98c5-6c094855f241-public-tls-certs\") pod \"aodh-0\" (UID: \"26a329e1-f0b6-46bb-98c5-6c094855f241\") " pod="openstack/aodh-0" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.182890 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26a329e1-f0b6-46bb-98c5-6c094855f241-config-data\") pod \"aodh-0\" (UID: \"26a329e1-f0b6-46bb-98c5-6c094855f241\") " pod="openstack/aodh-0" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.183032 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/26a329e1-f0b6-46bb-98c5-6c094855f241-internal-tls-certs\") pod \"aodh-0\" (UID: \"26a329e1-f0b6-46bb-98c5-6c094855f241\") " pod="openstack/aodh-0" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.183139 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26a329e1-f0b6-46bb-98c5-6c094855f241-scripts\") pod \"aodh-0\" (UID: \"26a329e1-f0b6-46bb-98c5-6c094855f241\") " pod="openstack/aodh-0" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.183305 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28f5k\" (UniqueName: \"kubernetes.io/projected/26a329e1-f0b6-46bb-98c5-6c094855f241-kube-api-access-28f5k\") pod \"aodh-0\" (UID: \"26a329e1-f0b6-46bb-98c5-6c094855f241\") " pod="openstack/aodh-0" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.187979 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/26a329e1-f0b6-46bb-98c5-6c094855f241-internal-tls-certs\") pod \"aodh-0\" (UID: \"26a329e1-f0b6-46bb-98c5-6c094855f241\") " pod="openstack/aodh-0" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.188921 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/26a329e1-f0b6-46bb-98c5-6c094855f241-public-tls-certs\") pod \"aodh-0\" (UID: \"26a329e1-f0b6-46bb-98c5-6c094855f241\") " pod="openstack/aodh-0" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.189680 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26a329e1-f0b6-46bb-98c5-6c094855f241-combined-ca-bundle\") pod \"aodh-0\" (UID: \"26a329e1-f0b6-46bb-98c5-6c094855f241\") " pod="openstack/aodh-0" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.189929 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26a329e1-f0b6-46bb-98c5-6c094855f241-config-data\") pod \"aodh-0\" (UID: \"26a329e1-f0b6-46bb-98c5-6c094855f241\") " pod="openstack/aodh-0" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.195537 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26a329e1-f0b6-46bb-98c5-6c094855f241-scripts\") pod \"aodh-0\" (UID: \"26a329e1-f0b6-46bb-98c5-6c094855f241\") " pod="openstack/aodh-0" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.203641 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28f5k\" (UniqueName: \"kubernetes.io/projected/26a329e1-f0b6-46bb-98c5-6c094855f241-kube-api-access-28f5k\") pod \"aodh-0\" (UID: \"26a329e1-f0b6-46bb-98c5-6c094855f241\") " pod="openstack/aodh-0" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.324394 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Jan 22 13:43:51 crc kubenswrapper[4773]: I0122 13:43:51.925342 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Jan 22 13:43:51 crc kubenswrapper[4773]: W0122 13:43:51.933697 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26a329e1_f0b6_46bb_98c5_6c094855f241.slice/crio-0339c1739448dc29258cc8953bd746b7b79d256a8ff8578ff9a22c7663a9bd5d WatchSource:0}: Error finding container 0339c1739448dc29258cc8953bd746b7b79d256a8ff8578ff9a22c7663a9bd5d: Status 404 returned error can't find the container with id 0339c1739448dc29258cc8953bd746b7b79d256a8ff8578ff9a22c7663a9bd5d Jan 22 13:43:52 crc kubenswrapper[4773]: I0122 13:43:52.699879 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="269fea45-fd6d-4fd7-a896-da9f0d41d928" path="/var/lib/kubelet/pods/269fea45-fd6d-4fd7-a896-da9f0d41d928/volumes" Jan 22 13:43:52 crc kubenswrapper[4773]: E0122 13:43:52.706627 4773 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/58e5917668272d233b07f18a1c47193db06b62de422c1f1a4d505857f9c6a303/diff" to get inode usage: stat /var/lib/containers/storage/overlay/58e5917668272d233b07f18a1c47193db06b62de422c1f1a4d505857f9c6a303/diff: no such file or directory, extraDiskErr: Jan 22 13:43:52 crc kubenswrapper[4773]: I0122 13:43:52.900488 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"26a329e1-f0b6-46bb-98c5-6c094855f241","Type":"ContainerStarted","Data":"0339c1739448dc29258cc8953bd746b7b79d256a8ff8578ff9a22c7663a9bd5d"} Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.044436 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s4ssz"] Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.058637 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-s4ssz"] Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.349419 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b6b5ff785-qwfwg"] Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.351404 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.354068 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.364073 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b6b5ff785-qwfwg"] Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.469368 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-dns-svc\") pod \"dnsmasq-dns-5b6b5ff785-qwfwg\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.469662 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-ovsdbserver-nb\") pod \"dnsmasq-dns-5b6b5ff785-qwfwg\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.469687 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-config\") pod \"dnsmasq-dns-5b6b5ff785-qwfwg\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.469724 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-468mh\" (UniqueName: \"kubernetes.io/projected/2ea5428a-d493-4da7-9844-bdf9a53e0f45-kube-api-access-468mh\") pod \"dnsmasq-dns-5b6b5ff785-qwfwg\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.469773 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-openstack-cell1\") pod \"dnsmasq-dns-5b6b5ff785-qwfwg\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.469805 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-ovsdbserver-sb\") pod \"dnsmasq-dns-5b6b5ff785-qwfwg\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.571141 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-dns-svc\") pod \"dnsmasq-dns-5b6b5ff785-qwfwg\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.571209 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-ovsdbserver-nb\") pod \"dnsmasq-dns-5b6b5ff785-qwfwg\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " 
pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.571236 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-config\") pod \"dnsmasq-dns-5b6b5ff785-qwfwg\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.571271 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-468mh\" (UniqueName: \"kubernetes.io/projected/2ea5428a-d493-4da7-9844-bdf9a53e0f45-kube-api-access-468mh\") pod \"dnsmasq-dns-5b6b5ff785-qwfwg\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.571342 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-openstack-cell1\") pod \"dnsmasq-dns-5b6b5ff785-qwfwg\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.571381 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-ovsdbserver-sb\") pod \"dnsmasq-dns-5b6b5ff785-qwfwg\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.572331 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-ovsdbserver-sb\") pod \"dnsmasq-dns-5b6b5ff785-qwfwg\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.572895 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-dns-svc\") pod \"dnsmasq-dns-5b6b5ff785-qwfwg\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.573433 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-ovsdbserver-nb\") pod \"dnsmasq-dns-5b6b5ff785-qwfwg\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.575161 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-openstack-cell1\") pod \"dnsmasq-dns-5b6b5ff785-qwfwg\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.575409 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-config\") pod \"dnsmasq-dns-5b6b5ff785-qwfwg\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.593125 4773 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-468mh\" (UniqueName: \"kubernetes.io/projected/2ea5428a-d493-4da7-9844-bdf9a53e0f45-kube-api-access-468mh\") pod \"dnsmasq-dns-5b6b5ff785-qwfwg\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.674800 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.957030 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"26a329e1-f0b6-46bb-98c5-6c094855f241","Type":"ContainerStarted","Data":"d923cac2881ee2c131b6f753ec4f845a8d3deb8c0171b8c4eedbb4ef688d8115"} Jan 22 13:43:53 crc kubenswrapper[4773]: I0122 13:43:53.957409 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"26a329e1-f0b6-46bb-98c5-6c094855f241","Type":"ContainerStarted","Data":"bedc1f44937c433942834fbaf33f250e87fd5c9f26ab162b0965acef46048c4a"} Jan 22 13:43:54 crc kubenswrapper[4773]: I0122 13:43:54.036274 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-c6gl6"] Jan 22 13:43:54 crc kubenswrapper[4773]: I0122 13:43:54.045391 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-c6gl6"] Jan 22 13:43:54 crc kubenswrapper[4773]: I0122 13:43:54.205357 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b6b5ff785-qwfwg"] Jan 22 13:43:54 crc kubenswrapper[4773]: W0122 13:43:54.212604 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ea5428a_d493_4da7_9844_bdf9a53e0f45.slice/crio-52783219fe13ad971b7b03f065b1beab6aacc21b08953428119659267eb0d190 WatchSource:0}: Error finding container 52783219fe13ad971b7b03f065b1beab6aacc21b08953428119659267eb0d190: Status 404 returned error can't find the container with id 52783219fe13ad971b7b03f065b1beab6aacc21b08953428119659267eb0d190 Jan 22 13:43:54 crc kubenswrapper[4773]: I0122 13:43:54.660241 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:43:54 crc kubenswrapper[4773]: E0122 13:43:54.661032 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:43:54 crc kubenswrapper[4773]: I0122 13:43:54.677969 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16320e64-130c-4657-acc2-6025d895a31d" path="/var/lib/kubelet/pods/16320e64-130c-4657-acc2-6025d895a31d/volumes" Jan 22 13:43:54 crc kubenswrapper[4773]: I0122 13:43:54.681004 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5409c3ae-c282-4e60-bc93-e0c5bfdb1304" path="/var/lib/kubelet/pods/5409c3ae-c282-4e60-bc93-e0c5bfdb1304/volumes" Jan 22 13:43:54 crc kubenswrapper[4773]: I0122 13:43:54.973776 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" 
event={"ID":"26a329e1-f0b6-46bb-98c5-6c094855f241","Type":"ContainerStarted","Data":"deafa5d9aec3afe5f7d0a410421396ccc2c801dec87d9290977b43bd5ab8edb7"} Jan 22 13:43:54 crc kubenswrapper[4773]: I0122 13:43:54.976056 4773 generic.go:334] "Generic (PLEG): container finished" podID="2ea5428a-d493-4da7-9844-bdf9a53e0f45" containerID="ba28c12d32db1e3d09ad1fd0b3aa3e3c7ae7964ba316646057a8ed2a94cf93f4" exitCode=0 Jan 22 13:43:54 crc kubenswrapper[4773]: I0122 13:43:54.976136 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" event={"ID":"2ea5428a-d493-4da7-9844-bdf9a53e0f45","Type":"ContainerDied","Data":"ba28c12d32db1e3d09ad1fd0b3aa3e3c7ae7964ba316646057a8ed2a94cf93f4"} Jan 22 13:43:54 crc kubenswrapper[4773]: I0122 13:43:54.976171 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" event={"ID":"2ea5428a-d493-4da7-9844-bdf9a53e0f45","Type":"ContainerStarted","Data":"52783219fe13ad971b7b03f065b1beab6aacc21b08953428119659267eb0d190"} Jan 22 13:43:56 crc kubenswrapper[4773]: I0122 13:43:56.032529 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"26a329e1-f0b6-46bb-98c5-6c094855f241","Type":"ContainerStarted","Data":"f183a28d8674d72f98187811597fabdff875422fb3ad554144d1e5bd1a1edea4"} Jan 22 13:43:56 crc kubenswrapper[4773]: I0122 13:43:56.044529 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" event={"ID":"2ea5428a-d493-4da7-9844-bdf9a53e0f45","Type":"ContainerStarted","Data":"4a5cc44517398fef3f3fa9d045344a4e39ef46d30fdd36ecc8ea61943c16d2d9"} Jan 22 13:43:56 crc kubenswrapper[4773]: I0122 13:43:56.045296 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:43:56 crc kubenswrapper[4773]: I0122 13:43:56.077538 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=3.292846763 podStartE2EDuration="6.077517078s" podCreationTimestamp="2026-01-22 13:43:50 +0000 UTC" firstStartedPulling="2026-01-22 13:43:51.936487651 +0000 UTC m=+6539.514603476" lastFinishedPulling="2026-01-22 13:43:54.721157956 +0000 UTC m=+6542.299273791" observedRunningTime="2026-01-22 13:43:56.074917525 +0000 UTC m=+6543.653033360" watchObservedRunningTime="2026-01-22 13:43:56.077517078 +0000 UTC m=+6543.655632903" Jan 22 13:43:56 crc kubenswrapper[4773]: I0122 13:43:56.114102 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" podStartSLOduration=3.114074095 podStartE2EDuration="3.114074095s" podCreationTimestamp="2026-01-22 13:43:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:43:56.105654969 +0000 UTC m=+6543.683770794" watchObservedRunningTime="2026-01-22 13:43:56.114074095 +0000 UTC m=+6543.692189920" Jan 22 13:43:59 crc kubenswrapper[4773]: I0122 13:43:59.294994 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 22 13:44:03 crc kubenswrapper[4773]: I0122 13:44:03.277298 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 22 13:44:03 crc kubenswrapper[4773]: I0122 13:44:03.278093 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="516a794c-7420-49f5-aae8-faca42ece1de" 
containerName="kube-state-metrics" containerID="cri-o://9f5cf96d4e14559de4d5fffcc35a9791fdfc5327143a4388a952a110d8dfed1c" gracePeriod=30 Jan 22 13:44:03 crc kubenswrapper[4773]: I0122 13:44:03.677395 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:44:03 crc kubenswrapper[4773]: I0122 13:44:03.762812 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84dd694dd5-rdt6v"] Jan 22 13:44:03 crc kubenswrapper[4773]: I0122 13:44:03.763299 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" podUID="9ea3757f-a094-4d6b-bacf-fb311010aa70" containerName="dnsmasq-dns" containerID="cri-o://5ebb7cb37f0a3a7ee83cf0d45c687d11533039b4a8a029f5232308bafb1e6a5b" gracePeriod=10 Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.011762 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.092072 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78669f889-54nfc"] Jan 22 13:44:04 crc kubenswrapper[4773]: E0122 13:44:04.093342 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="516a794c-7420-49f5-aae8-faca42ece1de" containerName="kube-state-metrics" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.093362 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="516a794c-7420-49f5-aae8-faca42ece1de" containerName="kube-state-metrics" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.093940 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="516a794c-7420-49f5-aae8-faca42ece1de" containerName="kube-state-metrics" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.097369 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.154980 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgn4d\" (UniqueName: \"kubernetes.io/projected/516a794c-7420-49f5-aae8-faca42ece1de-kube-api-access-dgn4d\") pod \"516a794c-7420-49f5-aae8-faca42ece1de\" (UID: \"516a794c-7420-49f5-aae8-faca42ece1de\") " Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.255225 4773 generic.go:334] "Generic (PLEG): container finished" podID="516a794c-7420-49f5-aae8-faca42ece1de" containerID="9f5cf96d4e14559de4d5fffcc35a9791fdfc5327143a4388a952a110d8dfed1c" exitCode=2 Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.255340 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"516a794c-7420-49f5-aae8-faca42ece1de","Type":"ContainerDied","Data":"9f5cf96d4e14559de4d5fffcc35a9791fdfc5327143a4388a952a110d8dfed1c"} Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.255376 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"516a794c-7420-49f5-aae8-faca42ece1de","Type":"ContainerDied","Data":"e8dff7816072c2af3a2e5fa0e4e6d2176ef74aa6285ace52ce272e01d8581a0c"} Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.255395 4773 scope.go:117] "RemoveContainer" containerID="9f5cf96d4e14559de4d5fffcc35a9791fdfc5327143a4388a952a110d8dfed1c" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.255598 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.275548 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78669f889-54nfc"] Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.279114 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/516a794c-7420-49f5-aae8-faca42ece1de-kube-api-access-dgn4d" (OuterVolumeSpecName: "kube-api-access-dgn4d") pod "516a794c-7420-49f5-aae8-faca42ece1de" (UID: "516a794c-7420-49f5-aae8-faca42ece1de"). InnerVolumeSpecName "kube-api-access-dgn4d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.281799 4773 generic.go:334] "Generic (PLEG): container finished" podID="9ea3757f-a094-4d6b-bacf-fb311010aa70" containerID="5ebb7cb37f0a3a7ee83cf0d45c687d11533039b4a8a029f5232308bafb1e6a5b" exitCode=0 Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.281850 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" event={"ID":"9ea3757f-a094-4d6b-bacf-fb311010aa70","Type":"ContainerDied","Data":"5ebb7cb37f0a3a7ee83cf0d45c687d11533039b4a8a029f5232308bafb1e6a5b"} Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.329636 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4918760f-8cd5-4b8e-9cd3-623967f73d9d-ovsdbserver-nb\") pod \"dnsmasq-dns-78669f889-54nfc\" (UID: \"4918760f-8cd5-4b8e-9cd3-623967f73d9d\") " pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.329687 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/4918760f-8cd5-4b8e-9cd3-623967f73d9d-openstack-cell1\") pod \"dnsmasq-dns-78669f889-54nfc\" (UID: \"4918760f-8cd5-4b8e-9cd3-623967f73d9d\") " pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.329730 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4918760f-8cd5-4b8e-9cd3-623967f73d9d-dns-svc\") pod \"dnsmasq-dns-78669f889-54nfc\" (UID: \"4918760f-8cd5-4b8e-9cd3-623967f73d9d\") " pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.331544 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4918760f-8cd5-4b8e-9cd3-623967f73d9d-ovsdbserver-sb\") pod \"dnsmasq-dns-78669f889-54nfc\" (UID: \"4918760f-8cd5-4b8e-9cd3-623967f73d9d\") " pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.331857 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4918760f-8cd5-4b8e-9cd3-623967f73d9d-config\") pod \"dnsmasq-dns-78669f889-54nfc\" (UID: \"4918760f-8cd5-4b8e-9cd3-623967f73d9d\") " pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.331928 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wl9gm\" (UniqueName: \"kubernetes.io/projected/4918760f-8cd5-4b8e-9cd3-623967f73d9d-kube-api-access-wl9gm\") pod 
\"dnsmasq-dns-78669f889-54nfc\" (UID: \"4918760f-8cd5-4b8e-9cd3-623967f73d9d\") " pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.332131 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgn4d\" (UniqueName: \"kubernetes.io/projected/516a794c-7420-49f5-aae8-faca42ece1de-kube-api-access-dgn4d\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.346655 4773 scope.go:117] "RemoveContainer" containerID="9f5cf96d4e14559de4d5fffcc35a9791fdfc5327143a4388a952a110d8dfed1c" Jan 22 13:44:04 crc kubenswrapper[4773]: E0122 13:44:04.361920 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f5cf96d4e14559de4d5fffcc35a9791fdfc5327143a4388a952a110d8dfed1c\": container with ID starting with 9f5cf96d4e14559de4d5fffcc35a9791fdfc5327143a4388a952a110d8dfed1c not found: ID does not exist" containerID="9f5cf96d4e14559de4d5fffcc35a9791fdfc5327143a4388a952a110d8dfed1c" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.362213 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f5cf96d4e14559de4d5fffcc35a9791fdfc5327143a4388a952a110d8dfed1c"} err="failed to get container status \"9f5cf96d4e14559de4d5fffcc35a9791fdfc5327143a4388a952a110d8dfed1c\": rpc error: code = NotFound desc = could not find container \"9f5cf96d4e14559de4d5fffcc35a9791fdfc5327143a4388a952a110d8dfed1c\": container with ID starting with 9f5cf96d4e14559de4d5fffcc35a9791fdfc5327143a4388a952a110d8dfed1c not found: ID does not exist" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.436793 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4918760f-8cd5-4b8e-9cd3-623967f73d9d-ovsdbserver-sb\") pod \"dnsmasq-dns-78669f889-54nfc\" (UID: \"4918760f-8cd5-4b8e-9cd3-623967f73d9d\") " pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.436919 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4918760f-8cd5-4b8e-9cd3-623967f73d9d-config\") pod \"dnsmasq-dns-78669f889-54nfc\" (UID: \"4918760f-8cd5-4b8e-9cd3-623967f73d9d\") " pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.436958 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wl9gm\" (UniqueName: \"kubernetes.io/projected/4918760f-8cd5-4b8e-9cd3-623967f73d9d-kube-api-access-wl9gm\") pod \"dnsmasq-dns-78669f889-54nfc\" (UID: \"4918760f-8cd5-4b8e-9cd3-623967f73d9d\") " pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.437074 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4918760f-8cd5-4b8e-9cd3-623967f73d9d-ovsdbserver-nb\") pod \"dnsmasq-dns-78669f889-54nfc\" (UID: \"4918760f-8cd5-4b8e-9cd3-623967f73d9d\") " pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.437103 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/4918760f-8cd5-4b8e-9cd3-623967f73d9d-openstack-cell1\") pod \"dnsmasq-dns-78669f889-54nfc\" (UID: \"4918760f-8cd5-4b8e-9cd3-623967f73d9d\") " 
pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.437154 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4918760f-8cd5-4b8e-9cd3-623967f73d9d-dns-svc\") pod \"dnsmasq-dns-78669f889-54nfc\" (UID: \"4918760f-8cd5-4b8e-9cd3-623967f73d9d\") " pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.438530 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4918760f-8cd5-4b8e-9cd3-623967f73d9d-ovsdbserver-nb\") pod \"dnsmasq-dns-78669f889-54nfc\" (UID: \"4918760f-8cd5-4b8e-9cd3-623967f73d9d\") " pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.438702 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4918760f-8cd5-4b8e-9cd3-623967f73d9d-ovsdbserver-sb\") pod \"dnsmasq-dns-78669f889-54nfc\" (UID: \"4918760f-8cd5-4b8e-9cd3-623967f73d9d\") " pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.439124 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/4918760f-8cd5-4b8e-9cd3-623967f73d9d-openstack-cell1\") pod \"dnsmasq-dns-78669f889-54nfc\" (UID: \"4918760f-8cd5-4b8e-9cd3-623967f73d9d\") " pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.439308 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4918760f-8cd5-4b8e-9cd3-623967f73d9d-config\") pod \"dnsmasq-dns-78669f889-54nfc\" (UID: \"4918760f-8cd5-4b8e-9cd3-623967f73d9d\") " pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.439546 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4918760f-8cd5-4b8e-9cd3-623967f73d9d-dns-svc\") pod \"dnsmasq-dns-78669f889-54nfc\" (UID: \"4918760f-8cd5-4b8e-9cd3-623967f73d9d\") " pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.489833 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wl9gm\" (UniqueName: \"kubernetes.io/projected/4918760f-8cd5-4b8e-9cd3-623967f73d9d-kube-api-access-wl9gm\") pod \"dnsmasq-dns-78669f889-54nfc\" (UID: \"4918760f-8cd5-4b8e-9cd3-623967f73d9d\") " pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.528081 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.672246 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.684648 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.692618 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.722438 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Jan 22 13:44:04 crc kubenswrapper[4773]: E0122 13:44:04.723245 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ea3757f-a094-4d6b-bacf-fb311010aa70" containerName="dnsmasq-dns" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.723266 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ea3757f-a094-4d6b-bacf-fb311010aa70" containerName="dnsmasq-dns" Jan 22 13:44:04 crc kubenswrapper[4773]: E0122 13:44:04.728621 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ea3757f-a094-4d6b-bacf-fb311010aa70" containerName="init" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.728683 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ea3757f-a094-4d6b-bacf-fb311010aa70" containerName="init" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.729668 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ea3757f-a094-4d6b-bacf-fb311010aa70" containerName="dnsmasq-dns" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.730572 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.736217 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.737870 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.743408 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5mff\" (UniqueName: \"kubernetes.io/projected/9ea3757f-a094-4d6b-bacf-fb311010aa70-kube-api-access-w5mff\") pod \"9ea3757f-a094-4d6b-bacf-fb311010aa70\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.743479 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-dns-svc\") pod \"9ea3757f-a094-4d6b-bacf-fb311010aa70\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.743635 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-ovsdbserver-sb\") pod \"9ea3757f-a094-4d6b-bacf-fb311010aa70\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.743674 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-config\") pod \"9ea3757f-a094-4d6b-bacf-fb311010aa70\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.743699 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-ovsdbserver-nb\") pod \"9ea3757f-a094-4d6b-bacf-fb311010aa70\" (UID: \"9ea3757f-a094-4d6b-bacf-fb311010aa70\") " Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.743957 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/8d7992be-21ed-446d-bf12-1adeccd86d66-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"8d7992be-21ed-446d-bf12-1adeccd86d66\") " pod="openstack/kube-state-metrics-0" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.744008 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nm575\" (UniqueName: \"kubernetes.io/projected/8d7992be-21ed-446d-bf12-1adeccd86d66-kube-api-access-nm575\") pod \"kube-state-metrics-0\" (UID: \"8d7992be-21ed-446d-bf12-1adeccd86d66\") " pod="openstack/kube-state-metrics-0" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.744043 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d7992be-21ed-446d-bf12-1adeccd86d66-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"8d7992be-21ed-446d-bf12-1adeccd86d66\") " pod="openstack/kube-state-metrics-0" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.744125 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d7992be-21ed-446d-bf12-1adeccd86d66-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"8d7992be-21ed-446d-bf12-1adeccd86d66\") " pod="openstack/kube-state-metrics-0" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.753183 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ea3757f-a094-4d6b-bacf-fb311010aa70-kube-api-access-w5mff" (OuterVolumeSpecName: "kube-api-access-w5mff") pod "9ea3757f-a094-4d6b-bacf-fb311010aa70" (UID: "9ea3757f-a094-4d6b-bacf-fb311010aa70"). InnerVolumeSpecName "kube-api-access-w5mff". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.804089 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.823445 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-config" (OuterVolumeSpecName: "config") pod "9ea3757f-a094-4d6b-bacf-fb311010aa70" (UID: "9ea3757f-a094-4d6b-bacf-fb311010aa70"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.846229 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/8d7992be-21ed-446d-bf12-1adeccd86d66-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"8d7992be-21ed-446d-bf12-1adeccd86d66\") " pod="openstack/kube-state-metrics-0" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.847792 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nm575\" (UniqueName: \"kubernetes.io/projected/8d7992be-21ed-446d-bf12-1adeccd86d66-kube-api-access-nm575\") pod \"kube-state-metrics-0\" (UID: \"8d7992be-21ed-446d-bf12-1adeccd86d66\") " pod="openstack/kube-state-metrics-0" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.847927 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d7992be-21ed-446d-bf12-1adeccd86d66-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"8d7992be-21ed-446d-bf12-1adeccd86d66\") " pod="openstack/kube-state-metrics-0" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.848179 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d7992be-21ed-446d-bf12-1adeccd86d66-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"8d7992be-21ed-446d-bf12-1adeccd86d66\") " pod="openstack/kube-state-metrics-0" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.849777 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.849807 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5mff\" (UniqueName: \"kubernetes.io/projected/9ea3757f-a094-4d6b-bacf-fb311010aa70-kube-api-access-w5mff\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.850963 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9ea3757f-a094-4d6b-bacf-fb311010aa70" (UID: "9ea3757f-a094-4d6b-bacf-fb311010aa70"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.855465 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/8d7992be-21ed-446d-bf12-1adeccd86d66-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"8d7992be-21ed-446d-bf12-1adeccd86d66\") " pod="openstack/kube-state-metrics-0" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.855709 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d7992be-21ed-446d-bf12-1adeccd86d66-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"8d7992be-21ed-446d-bf12-1adeccd86d66\") " pod="openstack/kube-state-metrics-0" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.858622 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d7992be-21ed-446d-bf12-1adeccd86d66-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"8d7992be-21ed-446d-bf12-1adeccd86d66\") " pod="openstack/kube-state-metrics-0" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.868112 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9ea3757f-a094-4d6b-bacf-fb311010aa70" (UID: "9ea3757f-a094-4d6b-bacf-fb311010aa70"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.871116 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nm575\" (UniqueName: \"kubernetes.io/projected/8d7992be-21ed-446d-bf12-1adeccd86d66-kube-api-access-nm575\") pod \"kube-state-metrics-0\" (UID: \"8d7992be-21ed-446d-bf12-1adeccd86d66\") " pod="openstack/kube-state-metrics-0" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.904130 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9ea3757f-a094-4d6b-bacf-fb311010aa70" (UID: "9ea3757f-a094-4d6b-bacf-fb311010aa70"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.952959 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.953003 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:04 crc kubenswrapper[4773]: I0122 13:44:04.953017 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9ea3757f-a094-4d6b-bacf-fb311010aa70-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:05 crc kubenswrapper[4773]: I0122 13:44:05.142480 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78669f889-54nfc"] Jan 22 13:44:05 crc kubenswrapper[4773]: W0122 13:44:05.145737 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4918760f_8cd5_4b8e_9cd3_623967f73d9d.slice/crio-5d1c54e50c4b580ee56117bc7c712c5248f94f08322050c9d66c7eac18a59c9b WatchSource:0}: Error finding container 5d1c54e50c4b580ee56117bc7c712c5248f94f08322050c9d66c7eac18a59c9b: Status 404 returned error can't find the container with id 5d1c54e50c4b580ee56117bc7c712c5248f94f08322050c9d66c7eac18a59c9b Jan 22 13:44:05 crc kubenswrapper[4773]: I0122 13:44:05.169364 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 22 13:44:05 crc kubenswrapper[4773]: I0122 13:44:05.308544 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78669f889-54nfc" event={"ID":"4918760f-8cd5-4b8e-9cd3-623967f73d9d","Type":"ContainerStarted","Data":"5d1c54e50c4b580ee56117bc7c712c5248f94f08322050c9d66c7eac18a59c9b"} Jan 22 13:44:05 crc kubenswrapper[4773]: I0122 13:44:05.313326 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" event={"ID":"9ea3757f-a094-4d6b-bacf-fb311010aa70","Type":"ContainerDied","Data":"933c103807f5ed90978bd9592c7e70da1bfdbd41ff710ab3d3e12b9c9fad240f"} Jan 22 13:44:05 crc kubenswrapper[4773]: I0122 13:44:05.313380 4773 scope.go:117] "RemoveContainer" containerID="5ebb7cb37f0a3a7ee83cf0d45c687d11533039b4a8a029f5232308bafb1e6a5b" Jan 22 13:44:05 crc kubenswrapper[4773]: I0122 13:44:05.313523 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84dd694dd5-rdt6v" Jan 22 13:44:05 crc kubenswrapper[4773]: I0122 13:44:05.358182 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84dd694dd5-rdt6v"] Jan 22 13:44:05 crc kubenswrapper[4773]: I0122 13:44:05.370876 4773 scope.go:117] "RemoveContainer" containerID="676844cb2c1bd7e6b94965b73fdf95066f05be93de81298d3c210f5edbcab3a1" Jan 22 13:44:05 crc kubenswrapper[4773]: I0122 13:44:05.374971 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84dd694dd5-rdt6v"] Jan 22 13:44:05 crc kubenswrapper[4773]: I0122 13:44:05.710044 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 22 13:44:05 crc kubenswrapper[4773]: W0122 13:44:05.712031 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8d7992be_21ed_446d_bf12_1adeccd86d66.slice/crio-dff4b4ff24f812c6599c70633f0248c14aed41514768f8b52a2a7d54c8f0e4a3 WatchSource:0}: Error finding container dff4b4ff24f812c6599c70633f0248c14aed41514768f8b52a2a7d54c8f0e4a3: Status 404 returned error can't find the container with id dff4b4ff24f812c6599c70633f0248c14aed41514768f8b52a2a7d54c8f0e4a3 Jan 22 13:44:06 crc kubenswrapper[4773]: I0122 13:44:06.327685 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8d7992be-21ed-446d-bf12-1adeccd86d66","Type":"ContainerStarted","Data":"dff4b4ff24f812c6599c70633f0248c14aed41514768f8b52a2a7d54c8f0e4a3"} Jan 22 13:44:06 crc kubenswrapper[4773]: I0122 13:44:06.331390 4773 generic.go:334] "Generic (PLEG): container finished" podID="4918760f-8cd5-4b8e-9cd3-623967f73d9d" containerID="03a28e7abf96d6dc1f71b4ad80ade0006cacaf7e6ef80530898247f80b7c6178" exitCode=0 Jan 22 13:44:06 crc kubenswrapper[4773]: I0122 13:44:06.331435 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78669f889-54nfc" event={"ID":"4918760f-8cd5-4b8e-9cd3-623967f73d9d","Type":"ContainerDied","Data":"03a28e7abf96d6dc1f71b4ad80ade0006cacaf7e6ef80530898247f80b7c6178"} Jan 22 13:44:06 crc kubenswrapper[4773]: I0122 13:44:06.722597 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="516a794c-7420-49f5-aae8-faca42ece1de" path="/var/lib/kubelet/pods/516a794c-7420-49f5-aae8-faca42ece1de/volumes" Jan 22 13:44:06 crc kubenswrapper[4773]: I0122 13:44:06.723585 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ea3757f-a094-4d6b-bacf-fb311010aa70" path="/var/lib/kubelet/pods/9ea3757f-a094-4d6b-bacf-fb311010aa70/volumes" Jan 22 13:44:06 crc kubenswrapper[4773]: I0122 13:44:06.733476 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 13:44:06 crc kubenswrapper[4773]: I0122 13:44:06.734043 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="07d47c0f-c8b6-4850-a950-692906949bdd" containerName="ceilometer-central-agent" containerID="cri-o://2db2434d68aeb15c856d58768f25a6bf5dd6e609d79b76abc9b6a13aeaf07158" gracePeriod=30 Jan 22 13:44:06 crc kubenswrapper[4773]: I0122 13:44:06.734092 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="07d47c0f-c8b6-4850-a950-692906949bdd" containerName="proxy-httpd" containerID="cri-o://d02b99b5e05e1d6452897d62a49b07ecaedcfcc3164fe335b7f82d835f4e5d25" gracePeriod=30 Jan 22 13:44:06 crc kubenswrapper[4773]: I0122 
13:44:06.734134 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="07d47c0f-c8b6-4850-a950-692906949bdd" containerName="sg-core" containerID="cri-o://fdff99889a56b0b9d67065673a29bbece2e044cd61c51318b56efdef625126cf" gracePeriod=30 Jan 22 13:44:06 crc kubenswrapper[4773]: I0122 13:44:06.734161 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="07d47c0f-c8b6-4850-a950-692906949bdd" containerName="ceilometer-notification-agent" containerID="cri-o://5196ef9045bc2fee474875509a25dd3bfceceb9c30c1df98a41666c3e138001e" gracePeriod=30 Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.357819 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"8d7992be-21ed-446d-bf12-1adeccd86d66","Type":"ContainerStarted","Data":"7a716bddf85c116219829afa2152cd6b8d3a2cc22606efefff8e5fa999054731"} Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.359908 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.372036 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78669f889-54nfc" event={"ID":"4918760f-8cd5-4b8e-9cd3-623967f73d9d","Type":"ContainerStarted","Data":"d359b688d60d41894ec75683a24b54e46c0767c08a31fec85247a2b6b58ccb1f"} Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.372489 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.381827 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.965751991 podStartE2EDuration="3.381795934s" podCreationTimestamp="2026-01-22 13:44:04 +0000 UTC" firstStartedPulling="2026-01-22 13:44:05.722621691 +0000 UTC m=+6553.300737516" lastFinishedPulling="2026-01-22 13:44:06.138665634 +0000 UTC m=+6553.716781459" observedRunningTime="2026-01-22 13:44:07.379204551 +0000 UTC m=+6554.957320376" watchObservedRunningTime="2026-01-22 13:44:07.381795934 +0000 UTC m=+6554.959911759" Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.387483 4773 generic.go:334] "Generic (PLEG): container finished" podID="07d47c0f-c8b6-4850-a950-692906949bdd" containerID="d02b99b5e05e1d6452897d62a49b07ecaedcfcc3164fe335b7f82d835f4e5d25" exitCode=0 Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.387516 4773 generic.go:334] "Generic (PLEG): container finished" podID="07d47c0f-c8b6-4850-a950-692906949bdd" containerID="fdff99889a56b0b9d67065673a29bbece2e044cd61c51318b56efdef625126cf" exitCode=2 Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.387528 4773 generic.go:334] "Generic (PLEG): container finished" podID="07d47c0f-c8b6-4850-a950-692906949bdd" containerID="5196ef9045bc2fee474875509a25dd3bfceceb9c30c1df98a41666c3e138001e" exitCode=0 Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.387538 4773 generic.go:334] "Generic (PLEG): container finished" podID="07d47c0f-c8b6-4850-a950-692906949bdd" containerID="2db2434d68aeb15c856d58768f25a6bf5dd6e609d79b76abc9b6a13aeaf07158" exitCode=0 Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.387565 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"07d47c0f-c8b6-4850-a950-692906949bdd","Type":"ContainerDied","Data":"d02b99b5e05e1d6452897d62a49b07ecaedcfcc3164fe335b7f82d835f4e5d25"} Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.387629 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"07d47c0f-c8b6-4850-a950-692906949bdd","Type":"ContainerDied","Data":"fdff99889a56b0b9d67065673a29bbece2e044cd61c51318b56efdef625126cf"} Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.387642 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"07d47c0f-c8b6-4850-a950-692906949bdd","Type":"ContainerDied","Data":"5196ef9045bc2fee474875509a25dd3bfceceb9c30c1df98a41666c3e138001e"} Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.387653 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"07d47c0f-c8b6-4850-a950-692906949bdd","Type":"ContainerDied","Data":"2db2434d68aeb15c856d58768f25a6bf5dd6e609d79b76abc9b6a13aeaf07158"} Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.413551 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-78669f889-54nfc" podStartSLOduration=3.413529486 podStartE2EDuration="3.413529486s" podCreationTimestamp="2026-01-22 13:44:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:44:07.405738097 +0000 UTC m=+6554.983853932" watchObservedRunningTime="2026-01-22 13:44:07.413529486 +0000 UTC m=+6554.991645311" Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.476395 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.529841 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/07d47c0f-c8b6-4850-a950-692906949bdd-log-httpd\") pod \"07d47c0f-c8b6-4850-a950-692906949bdd\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.529927 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nznvb\" (UniqueName: \"kubernetes.io/projected/07d47c0f-c8b6-4850-a950-692906949bdd-kube-api-access-nznvb\") pod \"07d47c0f-c8b6-4850-a950-692906949bdd\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.529953 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-combined-ca-bundle\") pod \"07d47c0f-c8b6-4850-a950-692906949bdd\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.529974 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-config-data\") pod \"07d47c0f-c8b6-4850-a950-692906949bdd\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.530025 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-sg-core-conf-yaml\") pod \"07d47c0f-c8b6-4850-a950-692906949bdd\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " 
Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.530084 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-scripts\") pod \"07d47c0f-c8b6-4850-a950-692906949bdd\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.530108 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/07d47c0f-c8b6-4850-a950-692906949bdd-run-httpd\") pod \"07d47c0f-c8b6-4850-a950-692906949bdd\" (UID: \"07d47c0f-c8b6-4850-a950-692906949bdd\") " Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.534172 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07d47c0f-c8b6-4850-a950-692906949bdd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "07d47c0f-c8b6-4850-a950-692906949bdd" (UID: "07d47c0f-c8b6-4850-a950-692906949bdd"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.536918 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07d47c0f-c8b6-4850-a950-692906949bdd-kube-api-access-nznvb" (OuterVolumeSpecName: "kube-api-access-nznvb") pod "07d47c0f-c8b6-4850-a950-692906949bdd" (UID: "07d47c0f-c8b6-4850-a950-692906949bdd"). InnerVolumeSpecName "kube-api-access-nznvb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.538049 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07d47c0f-c8b6-4850-a950-692906949bdd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "07d47c0f-c8b6-4850-a950-692906949bdd" (UID: "07d47c0f-c8b6-4850-a950-692906949bdd"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.542542 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-scripts" (OuterVolumeSpecName: "scripts") pod "07d47c0f-c8b6-4850-a950-692906949bdd" (UID: "07d47c0f-c8b6-4850-a950-692906949bdd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.570257 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "07d47c0f-c8b6-4850-a950-692906949bdd" (UID: "07d47c0f-c8b6-4850-a950-692906949bdd"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.634360 4773 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/07d47c0f-c8b6-4850-a950-692906949bdd-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.634404 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nznvb\" (UniqueName: \"kubernetes.io/projected/07d47c0f-c8b6-4850-a950-692906949bdd-kube-api-access-nznvb\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.634417 4773 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.634428 4773 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-scripts\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.634438 4773 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/07d47c0f-c8b6-4850-a950-692906949bdd-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.634444 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "07d47c0f-c8b6-4850-a950-692906949bdd" (UID: "07d47c0f-c8b6-4850-a950-692906949bdd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.654605 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-config-data" (OuterVolumeSpecName: "config-data") pod "07d47c0f-c8b6-4850-a950-692906949bdd" (UID: "07d47c0f-c8b6-4850-a950-692906949bdd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.737610 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:07 crc kubenswrapper[4773]: I0122 13:44:07.737651 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07d47c0f-c8b6-4850-a950-692906949bdd-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.409431 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"07d47c0f-c8b6-4850-a950-692906949bdd","Type":"ContainerDied","Data":"4064579bc3be456fc7a6e54289002a223ca01739b5f2c3637334c94a64200eeb"} Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.409831 4773 scope.go:117] "RemoveContainer" containerID="d02b99b5e05e1d6452897d62a49b07ecaedcfcc3164fe335b7f82d835f4e5d25" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.409750 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.449807 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.468259 4773 scope.go:117] "RemoveContainer" containerID="fdff99889a56b0b9d67065673a29bbece2e044cd61c51318b56efdef625126cf" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.477952 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.492016 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 22 13:44:08 crc kubenswrapper[4773]: E0122 13:44:08.492524 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07d47c0f-c8b6-4850-a950-692906949bdd" containerName="sg-core" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.492540 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="07d47c0f-c8b6-4850-a950-692906949bdd" containerName="sg-core" Jan 22 13:44:08 crc kubenswrapper[4773]: E0122 13:44:08.492560 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07d47c0f-c8b6-4850-a950-692906949bdd" containerName="proxy-httpd" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.492566 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="07d47c0f-c8b6-4850-a950-692906949bdd" containerName="proxy-httpd" Jan 22 13:44:08 crc kubenswrapper[4773]: E0122 13:44:08.492583 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07d47c0f-c8b6-4850-a950-692906949bdd" containerName="ceilometer-central-agent" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.492589 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="07d47c0f-c8b6-4850-a950-692906949bdd" containerName="ceilometer-central-agent" Jan 22 13:44:08 crc kubenswrapper[4773]: E0122 13:44:08.492606 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07d47c0f-c8b6-4850-a950-692906949bdd" containerName="ceilometer-notification-agent" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.492611 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="07d47c0f-c8b6-4850-a950-692906949bdd" containerName="ceilometer-notification-agent" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.492801 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="07d47c0f-c8b6-4850-a950-692906949bdd" containerName="proxy-httpd" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.492813 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="07d47c0f-c8b6-4850-a950-692906949bdd" containerName="ceilometer-notification-agent" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.492826 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="07d47c0f-c8b6-4850-a950-692906949bdd" containerName="sg-core" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.492845 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="07d47c0f-c8b6-4850-a950-692906949bdd" containerName="ceilometer-central-agent" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.496657 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.501799 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.502023 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.502205 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.522979 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.523094 4773 scope.go:117] "RemoveContainer" containerID="5196ef9045bc2fee474875509a25dd3bfceceb9c30c1df98a41666c3e138001e" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.550098 4773 scope.go:117] "RemoveContainer" containerID="2db2434d68aeb15c856d58768f25a6bf5dd6e609d79b76abc9b6a13aeaf07158" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.659587 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ba1736-246e-42ae-9249-e635b29993ae-config-data\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.659879 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b2ba1736-246e-42ae-9249-e635b29993ae-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.661007 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b2ba1736-246e-42ae-9249-e635b29993ae-log-httpd\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.661063 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2ba1736-246e-42ae-9249-e635b29993ae-scripts\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.661120 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jql8\" (UniqueName: \"kubernetes.io/projected/b2ba1736-246e-42ae-9249-e635b29993ae-kube-api-access-6jql8\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.661240 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ba1736-246e-42ae-9249-e635b29993ae-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.661267 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b2ba1736-246e-42ae-9249-e635b29993ae-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.661332 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b2ba1736-246e-42ae-9249-e635b29993ae-run-httpd\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.677716 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07d47c0f-c8b6-4850-a950-692906949bdd" path="/var/lib/kubelet/pods/07d47c0f-c8b6-4850-a950-692906949bdd/volumes" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.762548 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ba1736-246e-42ae-9249-e635b29993ae-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.762602 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ba1736-246e-42ae-9249-e635b29993ae-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.762644 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b2ba1736-246e-42ae-9249-e635b29993ae-run-httpd\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.762696 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ba1736-246e-42ae-9249-e635b29993ae-config-data\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.762769 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b2ba1736-246e-42ae-9249-e635b29993ae-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.762854 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b2ba1736-246e-42ae-9249-e635b29993ae-log-httpd\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.762877 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2ba1736-246e-42ae-9249-e635b29993ae-scripts\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.762923 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jql8\" (UniqueName: \"kubernetes.io/projected/b2ba1736-246e-42ae-9249-e635b29993ae-kube-api-access-6jql8\") pod 
\"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.763273 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b2ba1736-246e-42ae-9249-e635b29993ae-run-httpd\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.763553 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b2ba1736-246e-42ae-9249-e635b29993ae-log-httpd\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.768713 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b2ba1736-246e-42ae-9249-e635b29993ae-scripts\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.769008 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b2ba1736-246e-42ae-9249-e635b29993ae-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.769728 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ba1736-246e-42ae-9249-e635b29993ae-config-data\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.772083 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ba1736-246e-42ae-9249-e635b29993ae-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.776156 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ba1736-246e-42ae-9249-e635b29993ae-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.783903 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jql8\" (UniqueName: \"kubernetes.io/projected/b2ba1736-246e-42ae-9249-e635b29993ae-kube-api-access-6jql8\") pod \"ceilometer-0\" (UID: \"b2ba1736-246e-42ae-9249-e635b29993ae\") " pod="openstack/ceilometer-0" Jan 22 13:44:08 crc kubenswrapper[4773]: I0122 13:44:08.833784 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.065688 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz"] Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.067384 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.069854 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.069967 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.071139 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.071249 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-bzqnq" Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.087919 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz"] Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.173097 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/654d399b-e1b1-41bd-86e4-a1806f829a79-ssh-key-openstack-cell1\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz\" (UID: \"654d399b-e1b1-41bd-86e4-a1806f829a79\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.173186 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/654d399b-e1b1-41bd-86e4-a1806f829a79-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz\" (UID: \"654d399b-e1b1-41bd-86e4-a1806f829a79\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.173347 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqxfb\" (UniqueName: \"kubernetes.io/projected/654d399b-e1b1-41bd-86e4-a1806f829a79-kube-api-access-mqxfb\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz\" (UID: \"654d399b-e1b1-41bd-86e4-a1806f829a79\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.173381 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/654d399b-e1b1-41bd-86e4-a1806f829a79-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz\" (UID: \"654d399b-e1b1-41bd-86e4-a1806f829a79\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.274996 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqxfb\" (UniqueName: \"kubernetes.io/projected/654d399b-e1b1-41bd-86e4-a1806f829a79-kube-api-access-mqxfb\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz\" (UID: \"654d399b-e1b1-41bd-86e4-a1806f829a79\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.275071 4773 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/654d399b-e1b1-41bd-86e4-a1806f829a79-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz\" (UID: \"654d399b-e1b1-41bd-86e4-a1806f829a79\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.275152 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/654d399b-e1b1-41bd-86e4-a1806f829a79-ssh-key-openstack-cell1\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz\" (UID: \"654d399b-e1b1-41bd-86e4-a1806f829a79\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.275219 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/654d399b-e1b1-41bd-86e4-a1806f829a79-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz\" (UID: \"654d399b-e1b1-41bd-86e4-a1806f829a79\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.280501 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/654d399b-e1b1-41bd-86e4-a1806f829a79-ssh-key-openstack-cell1\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz\" (UID: \"654d399b-e1b1-41bd-86e4-a1806f829a79\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.280646 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/654d399b-e1b1-41bd-86e4-a1806f829a79-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz\" (UID: \"654d399b-e1b1-41bd-86e4-a1806f829a79\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.287826 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/654d399b-e1b1-41bd-86e4-a1806f829a79-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz\" (UID: \"654d399b-e1b1-41bd-86e4-a1806f829a79\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.301740 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqxfb\" (UniqueName: \"kubernetes.io/projected/654d399b-e1b1-41bd-86e4-a1806f829a79-kube-api-access-mqxfb\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz\" (UID: \"654d399b-e1b1-41bd-86e4-a1806f829a79\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.363253 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.399494 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.445566 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b2ba1736-246e-42ae-9249-e635b29993ae","Type":"ContainerStarted","Data":"6b0080efc8d990ac75e983c34fb068830c309c8bf750dfde0af900cee3b8a863"} Jan 22 13:44:09 crc kubenswrapper[4773]: I0122 13:44:09.658988 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:44:09 crc kubenswrapper[4773]: E0122 13:44:09.659647 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:44:10 crc kubenswrapper[4773]: I0122 13:44:10.062661 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz"] Jan 22 13:44:10 crc kubenswrapper[4773]: W0122 13:44:10.064782 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod654d399b_e1b1_41bd_86e4_a1806f829a79.slice/crio-90d1858741241f22e0886c690e6d285857108c92d9bcc8cd319569d1bae8e0c5 WatchSource:0}: Error finding container 90d1858741241f22e0886c690e6d285857108c92d9bcc8cd319569d1bae8e0c5: Status 404 returned error can't find the container with id 90d1858741241f22e0886c690e6d285857108c92d9bcc8cd319569d1bae8e0c5 Jan 22 13:44:10 crc kubenswrapper[4773]: I0122 13:44:10.472998 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" event={"ID":"654d399b-e1b1-41bd-86e4-a1806f829a79","Type":"ContainerStarted","Data":"90d1858741241f22e0886c690e6d285857108c92d9bcc8cd319569d1bae8e0c5"} Jan 22 13:44:10 crc kubenswrapper[4773]: I0122 13:44:10.474618 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b2ba1736-246e-42ae-9249-e635b29993ae","Type":"ContainerStarted","Data":"84b25cfa789dc519aa6cf3c2a9054eb2ba0b3d1d75b020eef329f6f6a628ca78"} Jan 22 13:44:11 crc kubenswrapper[4773]: I0122 13:44:11.035565 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-pw7mf"] Jan 22 13:44:11 crc kubenswrapper[4773]: I0122 13:44:11.044774 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-pw7mf"] Jan 22 13:44:11 crc kubenswrapper[4773]: I0122 13:44:11.489167 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b2ba1736-246e-42ae-9249-e635b29993ae","Type":"ContainerStarted","Data":"ca06faac11c422aadc4e0462236a4c456e52844c9df311e9201170b168f6e5c5"} Jan 22 13:44:11 crc kubenswrapper[4773]: I0122 13:44:11.956927 4773 scope.go:117] "RemoveContainer" containerID="03ab9ec0d99f4b6cbc63f93fa3e034e57653be93c4c60e6b910b6218a3373f70" Jan 22 13:44:12 crc kubenswrapper[4773]: I0122 13:44:12.003732 4773 scope.go:117] "RemoveContainer" containerID="4c51bb3b636763d9a983c394a01fdb2b2207faf5ca471f7db77abd7ec9a76f93" Jan 22 13:44:12 crc kubenswrapper[4773]: I0122 13:44:12.078364 4773 scope.go:117] "RemoveContainer" 
containerID="879cd149d7544b2f8d5892b15d2f0516649eef95b56cb998b7e5898652ef4c90" Jan 22 13:44:12 crc kubenswrapper[4773]: I0122 13:44:12.130753 4773 scope.go:117] "RemoveContainer" containerID="d6902b610a467b025db273ef0022bd28db2eabfc0e0030d639257578f658b528" Jan 22 13:44:12 crc kubenswrapper[4773]: I0122 13:44:12.169133 4773 scope.go:117] "RemoveContainer" containerID="f704af551d1d977eb806cea7d27265bea74385548000cda73956e99983dd372f" Jan 22 13:44:12 crc kubenswrapper[4773]: I0122 13:44:12.236033 4773 scope.go:117] "RemoveContainer" containerID="41d17ec951c709ec52a04c0d579439533f01127f7683c9d2e2fbfbd23184ece7" Jan 22 13:44:12 crc kubenswrapper[4773]: I0122 13:44:12.276761 4773 scope.go:117] "RemoveContainer" containerID="4c321afd1b06b90404d62d5fa65b47ac50f2a4c0154dc8ae436154834c1721ba" Jan 22 13:44:12 crc kubenswrapper[4773]: I0122 13:44:12.309311 4773 scope.go:117] "RemoveContainer" containerID="7cbaae31421963b6f28ca2af7da2a8f4379da82ea2e3c7d1aacd16bd6accdd8b" Jan 22 13:44:12 crc kubenswrapper[4773]: I0122 13:44:12.357444 4773 scope.go:117] "RemoveContainer" containerID="4eae09eafdf8b1fd9443239c072d17d5ba48962677fb9907558b0d30ceec0a21" Jan 22 13:44:12 crc kubenswrapper[4773]: I0122 13:44:12.508658 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b2ba1736-246e-42ae-9249-e635b29993ae","Type":"ContainerStarted","Data":"39748796f81934609d96c0e0db99074ccdb986ce6616cf2d36d78a2ffdbe58f7"} Jan 22 13:44:12 crc kubenswrapper[4773]: I0122 13:44:12.673129 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afb23a79-30ca-4764-8e4b-2dcf54c41fac" path="/var/lib/kubelet/pods/afb23a79-30ca-4764-8e4b-2dcf54c41fac/volumes" Jan 22 13:44:14 crc kubenswrapper[4773]: I0122 13:44:14.529531 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-78669f889-54nfc" Jan 22 13:44:14 crc kubenswrapper[4773]: I0122 13:44:14.587811 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b2ba1736-246e-42ae-9249-e635b29993ae","Type":"ContainerStarted","Data":"8bff6d80fb4ebae2802995372a1546d68e7068255254ce52a0bef2d48d892562"} Jan 22 13:44:14 crc kubenswrapper[4773]: I0122 13:44:14.589756 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 22 13:44:14 crc kubenswrapper[4773]: I0122 13:44:14.606078 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b6b5ff785-qwfwg"] Jan 22 13:44:14 crc kubenswrapper[4773]: I0122 13:44:14.606388 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" podUID="2ea5428a-d493-4da7-9844-bdf9a53e0f45" containerName="dnsmasq-dns" containerID="cri-o://4a5cc44517398fef3f3fa9d045344a4e39ef46d30fdd36ecc8ea61943c16d2d9" gracePeriod=10 Jan 22 13:44:14 crc kubenswrapper[4773]: I0122 13:44:14.635677 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.6964588750000003 podStartE2EDuration="6.635653159s" podCreationTimestamp="2026-01-22 13:44:08 +0000 UTC" firstStartedPulling="2026-01-22 13:44:09.400922823 +0000 UTC m=+6556.979038648" lastFinishedPulling="2026-01-22 13:44:13.340117107 +0000 UTC m=+6560.918232932" observedRunningTime="2026-01-22 13:44:14.619787873 +0000 UTC m=+6562.197903708" watchObservedRunningTime="2026-01-22 13:44:14.635653159 +0000 UTC m=+6562.213768984" Jan 22 13:44:15 crc kubenswrapper[4773]: I0122 13:44:15.186098 4773 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 22 13:44:15 crc kubenswrapper[4773]: I0122 13:44:15.602942 4773 generic.go:334] "Generic (PLEG): container finished" podID="2ea5428a-d493-4da7-9844-bdf9a53e0f45" containerID="4a5cc44517398fef3f3fa9d045344a4e39ef46d30fdd36ecc8ea61943c16d2d9" exitCode=0 Jan 22 13:44:15 crc kubenswrapper[4773]: I0122 13:44:15.603042 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" event={"ID":"2ea5428a-d493-4da7-9844-bdf9a53e0f45","Type":"ContainerDied","Data":"4a5cc44517398fef3f3fa9d045344a4e39ef46d30fdd36ecc8ea61943c16d2d9"} Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.255686 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.338311 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-dns-svc\") pod \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.338672 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-openstack-cell1\") pod \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.338744 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-ovsdbserver-nb\") pod \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.338812 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-ovsdbserver-sb\") pod \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.339169 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-468mh\" (UniqueName: \"kubernetes.io/projected/2ea5428a-d493-4da7-9844-bdf9a53e0f45-kube-api-access-468mh\") pod \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.339254 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-config\") pod \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\" (UID: \"2ea5428a-d493-4da7-9844-bdf9a53e0f45\") " Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.353236 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ea5428a-d493-4da7-9844-bdf9a53e0f45-kube-api-access-468mh" (OuterVolumeSpecName: "kube-api-access-468mh") pod "2ea5428a-d493-4da7-9844-bdf9a53e0f45" (UID: "2ea5428a-d493-4da7-9844-bdf9a53e0f45"). InnerVolumeSpecName "kube-api-access-468mh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.415711 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-config" (OuterVolumeSpecName: "config") pod "2ea5428a-d493-4da7-9844-bdf9a53e0f45" (UID: "2ea5428a-d493-4da7-9844-bdf9a53e0f45"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.418103 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2ea5428a-d493-4da7-9844-bdf9a53e0f45" (UID: "2ea5428a-d493-4da7-9844-bdf9a53e0f45"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.425776 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2ea5428a-d493-4da7-9844-bdf9a53e0f45" (UID: "2ea5428a-d493-4da7-9844-bdf9a53e0f45"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.425947 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-openstack-cell1" (OuterVolumeSpecName: "openstack-cell1") pod "2ea5428a-d493-4da7-9844-bdf9a53e0f45" (UID: "2ea5428a-d493-4da7-9844-bdf9a53e0f45"). InnerVolumeSpecName "openstack-cell1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.442885 4773 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.442959 4773 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.442971 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.442982 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-468mh\" (UniqueName: \"kubernetes.io/projected/2ea5428a-d493-4da7-9844-bdf9a53e0f45-kube-api-access-468mh\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.443038 4773 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-config\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.447672 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2ea5428a-d493-4da7-9844-bdf9a53e0f45" (UID: "2ea5428a-d493-4da7-9844-bdf9a53e0f45"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.546067 4773 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ea5428a-d493-4da7-9844-bdf9a53e0f45-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.681803 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.681793 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" event={"ID":"2ea5428a-d493-4da7-9844-bdf9a53e0f45","Type":"ContainerDied","Data":"52783219fe13ad971b7b03f065b1beab6aacc21b08953428119659267eb0d190"} Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.681982 4773 scope.go:117] "RemoveContainer" containerID="4a5cc44517398fef3f3fa9d045344a4e39ef46d30fdd36ecc8ea61943c16d2d9" Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.685694 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" event={"ID":"654d399b-e1b1-41bd-86e4-a1806f829a79","Type":"ContainerStarted","Data":"89069c2b95fca0284cd2d4964812adec5da19394d25e4e44daa0eac31aaa512a"} Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.711341 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" podStartSLOduration=1.706867538 podStartE2EDuration="13.711317833s" podCreationTimestamp="2026-01-22 13:44:09 +0000 UTC" firstStartedPulling="2026-01-22 13:44:10.068076484 +0000 UTC m=+6557.646192309" lastFinishedPulling="2026-01-22 13:44:22.072526779 +0000 UTC m=+6569.650642604" observedRunningTime="2026-01-22 13:44:22.708212625 +0000 UTC m=+6570.286328490" watchObservedRunningTime="2026-01-22 13:44:22.711317833 +0000 UTC m=+6570.289433668" Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.743495 4773 scope.go:117] "RemoveContainer" containerID="ba28c12d32db1e3d09ad1fd0b3aa3e3c7ae7964ba316646057a8ed2a94cf93f4" Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.749460 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b6b5ff785-qwfwg"] Jan 22 13:44:22 crc kubenswrapper[4773]: I0122 13:44:22.769106 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b6b5ff785-qwfwg"] Jan 22 13:44:23 crc kubenswrapper[4773]: I0122 13:44:23.659573 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:44:23 crc kubenswrapper[4773]: E0122 13:44:23.660332 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:44:23 crc kubenswrapper[4773]: I0122 13:44:23.676729 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5b6b5ff785-qwfwg" podUID="2ea5428a-d493-4da7-9844-bdf9a53e0f45" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.161:5353: i/o timeout" Jan 22 13:44:24 crc kubenswrapper[4773]: I0122 
13:44:24.675176 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ea5428a-d493-4da7-9844-bdf9a53e0f45" path="/var/lib/kubelet/pods/2ea5428a-d493-4da7-9844-bdf9a53e0f45/volumes" Jan 22 13:44:35 crc kubenswrapper[4773]: I0122 13:44:35.847603 4773 generic.go:334] "Generic (PLEG): container finished" podID="654d399b-e1b1-41bd-86e4-a1806f829a79" containerID="89069c2b95fca0284cd2d4964812adec5da19394d25e4e44daa0eac31aaa512a" exitCode=0 Jan 22 13:44:35 crc kubenswrapper[4773]: I0122 13:44:35.847714 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" event={"ID":"654d399b-e1b1-41bd-86e4-a1806f829a79","Type":"ContainerDied","Data":"89069c2b95fca0284cd2d4964812adec5da19394d25e4e44daa0eac31aaa512a"} Jan 22 13:44:36 crc kubenswrapper[4773]: I0122 13:44:36.658947 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:44:36 crc kubenswrapper[4773]: E0122 13:44:36.659829 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:44:37 crc kubenswrapper[4773]: I0122 13:44:37.333921 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" Jan 22 13:44:37 crc kubenswrapper[4773]: I0122 13:44:37.430060 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/654d399b-e1b1-41bd-86e4-a1806f829a79-ssh-key-openstack-cell1\") pod \"654d399b-e1b1-41bd-86e4-a1806f829a79\" (UID: \"654d399b-e1b1-41bd-86e4-a1806f829a79\") " Jan 22 13:44:37 crc kubenswrapper[4773]: I0122 13:44:37.430215 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqxfb\" (UniqueName: \"kubernetes.io/projected/654d399b-e1b1-41bd-86e4-a1806f829a79-kube-api-access-mqxfb\") pod \"654d399b-e1b1-41bd-86e4-a1806f829a79\" (UID: \"654d399b-e1b1-41bd-86e4-a1806f829a79\") " Jan 22 13:44:37 crc kubenswrapper[4773]: I0122 13:44:37.430313 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/654d399b-e1b1-41bd-86e4-a1806f829a79-pre-adoption-validation-combined-ca-bundle\") pod \"654d399b-e1b1-41bd-86e4-a1806f829a79\" (UID: \"654d399b-e1b1-41bd-86e4-a1806f829a79\") " Jan 22 13:44:37 crc kubenswrapper[4773]: I0122 13:44:37.430390 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/654d399b-e1b1-41bd-86e4-a1806f829a79-inventory\") pod \"654d399b-e1b1-41bd-86e4-a1806f829a79\" (UID: \"654d399b-e1b1-41bd-86e4-a1806f829a79\") " Jan 22 13:44:37 crc kubenswrapper[4773]: I0122 13:44:37.438612 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/654d399b-e1b1-41bd-86e4-a1806f829a79-pre-adoption-validation-combined-ca-bundle" (OuterVolumeSpecName: "pre-adoption-validation-combined-ca-bundle") pod "654d399b-e1b1-41bd-86e4-a1806f829a79" (UID: 
"654d399b-e1b1-41bd-86e4-a1806f829a79"). InnerVolumeSpecName "pre-adoption-validation-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:44:37 crc kubenswrapper[4773]: I0122 13:44:37.438878 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/654d399b-e1b1-41bd-86e4-a1806f829a79-kube-api-access-mqxfb" (OuterVolumeSpecName: "kube-api-access-mqxfb") pod "654d399b-e1b1-41bd-86e4-a1806f829a79" (UID: "654d399b-e1b1-41bd-86e4-a1806f829a79"). InnerVolumeSpecName "kube-api-access-mqxfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:44:37 crc kubenswrapper[4773]: I0122 13:44:37.465590 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/654d399b-e1b1-41bd-86e4-a1806f829a79-inventory" (OuterVolumeSpecName: "inventory") pod "654d399b-e1b1-41bd-86e4-a1806f829a79" (UID: "654d399b-e1b1-41bd-86e4-a1806f829a79"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:44:37 crc kubenswrapper[4773]: I0122 13:44:37.467831 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/654d399b-e1b1-41bd-86e4-a1806f829a79-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "654d399b-e1b1-41bd-86e4-a1806f829a79" (UID: "654d399b-e1b1-41bd-86e4-a1806f829a79"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:44:37 crc kubenswrapper[4773]: I0122 13:44:37.533590 4773 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/654d399b-e1b1-41bd-86e4-a1806f829a79-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:37 crc kubenswrapper[4773]: I0122 13:44:37.533651 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/654d399b-e1b1-41bd-86e4-a1806f829a79-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:37 crc kubenswrapper[4773]: I0122 13:44:37.533664 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqxfb\" (UniqueName: \"kubernetes.io/projected/654d399b-e1b1-41bd-86e4-a1806f829a79-kube-api-access-mqxfb\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:37 crc kubenswrapper[4773]: I0122 13:44:37.533674 4773 reconciler_common.go:293] "Volume detached for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/654d399b-e1b1-41bd-86e4-a1806f829a79-pre-adoption-validation-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:44:37 crc kubenswrapper[4773]: I0122 13:44:37.890757 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" event={"ID":"654d399b-e1b1-41bd-86e4-a1806f829a79","Type":"ContainerDied","Data":"90d1858741241f22e0886c690e6d285857108c92d9bcc8cd319569d1bae8e0c5"} Jan 22 13:44:37 crc kubenswrapper[4773]: I0122 13:44:37.891146 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="90d1858741241f22e0886c690e6d285857108c92d9bcc8cd319569d1bae8e0c5" Jan 22 13:44:37 crc kubenswrapper[4773]: I0122 13:44:37.890830 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz" Jan 22 13:44:38 crc kubenswrapper[4773]: I0122 13:44:38.848867 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.613955 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s"] Jan 22 13:44:46 crc kubenswrapper[4773]: E0122 13:44:46.614935 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ea5428a-d493-4da7-9844-bdf9a53e0f45" containerName="init" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.614949 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ea5428a-d493-4da7-9844-bdf9a53e0f45" containerName="init" Jan 22 13:44:46 crc kubenswrapper[4773]: E0122 13:44:46.614974 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ea5428a-d493-4da7-9844-bdf9a53e0f45" containerName="dnsmasq-dns" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.614980 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ea5428a-d493-4da7-9844-bdf9a53e0f45" containerName="dnsmasq-dns" Jan 22 13:44:46 crc kubenswrapper[4773]: E0122 13:44:46.614991 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="654d399b-e1b1-41bd-86e4-a1806f829a79" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.614999 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="654d399b-e1b1-41bd-86e4-a1806f829a79" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.615214 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="654d399b-e1b1-41bd-86e4-a1806f829a79" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.615266 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ea5428a-d493-4da7-9844-bdf9a53e0f45" containerName="dnsmasq-dns" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.616326 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.619544 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.619980 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.620194 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-bzqnq" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.620506 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.636697 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s"] Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.684676 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69b32bbb-8065-4c41-91bb-3f13e9e321d0-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s\" (UID: \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.684721 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b2mv\" (UniqueName: \"kubernetes.io/projected/69b32bbb-8065-4c41-91bb-3f13e9e321d0-kube-api-access-8b2mv\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s\" (UID: \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.684748 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69b32bbb-8065-4c41-91bb-3f13e9e321d0-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s\" (UID: \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.684982 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/69b32bbb-8065-4c41-91bb-3f13e9e321d0-ssh-key-openstack-cell1\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s\" (UID: \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.787781 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69b32bbb-8065-4c41-91bb-3f13e9e321d0-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s\" (UID: \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.787838 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b2mv\" (UniqueName: 
\"kubernetes.io/projected/69b32bbb-8065-4c41-91bb-3f13e9e321d0-kube-api-access-8b2mv\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s\" (UID: \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.787883 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69b32bbb-8065-4c41-91bb-3f13e9e321d0-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s\" (UID: \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.787966 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/69b32bbb-8065-4c41-91bb-3f13e9e321d0-ssh-key-openstack-cell1\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s\" (UID: \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.796102 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69b32bbb-8065-4c41-91bb-3f13e9e321d0-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s\" (UID: \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.797112 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69b32bbb-8065-4c41-91bb-3f13e9e321d0-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s\" (UID: \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.799255 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/69b32bbb-8065-4c41-91bb-3f13e9e321d0-ssh-key-openstack-cell1\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s\" (UID: \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.813926 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8b2mv\" (UniqueName: \"kubernetes.io/projected/69b32bbb-8065-4c41-91bb-3f13e9e321d0-kube-api-access-8b2mv\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s\" (UID: \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" Jan 22 13:44:46 crc kubenswrapper[4773]: I0122 13:44:46.955249 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" Jan 22 13:44:47 crc kubenswrapper[4773]: I0122 13:44:47.616795 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s"] Jan 22 13:44:48 crc kubenswrapper[4773]: I0122 13:44:48.035673 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" event={"ID":"69b32bbb-8065-4c41-91bb-3f13e9e321d0","Type":"ContainerStarted","Data":"0118fdb1f75003b6d85cc48dc54ef6b1b7ac77f1c8f34e0ef5d18e962ab3a020"} Jan 22 13:44:49 crc kubenswrapper[4773]: I0122 13:44:49.047817 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" event={"ID":"69b32bbb-8065-4c41-91bb-3f13e9e321d0","Type":"ContainerStarted","Data":"d635c41e00c085db88c3c69a166485f45d9263dfedc7cbed265571c53aab3abc"} Jan 22 13:44:49 crc kubenswrapper[4773]: I0122 13:44:49.082037 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" podStartSLOduration=2.601609587 podStartE2EDuration="3.082005649s" podCreationTimestamp="2026-01-22 13:44:46 +0000 UTC" firstStartedPulling="2026-01-22 13:44:47.620846263 +0000 UTC m=+6595.198962098" lastFinishedPulling="2026-01-22 13:44:48.101242335 +0000 UTC m=+6595.679358160" observedRunningTime="2026-01-22 13:44:49.077686868 +0000 UTC m=+6596.655802733" watchObservedRunningTime="2026-01-22 13:44:49.082005649 +0000 UTC m=+6596.660121514" Jan 22 13:44:50 crc kubenswrapper[4773]: I0122 13:44:50.659359 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:44:50 crc kubenswrapper[4773]: E0122 13:44:50.659982 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:45:00 crc kubenswrapper[4773]: I0122 13:45:00.145924 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg"] Jan 22 13:45:00 crc kubenswrapper[4773]: I0122 13:45:00.148913 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg" Jan 22 13:45:00 crc kubenswrapper[4773]: I0122 13:45:00.160094 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg"] Jan 22 13:45:00 crc kubenswrapper[4773]: I0122 13:45:00.199448 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 13:45:00 crc kubenswrapper[4773]: I0122 13:45:00.199547 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b817b142-11f2-4a47-a22f-9c069367fdad-secret-volume\") pod \"collect-profiles-29484825-wqrtg\" (UID: \"b817b142-11f2-4a47-a22f-9c069367fdad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg" Jan 22 13:45:00 crc kubenswrapper[4773]: I0122 13:45:00.199657 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b817b142-11f2-4a47-a22f-9c069367fdad-config-volume\") pod \"collect-profiles-29484825-wqrtg\" (UID: \"b817b142-11f2-4a47-a22f-9c069367fdad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg" Jan 22 13:45:00 crc kubenswrapper[4773]: I0122 13:45:00.199880 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 13:45:00 crc kubenswrapper[4773]: I0122 13:45:00.199890 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4m6vm\" (UniqueName: \"kubernetes.io/projected/b817b142-11f2-4a47-a22f-9c069367fdad-kube-api-access-4m6vm\") pod \"collect-profiles-29484825-wqrtg\" (UID: \"b817b142-11f2-4a47-a22f-9c069367fdad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg" Jan 22 13:45:00 crc kubenswrapper[4773]: I0122 13:45:00.301969 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b817b142-11f2-4a47-a22f-9c069367fdad-secret-volume\") pod \"collect-profiles-29484825-wqrtg\" (UID: \"b817b142-11f2-4a47-a22f-9c069367fdad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg" Jan 22 13:45:00 crc kubenswrapper[4773]: I0122 13:45:00.302061 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b817b142-11f2-4a47-a22f-9c069367fdad-config-volume\") pod \"collect-profiles-29484825-wqrtg\" (UID: \"b817b142-11f2-4a47-a22f-9c069367fdad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg" Jan 22 13:45:00 crc kubenswrapper[4773]: I0122 13:45:00.302173 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4m6vm\" (UniqueName: \"kubernetes.io/projected/b817b142-11f2-4a47-a22f-9c069367fdad-kube-api-access-4m6vm\") pod \"collect-profiles-29484825-wqrtg\" (UID: \"b817b142-11f2-4a47-a22f-9c069367fdad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg" Jan 22 13:45:00 crc kubenswrapper[4773]: I0122 13:45:00.303164 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b817b142-11f2-4a47-a22f-9c069367fdad-config-volume\") pod 
\"collect-profiles-29484825-wqrtg\" (UID: \"b817b142-11f2-4a47-a22f-9c069367fdad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg" Jan 22 13:45:00 crc kubenswrapper[4773]: I0122 13:45:00.308710 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b817b142-11f2-4a47-a22f-9c069367fdad-secret-volume\") pod \"collect-profiles-29484825-wqrtg\" (UID: \"b817b142-11f2-4a47-a22f-9c069367fdad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg" Jan 22 13:45:00 crc kubenswrapper[4773]: I0122 13:45:00.321481 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4m6vm\" (UniqueName: \"kubernetes.io/projected/b817b142-11f2-4a47-a22f-9c069367fdad-kube-api-access-4m6vm\") pod \"collect-profiles-29484825-wqrtg\" (UID: \"b817b142-11f2-4a47-a22f-9c069367fdad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg" Jan 22 13:45:00 crc kubenswrapper[4773]: I0122 13:45:00.528485 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg" Jan 22 13:45:00 crc kubenswrapper[4773]: I0122 13:45:00.990863 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg"] Jan 22 13:45:01 crc kubenswrapper[4773]: I0122 13:45:01.219965 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg" event={"ID":"b817b142-11f2-4a47-a22f-9c069367fdad","Type":"ContainerStarted","Data":"439bc0c45b59d44bf8d65ef287b86cd1c7e6669d84e8d222c333b7687c485497"} Jan 22 13:45:01 crc kubenswrapper[4773]: I0122 13:45:01.220020 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg" event={"ID":"b817b142-11f2-4a47-a22f-9c069367fdad","Type":"ContainerStarted","Data":"57247d1f1d2d15d615a3a725742bc1326f96e4821d19480176c664e56b03a118"} Jan 22 13:45:01 crc kubenswrapper[4773]: I0122 13:45:01.252185 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg" podStartSLOduration=1.2521622319999999 podStartE2EDuration="1.252162232s" podCreationTimestamp="2026-01-22 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 13:45:01.238700573 +0000 UTC m=+6608.816816448" watchObservedRunningTime="2026-01-22 13:45:01.252162232 +0000 UTC m=+6608.830278057" Jan 22 13:45:02 crc kubenswrapper[4773]: I0122 13:45:02.230843 4773 generic.go:334] "Generic (PLEG): container finished" podID="b817b142-11f2-4a47-a22f-9c069367fdad" containerID="439bc0c45b59d44bf8d65ef287b86cd1c7e6669d84e8d222c333b7687c485497" exitCode=0 Jan 22 13:45:02 crc kubenswrapper[4773]: I0122 13:45:02.230889 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg" event={"ID":"b817b142-11f2-4a47-a22f-9c069367fdad","Type":"ContainerDied","Data":"439bc0c45b59d44bf8d65ef287b86cd1c7e6669d84e8d222c333b7687c485497"} Jan 22 13:45:03 crc kubenswrapper[4773]: I0122 13:45:03.658804 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:45:03 crc kubenswrapper[4773]: E0122 13:45:03.659654 4773 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:45:03 crc kubenswrapper[4773]: I0122 13:45:03.686106 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg" Jan 22 13:45:03 crc kubenswrapper[4773]: I0122 13:45:03.796889 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4m6vm\" (UniqueName: \"kubernetes.io/projected/b817b142-11f2-4a47-a22f-9c069367fdad-kube-api-access-4m6vm\") pod \"b817b142-11f2-4a47-a22f-9c069367fdad\" (UID: \"b817b142-11f2-4a47-a22f-9c069367fdad\") " Jan 22 13:45:03 crc kubenswrapper[4773]: I0122 13:45:03.797260 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b817b142-11f2-4a47-a22f-9c069367fdad-config-volume\") pod \"b817b142-11f2-4a47-a22f-9c069367fdad\" (UID: \"b817b142-11f2-4a47-a22f-9c069367fdad\") " Jan 22 13:45:03 crc kubenswrapper[4773]: I0122 13:45:03.797614 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b817b142-11f2-4a47-a22f-9c069367fdad-secret-volume\") pod \"b817b142-11f2-4a47-a22f-9c069367fdad\" (UID: \"b817b142-11f2-4a47-a22f-9c069367fdad\") " Jan 22 13:45:03 crc kubenswrapper[4773]: I0122 13:45:03.797859 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b817b142-11f2-4a47-a22f-9c069367fdad-config-volume" (OuterVolumeSpecName: "config-volume") pod "b817b142-11f2-4a47-a22f-9c069367fdad" (UID: "b817b142-11f2-4a47-a22f-9c069367fdad"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 13:45:03 crc kubenswrapper[4773]: I0122 13:45:03.800126 4773 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b817b142-11f2-4a47-a22f-9c069367fdad-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 13:45:03 crc kubenswrapper[4773]: I0122 13:45:03.804674 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b817b142-11f2-4a47-a22f-9c069367fdad-kube-api-access-4m6vm" (OuterVolumeSpecName: "kube-api-access-4m6vm") pod "b817b142-11f2-4a47-a22f-9c069367fdad" (UID: "b817b142-11f2-4a47-a22f-9c069367fdad"). InnerVolumeSpecName "kube-api-access-4m6vm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:45:03 crc kubenswrapper[4773]: I0122 13:45:03.804857 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b817b142-11f2-4a47-a22f-9c069367fdad-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b817b142-11f2-4a47-a22f-9c069367fdad" (UID: "b817b142-11f2-4a47-a22f-9c069367fdad"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:45:03 crc kubenswrapper[4773]: I0122 13:45:03.904697 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4m6vm\" (UniqueName: \"kubernetes.io/projected/b817b142-11f2-4a47-a22f-9c069367fdad-kube-api-access-4m6vm\") on node \"crc\" DevicePath \"\"" Jan 22 13:45:03 crc kubenswrapper[4773]: I0122 13:45:03.905130 4773 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b817b142-11f2-4a47-a22f-9c069367fdad-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 13:45:04 crc kubenswrapper[4773]: I0122 13:45:04.257052 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg" event={"ID":"b817b142-11f2-4a47-a22f-9c069367fdad","Type":"ContainerDied","Data":"57247d1f1d2d15d615a3a725742bc1326f96e4821d19480176c664e56b03a118"} Jan 22 13:45:04 crc kubenswrapper[4773]: I0122 13:45:04.257101 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="57247d1f1d2d15d615a3a725742bc1326f96e4821d19480176c664e56b03a118" Jan 22 13:45:04 crc kubenswrapper[4773]: I0122 13:45:04.257129 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg" Jan 22 13:45:04 crc kubenswrapper[4773]: I0122 13:45:04.353253 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb"] Jan 22 13:45:04 crc kubenswrapper[4773]: I0122 13:45:04.366850 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484780-9bdxb"] Jan 22 13:45:04 crc kubenswrapper[4773]: I0122 13:45:04.673018 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91935200-38fc-41c7-ac2c-af16ef175984" path="/var/lib/kubelet/pods/91935200-38fc-41c7-ac2c-af16ef175984/volumes" Jan 22 13:45:12 crc kubenswrapper[4773]: I0122 13:45:12.704552 4773 scope.go:117] "RemoveContainer" containerID="f3fa9825be045e2cf587331db081a69f769821a9699a4a66fdeae97704bd3d26" Jan 22 13:45:12 crc kubenswrapper[4773]: I0122 13:45:12.732905 4773 scope.go:117] "RemoveContainer" containerID="60702661b7c83eb6e6dc5e8f34f1bcd1513e5efd77c433803e28a657e3025bf5" Jan 22 13:45:15 crc kubenswrapper[4773]: I0122 13:45:15.659107 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:45:15 crc kubenswrapper[4773]: E0122 13:45:15.660210 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:45:29 crc kubenswrapper[4773]: I0122 13:45:29.658098 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:45:29 crc kubenswrapper[4773]: E0122 13:45:29.658896 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:45:39 crc kubenswrapper[4773]: I0122 13:45:39.056014 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-create-skl29"] Jan 22 13:45:39 crc kubenswrapper[4773]: I0122 13:45:39.070190 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-create-skl29"] Jan 22 13:45:40 crc kubenswrapper[4773]: I0122 13:45:40.039376 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-1767-account-create-update-h75t5"] Jan 22 13:45:40 crc kubenswrapper[4773]: I0122 13:45:40.051706 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-1767-account-create-update-h75t5"] Jan 22 13:45:40 crc kubenswrapper[4773]: I0122 13:45:40.675137 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78084244-a181-41ba-9993-87a0d3014a91" path="/var/lib/kubelet/pods/78084244-a181-41ba-9993-87a0d3014a91/volumes" Jan 22 13:45:40 crc kubenswrapper[4773]: I0122 13:45:40.676626 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8d6481c-aa92-4c21-8d99-7154c471951b" path="/var/lib/kubelet/pods/d8d6481c-aa92-4c21-8d99-7154c471951b/volumes" Jan 22 13:45:42 crc kubenswrapper[4773]: I0122 13:45:42.658267 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:45:42 crc kubenswrapper[4773]: E0122 13:45:42.658633 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:45:46 crc kubenswrapper[4773]: I0122 13:45:46.031563 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-persistence-db-create-tkbc8"] Jan 22 13:45:46 crc kubenswrapper[4773]: I0122 13:45:46.062718 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-persistence-db-create-tkbc8"] Jan 22 13:45:46 crc kubenswrapper[4773]: I0122 13:45:46.670713 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76d59e78-a628-42f0-aa02-eb2a5d96dac2" path="/var/lib/kubelet/pods/76d59e78-a628-42f0-aa02-eb2a5d96dac2/volumes" Jan 22 13:45:47 crc kubenswrapper[4773]: I0122 13:45:47.042494 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-3cf3-account-create-update-tgt4t"] Jan 22 13:45:47 crc kubenswrapper[4773]: I0122 13:45:47.059120 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-3cf3-account-create-update-tgt4t"] Jan 22 13:45:48 crc kubenswrapper[4773]: I0122 13:45:48.674012 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64009709-be80-4240-9fa6-15565662fd2e" path="/var/lib/kubelet/pods/64009709-be80-4240-9fa6-15565662fd2e/volumes" Jan 22 13:45:53 crc kubenswrapper[4773]: I0122 13:45:53.659826 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:45:53 crc kubenswrapper[4773]: E0122 13:45:53.660845 4773 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:46:04 crc kubenswrapper[4773]: I0122 13:46:04.657930 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:46:04 crc kubenswrapper[4773]: E0122 13:46:04.658752 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:46:12 crc kubenswrapper[4773]: I0122 13:46:12.898500 4773 scope.go:117] "RemoveContainer" containerID="00d3da605324b27d5aceb91feea0f3cf173f8d9a75f4683662d6bed642c24fc1" Jan 22 13:46:12 crc kubenswrapper[4773]: I0122 13:46:12.941124 4773 scope.go:117] "RemoveContainer" containerID="66ebdeec548397d984c050c848e7c04e51a645ec48b00c8bf99c916589f2c4f7" Jan 22 13:46:13 crc kubenswrapper[4773]: I0122 13:46:13.015792 4773 scope.go:117] "RemoveContainer" containerID="41fb6d9f0a8ec944d628ac02af22b171d27c0b1b1eb264b243c3476727176991" Jan 22 13:46:13 crc kubenswrapper[4773]: I0122 13:46:13.071990 4773 scope.go:117] "RemoveContainer" containerID="f33dd1bb673722298edfa0578a13ea0bec6c4fd19dd0a53d92c07ba84bc2f579" Jan 22 13:46:15 crc kubenswrapper[4773]: I0122 13:46:15.659787 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:46:15 crc kubenswrapper[4773]: E0122 13:46:15.661075 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:46:27 crc kubenswrapper[4773]: I0122 13:46:27.658815 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:46:27 crc kubenswrapper[4773]: E0122 13:46:27.659642 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:46:42 crc kubenswrapper[4773]: I0122 13:46:42.047321 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-sync-b6m44"] Jan 22 13:46:42 crc kubenswrapper[4773]: I0122 13:46:42.069504 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-sync-b6m44"] Jan 22 13:46:42 crc kubenswrapper[4773]: I0122 13:46:42.666868 4773 scope.go:117] "RemoveContainer" 
containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:46:42 crc kubenswrapper[4773]: I0122 13:46:42.669044 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35a5e8ef-a82d-4026-98c0-b57c0a107208" path="/var/lib/kubelet/pods/35a5e8ef-a82d-4026-98c0-b57c0a107208/volumes" Jan 22 13:46:43 crc kubenswrapper[4773]: I0122 13:46:43.407271 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"b47baa0164b891015177a82736873324a0cec4115ca361eb866de456f186e0b2"} Jan 22 13:47:13 crc kubenswrapper[4773]: I0122 13:47:13.215639 4773 scope.go:117] "RemoveContainer" containerID="adf7f96ca2a96b386216f66a53081d021f5ddd46940334b8997eeb7eefb73342" Jan 22 13:47:13 crc kubenswrapper[4773]: I0122 13:47:13.256851 4773 scope.go:117] "RemoveContainer" containerID="07dcdb7979eec8cfc876588c70072ee39491cbc7f126eed9df73aa0625955496" Jan 22 13:49:04 crc kubenswrapper[4773]: I0122 13:49:04.074606 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:49:04 crc kubenswrapper[4773]: I0122 13:49:04.076336 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:49:12 crc kubenswrapper[4773]: I0122 13:49:12.151816 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-s7dc7"] Jan 22 13:49:12 crc kubenswrapper[4773]: E0122 13:49:12.153098 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b817b142-11f2-4a47-a22f-9c069367fdad" containerName="collect-profiles" Jan 22 13:49:12 crc kubenswrapper[4773]: I0122 13:49:12.153120 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b817b142-11f2-4a47-a22f-9c069367fdad" containerName="collect-profiles" Jan 22 13:49:12 crc kubenswrapper[4773]: I0122 13:49:12.153424 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="b817b142-11f2-4a47-a22f-9c069367fdad" containerName="collect-profiles" Jan 22 13:49:12 crc kubenswrapper[4773]: I0122 13:49:12.155647 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s7dc7" Jan 22 13:49:12 crc kubenswrapper[4773]: I0122 13:49:12.168068 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s7dc7"] Jan 22 13:49:12 crc kubenswrapper[4773]: I0122 13:49:12.172917 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ce7d95f-711c-469d-87a3-d010249feec9-catalog-content\") pod \"redhat-marketplace-s7dc7\" (UID: \"4ce7d95f-711c-469d-87a3-d010249feec9\") " pod="openshift-marketplace/redhat-marketplace-s7dc7" Jan 22 13:49:12 crc kubenswrapper[4773]: I0122 13:49:12.172998 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ce7d95f-711c-469d-87a3-d010249feec9-utilities\") pod \"redhat-marketplace-s7dc7\" (UID: \"4ce7d95f-711c-469d-87a3-d010249feec9\") " pod="openshift-marketplace/redhat-marketplace-s7dc7" Jan 22 13:49:12 crc kubenswrapper[4773]: I0122 13:49:12.173089 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmbwv\" (UniqueName: \"kubernetes.io/projected/4ce7d95f-711c-469d-87a3-d010249feec9-kube-api-access-nmbwv\") pod \"redhat-marketplace-s7dc7\" (UID: \"4ce7d95f-711c-469d-87a3-d010249feec9\") " pod="openshift-marketplace/redhat-marketplace-s7dc7" Jan 22 13:49:12 crc kubenswrapper[4773]: I0122 13:49:12.275489 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ce7d95f-711c-469d-87a3-d010249feec9-catalog-content\") pod \"redhat-marketplace-s7dc7\" (UID: \"4ce7d95f-711c-469d-87a3-d010249feec9\") " pod="openshift-marketplace/redhat-marketplace-s7dc7" Jan 22 13:49:12 crc kubenswrapper[4773]: I0122 13:49:12.275539 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ce7d95f-711c-469d-87a3-d010249feec9-utilities\") pod \"redhat-marketplace-s7dc7\" (UID: \"4ce7d95f-711c-469d-87a3-d010249feec9\") " pod="openshift-marketplace/redhat-marketplace-s7dc7" Jan 22 13:49:12 crc kubenswrapper[4773]: I0122 13:49:12.275642 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmbwv\" (UniqueName: \"kubernetes.io/projected/4ce7d95f-711c-469d-87a3-d010249feec9-kube-api-access-nmbwv\") pod \"redhat-marketplace-s7dc7\" (UID: \"4ce7d95f-711c-469d-87a3-d010249feec9\") " pod="openshift-marketplace/redhat-marketplace-s7dc7" Jan 22 13:49:12 crc kubenswrapper[4773]: I0122 13:49:12.276172 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ce7d95f-711c-469d-87a3-d010249feec9-catalog-content\") pod \"redhat-marketplace-s7dc7\" (UID: \"4ce7d95f-711c-469d-87a3-d010249feec9\") " pod="openshift-marketplace/redhat-marketplace-s7dc7" Jan 22 13:49:12 crc kubenswrapper[4773]: I0122 13:49:12.276509 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ce7d95f-711c-469d-87a3-d010249feec9-utilities\") pod \"redhat-marketplace-s7dc7\" (UID: \"4ce7d95f-711c-469d-87a3-d010249feec9\") " pod="openshift-marketplace/redhat-marketplace-s7dc7" Jan 22 13:49:12 crc kubenswrapper[4773]: I0122 13:49:12.302970 4773 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-nmbwv\" (UniqueName: \"kubernetes.io/projected/4ce7d95f-711c-469d-87a3-d010249feec9-kube-api-access-nmbwv\") pod \"redhat-marketplace-s7dc7\" (UID: \"4ce7d95f-711c-469d-87a3-d010249feec9\") " pod="openshift-marketplace/redhat-marketplace-s7dc7" Jan 22 13:49:12 crc kubenswrapper[4773]: I0122 13:49:12.485579 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s7dc7" Jan 22 13:49:12 crc kubenswrapper[4773]: I0122 13:49:12.988971 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s7dc7"] Jan 22 13:49:13 crc kubenswrapper[4773]: I0122 13:49:13.576104 4773 generic.go:334] "Generic (PLEG): container finished" podID="4ce7d95f-711c-469d-87a3-d010249feec9" containerID="81d8f7c4d58c588cb6d87534581eea138fa35d29aecafa0d6f7a18a0e2f198b4" exitCode=0 Jan 22 13:49:13 crc kubenswrapper[4773]: I0122 13:49:13.576182 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s7dc7" event={"ID":"4ce7d95f-711c-469d-87a3-d010249feec9","Type":"ContainerDied","Data":"81d8f7c4d58c588cb6d87534581eea138fa35d29aecafa0d6f7a18a0e2f198b4"} Jan 22 13:49:13 crc kubenswrapper[4773]: I0122 13:49:13.576651 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s7dc7" event={"ID":"4ce7d95f-711c-469d-87a3-d010249feec9","Type":"ContainerStarted","Data":"1b2de3e597d6359152d542385495a8fb027cff0fb367fb76430e98b104eba13b"} Jan 22 13:49:13 crc kubenswrapper[4773]: I0122 13:49:13.579053 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 13:49:14 crc kubenswrapper[4773]: I0122 13:49:14.587087 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s7dc7" event={"ID":"4ce7d95f-711c-469d-87a3-d010249feec9","Type":"ContainerStarted","Data":"529caef520fed119317af29cf41c5c7d8d166b56bf6dadfe095f28c8eaf898f9"} Jan 22 13:49:15 crc kubenswrapper[4773]: E0122 13:49:15.340089 4773 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4ce7d95f_711c_469d_87a3_d010249feec9.slice/crio-conmon-529caef520fed119317af29cf41c5c7d8d166b56bf6dadfe095f28c8eaf898f9.scope\": RecentStats: unable to find data in memory cache]" Jan 22 13:49:15 crc kubenswrapper[4773]: I0122 13:49:15.601906 4773 generic.go:334] "Generic (PLEG): container finished" podID="4ce7d95f-711c-469d-87a3-d010249feec9" containerID="529caef520fed119317af29cf41c5c7d8d166b56bf6dadfe095f28c8eaf898f9" exitCode=0 Jan 22 13:49:15 crc kubenswrapper[4773]: I0122 13:49:15.602269 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s7dc7" event={"ID":"4ce7d95f-711c-469d-87a3-d010249feec9","Type":"ContainerDied","Data":"529caef520fed119317af29cf41c5c7d8d166b56bf6dadfe095f28c8eaf898f9"} Jan 22 13:49:16 crc kubenswrapper[4773]: I0122 13:49:16.622184 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s7dc7" event={"ID":"4ce7d95f-711c-469d-87a3-d010249feec9","Type":"ContainerStarted","Data":"4bfa811cc676ee640b025615b7a5344a591b884709ad884989036ec3ef7156a2"} Jan 22 13:49:16 crc kubenswrapper[4773]: I0122 13:49:16.649024 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-s7dc7" 
podStartSLOduration=2.233536888 podStartE2EDuration="4.64898191s" podCreationTimestamp="2026-01-22 13:49:12 +0000 UTC" firstStartedPulling="2026-01-22 13:49:13.578815758 +0000 UTC m=+6861.156931573" lastFinishedPulling="2026-01-22 13:49:15.99426077 +0000 UTC m=+6863.572376595" observedRunningTime="2026-01-22 13:49:16.646977804 +0000 UTC m=+6864.225093669" watchObservedRunningTime="2026-01-22 13:49:16.64898191 +0000 UTC m=+6864.227097785" Jan 22 13:49:22 crc kubenswrapper[4773]: I0122 13:49:22.486640 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-s7dc7" Jan 22 13:49:22 crc kubenswrapper[4773]: I0122 13:49:22.487523 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-s7dc7" Jan 22 13:49:22 crc kubenswrapper[4773]: I0122 13:49:22.574527 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-s7dc7" Jan 22 13:49:22 crc kubenswrapper[4773]: I0122 13:49:22.759739 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-s7dc7" Jan 22 13:49:22 crc kubenswrapper[4773]: I0122 13:49:22.832822 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-s7dc7"] Jan 22 13:49:24 crc kubenswrapper[4773]: I0122 13:49:24.704724 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-s7dc7" podUID="4ce7d95f-711c-469d-87a3-d010249feec9" containerName="registry-server" containerID="cri-o://4bfa811cc676ee640b025615b7a5344a591b884709ad884989036ec3ef7156a2" gracePeriod=2 Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.216604 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s7dc7" Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.298522 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmbwv\" (UniqueName: \"kubernetes.io/projected/4ce7d95f-711c-469d-87a3-d010249feec9-kube-api-access-nmbwv\") pod \"4ce7d95f-711c-469d-87a3-d010249feec9\" (UID: \"4ce7d95f-711c-469d-87a3-d010249feec9\") " Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.298720 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ce7d95f-711c-469d-87a3-d010249feec9-utilities\") pod \"4ce7d95f-711c-469d-87a3-d010249feec9\" (UID: \"4ce7d95f-711c-469d-87a3-d010249feec9\") " Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.298791 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ce7d95f-711c-469d-87a3-d010249feec9-catalog-content\") pod \"4ce7d95f-711c-469d-87a3-d010249feec9\" (UID: \"4ce7d95f-711c-469d-87a3-d010249feec9\") " Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.300185 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ce7d95f-711c-469d-87a3-d010249feec9-utilities" (OuterVolumeSpecName: "utilities") pod "4ce7d95f-711c-469d-87a3-d010249feec9" (UID: "4ce7d95f-711c-469d-87a3-d010249feec9"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.308179 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ce7d95f-711c-469d-87a3-d010249feec9-kube-api-access-nmbwv" (OuterVolumeSpecName: "kube-api-access-nmbwv") pod "4ce7d95f-711c-469d-87a3-d010249feec9" (UID: "4ce7d95f-711c-469d-87a3-d010249feec9"). InnerVolumeSpecName "kube-api-access-nmbwv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.344226 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ce7d95f-711c-469d-87a3-d010249feec9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4ce7d95f-711c-469d-87a3-d010249feec9" (UID: "4ce7d95f-711c-469d-87a3-d010249feec9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.401061 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ce7d95f-711c-469d-87a3-d010249feec9-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.401088 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ce7d95f-711c-469d-87a3-d010249feec9-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.401102 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmbwv\" (UniqueName: \"kubernetes.io/projected/4ce7d95f-711c-469d-87a3-d010249feec9-kube-api-access-nmbwv\") on node \"crc\" DevicePath \"\"" Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.716613 4773 generic.go:334] "Generic (PLEG): container finished" podID="4ce7d95f-711c-469d-87a3-d010249feec9" containerID="4bfa811cc676ee640b025615b7a5344a591b884709ad884989036ec3ef7156a2" exitCode=0 Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.716694 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s7dc7" Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.716680 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s7dc7" event={"ID":"4ce7d95f-711c-469d-87a3-d010249feec9","Type":"ContainerDied","Data":"4bfa811cc676ee640b025615b7a5344a591b884709ad884989036ec3ef7156a2"} Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.716855 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s7dc7" event={"ID":"4ce7d95f-711c-469d-87a3-d010249feec9","Type":"ContainerDied","Data":"1b2de3e597d6359152d542385495a8fb027cff0fb367fb76430e98b104eba13b"} Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.716896 4773 scope.go:117] "RemoveContainer" containerID="4bfa811cc676ee640b025615b7a5344a591b884709ad884989036ec3ef7156a2" Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.752524 4773 scope.go:117] "RemoveContainer" containerID="529caef520fed119317af29cf41c5c7d8d166b56bf6dadfe095f28c8eaf898f9" Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.759046 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-s7dc7"] Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.768198 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-s7dc7"] Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.792065 4773 scope.go:117] "RemoveContainer" containerID="81d8f7c4d58c588cb6d87534581eea138fa35d29aecafa0d6f7a18a0e2f198b4" Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.824588 4773 scope.go:117] "RemoveContainer" containerID="4bfa811cc676ee640b025615b7a5344a591b884709ad884989036ec3ef7156a2" Jan 22 13:49:25 crc kubenswrapper[4773]: E0122 13:49:25.826079 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bfa811cc676ee640b025615b7a5344a591b884709ad884989036ec3ef7156a2\": container with ID starting with 4bfa811cc676ee640b025615b7a5344a591b884709ad884989036ec3ef7156a2 not found: ID does not exist" containerID="4bfa811cc676ee640b025615b7a5344a591b884709ad884989036ec3ef7156a2" Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.826122 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bfa811cc676ee640b025615b7a5344a591b884709ad884989036ec3ef7156a2"} err="failed to get container status \"4bfa811cc676ee640b025615b7a5344a591b884709ad884989036ec3ef7156a2\": rpc error: code = NotFound desc = could not find container \"4bfa811cc676ee640b025615b7a5344a591b884709ad884989036ec3ef7156a2\": container with ID starting with 4bfa811cc676ee640b025615b7a5344a591b884709ad884989036ec3ef7156a2 not found: ID does not exist" Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.826153 4773 scope.go:117] "RemoveContainer" containerID="529caef520fed119317af29cf41c5c7d8d166b56bf6dadfe095f28c8eaf898f9" Jan 22 13:49:25 crc kubenswrapper[4773]: E0122 13:49:25.826833 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"529caef520fed119317af29cf41c5c7d8d166b56bf6dadfe095f28c8eaf898f9\": container with ID starting with 529caef520fed119317af29cf41c5c7d8d166b56bf6dadfe095f28c8eaf898f9 not found: ID does not exist" containerID="529caef520fed119317af29cf41c5c7d8d166b56bf6dadfe095f28c8eaf898f9" Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.826887 4773 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"529caef520fed119317af29cf41c5c7d8d166b56bf6dadfe095f28c8eaf898f9"} err="failed to get container status \"529caef520fed119317af29cf41c5c7d8d166b56bf6dadfe095f28c8eaf898f9\": rpc error: code = NotFound desc = could not find container \"529caef520fed119317af29cf41c5c7d8d166b56bf6dadfe095f28c8eaf898f9\": container with ID starting with 529caef520fed119317af29cf41c5c7d8d166b56bf6dadfe095f28c8eaf898f9 not found: ID does not exist" Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.826923 4773 scope.go:117] "RemoveContainer" containerID="81d8f7c4d58c588cb6d87534581eea138fa35d29aecafa0d6f7a18a0e2f198b4" Jan 22 13:49:25 crc kubenswrapper[4773]: E0122 13:49:25.827866 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81d8f7c4d58c588cb6d87534581eea138fa35d29aecafa0d6f7a18a0e2f198b4\": container with ID starting with 81d8f7c4d58c588cb6d87534581eea138fa35d29aecafa0d6f7a18a0e2f198b4 not found: ID does not exist" containerID="81d8f7c4d58c588cb6d87534581eea138fa35d29aecafa0d6f7a18a0e2f198b4" Jan 22 13:49:25 crc kubenswrapper[4773]: I0122 13:49:25.827898 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81d8f7c4d58c588cb6d87534581eea138fa35d29aecafa0d6f7a18a0e2f198b4"} err="failed to get container status \"81d8f7c4d58c588cb6d87534581eea138fa35d29aecafa0d6f7a18a0e2f198b4\": rpc error: code = NotFound desc = could not find container \"81d8f7c4d58c588cb6d87534581eea138fa35d29aecafa0d6f7a18a0e2f198b4\": container with ID starting with 81d8f7c4d58c588cb6d87534581eea138fa35d29aecafa0d6f7a18a0e2f198b4 not found: ID does not exist" Jan 22 13:49:26 crc kubenswrapper[4773]: I0122 13:49:26.675933 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ce7d95f-711c-469d-87a3-d010249feec9" path="/var/lib/kubelet/pods/4ce7d95f-711c-469d-87a3-d010249feec9/volumes" Jan 22 13:49:34 crc kubenswrapper[4773]: I0122 13:49:34.073993 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:49:34 crc kubenswrapper[4773]: I0122 13:49:34.074614 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:50:00 crc kubenswrapper[4773]: I0122 13:50:00.056659 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-create-ht6ks"] Jan 22 13:50:00 crc kubenswrapper[4773]: I0122 13:50:00.069228 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-3c56-account-create-update-8fhjd"] Jan 22 13:50:00 crc kubenswrapper[4773]: I0122 13:50:00.078297 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-ht6ks"] Jan 22 13:50:00 crc kubenswrapper[4773]: I0122 13:50:00.085598 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-3c56-account-create-update-8fhjd"] Jan 22 13:50:00 crc kubenswrapper[4773]: I0122 13:50:00.673057 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
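
The NotFound errors above are the benign side of container cleanup: kubelet removes a container, a follow-up status call to the CRI runtime reports "ID does not exist", and the deletor treats that as already-done rather than as a failure. A sketch of that idempotent-delete pattern using the grpc status package (the helper names here are illustrative, not kubelet's):

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer treats a NotFound from the runtime as success: the
// container is already gone, which is the goal of a cleanup path anyway.
// statusFn stands in for the real CRI ContainerStatus RPC.
func removeContainer(id string, statusFn func(string) error) error {
	if err := statusFn(id); err != nil {
		if status.Code(err) == codes.NotFound {
			fmt.Printf("container %s already removed, nothing to do\n", id)
			return nil
		}
		return fmt.Errorf("ContainerStatus failed: %w", err)
	}
	fmt.Printf("removing container %s\n", id)
	return nil
}

func main() {
	gone := func(string) error { return status.Error(codes.NotFound, "ID does not exist") }
	if err := removeContainer("4bfa811c", gone); err != nil {
		fmt.Println(err)
	}
}
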
podUID="39f5a860-7df2-4ef7-a42e-e5e5baba0fba" path="/var/lib/kubelet/pods/39f5a860-7df2-4ef7-a42e-e5e5baba0fba/volumes" Jan 22 13:50:00 crc kubenswrapper[4773]: I0122 13:50:00.673935 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="933daae7-11ca-4cde-9008-74ca1b6cbcb4" path="/var/lib/kubelet/pods/933daae7-11ca-4cde-9008-74ca1b6cbcb4/volumes" Jan 22 13:50:04 crc kubenswrapper[4773]: I0122 13:50:04.074827 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:50:04 crc kubenswrapper[4773]: I0122 13:50:04.075670 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:50:04 crc kubenswrapper[4773]: I0122 13:50:04.075784 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 13:50:04 crc kubenswrapper[4773]: I0122 13:50:04.077578 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b47baa0164b891015177a82736873324a0cec4115ca361eb866de456f186e0b2"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 13:50:04 crc kubenswrapper[4773]: I0122 13:50:04.077731 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://b47baa0164b891015177a82736873324a0cec4115ca361eb866de456f186e0b2" gracePeriod=600 Jan 22 13:50:04 crc kubenswrapper[4773]: I0122 13:50:04.229016 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="b47baa0164b891015177a82736873324a0cec4115ca361eb866de456f186e0b2" exitCode=0 Jan 22 13:50:04 crc kubenswrapper[4773]: I0122 13:50:04.229263 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"b47baa0164b891015177a82736873324a0cec4115ca361eb866de456f186e0b2"} Jan 22 13:50:04 crc kubenswrapper[4773]: I0122 13:50:04.229460 4773 scope.go:117] "RemoveContainer" containerID="2de4493482e60d796f28e4e2a88e4975e3b3ae31970fa73de30d9ce31962f810" Jan 22 13:50:05 crc kubenswrapper[4773]: I0122 13:50:05.294362 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96"} Jan 22 13:50:13 crc kubenswrapper[4773]: I0122 13:50:13.381845 4773 scope.go:117] "RemoveContainer" containerID="ee6de0b555c1fb64b932800bbf6209e4bab9e688301441d8a8879d9e22a744e6" Jan 22 13:50:13 crc kubenswrapper[4773]: I0122 13:50:13.442718 4773 scope.go:117] "RemoveContainer" 
containerID="607b1866e38fa2653de84abdf51a18d3c10580b3854250bc7afbcd58cc6d0a30" Jan 22 13:50:17 crc kubenswrapper[4773]: I0122 13:50:17.057958 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-g6ml5"] Jan 22 13:50:17 crc kubenswrapper[4773]: I0122 13:50:17.074253 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-g6ml5"] Jan 22 13:50:18 crc kubenswrapper[4773]: I0122 13:50:18.672413 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4543c21-3185-489c-827f-742d491ee70c" path="/var/lib/kubelet/pods/d4543c21-3185-489c-827f-742d491ee70c/volumes" Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.016302 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-k58f5"] Jan 22 13:50:48 crc kubenswrapper[4773]: E0122 13:50:48.017513 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ce7d95f-711c-469d-87a3-d010249feec9" containerName="extract-content" Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.017531 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ce7d95f-711c-469d-87a3-d010249feec9" containerName="extract-content" Jan 22 13:50:48 crc kubenswrapper[4773]: E0122 13:50:48.017552 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ce7d95f-711c-469d-87a3-d010249feec9" containerName="extract-utilities" Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.017562 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ce7d95f-711c-469d-87a3-d010249feec9" containerName="extract-utilities" Jan 22 13:50:48 crc kubenswrapper[4773]: E0122 13:50:48.017584 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ce7d95f-711c-469d-87a3-d010249feec9" containerName="registry-server" Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.017595 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ce7d95f-711c-469d-87a3-d010249feec9" containerName="registry-server" Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.017896 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ce7d95f-711c-469d-87a3-d010249feec9" containerName="registry-server" Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.019976 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-k58f5"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.032124 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k58f5"]
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.154901 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgqlj\" (UniqueName: \"kubernetes.io/projected/20837049-c8c4-4248-814d-2c4764620498-kube-api-access-mgqlj\") pod \"certified-operators-k58f5\" (UID: \"20837049-c8c4-4248-814d-2c4764620498\") " pod="openshift-marketplace/certified-operators-k58f5"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.155015 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20837049-c8c4-4248-814d-2c4764620498-catalog-content\") pod \"certified-operators-k58f5\" (UID: \"20837049-c8c4-4248-814d-2c4764620498\") " pod="openshift-marketplace/certified-operators-k58f5"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.155079 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20837049-c8c4-4248-814d-2c4764620498-utilities\") pod \"certified-operators-k58f5\" (UID: \"20837049-c8c4-4248-814d-2c4764620498\") " pod="openshift-marketplace/certified-operators-k58f5"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.211098 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-nt69g"]
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.216137 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nt69g"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.231730 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nt69g"]
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.257110 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgqlj\" (UniqueName: \"kubernetes.io/projected/20837049-c8c4-4248-814d-2c4764620498-kube-api-access-mgqlj\") pod \"certified-operators-k58f5\" (UID: \"20837049-c8c4-4248-814d-2c4764620498\") " pod="openshift-marketplace/certified-operators-k58f5"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.257184 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20837049-c8c4-4248-814d-2c4764620498-catalog-content\") pod \"certified-operators-k58f5\" (UID: \"20837049-c8c4-4248-814d-2c4764620498\") " pod="openshift-marketplace/certified-operators-k58f5"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.257249 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20837049-c8c4-4248-814d-2c4764620498-utilities\") pod \"certified-operators-k58f5\" (UID: \"20837049-c8c4-4248-814d-2c4764620498\") " pod="openshift-marketplace/certified-operators-k58f5"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.257875 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20837049-c8c4-4248-814d-2c4764620498-utilities\") pod \"certified-operators-k58f5\" (UID: \"20837049-c8c4-4248-814d-2c4764620498\") " pod="openshift-marketplace/certified-operators-k58f5"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.257996 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20837049-c8c4-4248-814d-2c4764620498-catalog-content\") pod \"certified-operators-k58f5\" (UID: \"20837049-c8c4-4248-814d-2c4764620498\") " pod="openshift-marketplace/certified-operators-k58f5"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.306978 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgqlj\" (UniqueName: \"kubernetes.io/projected/20837049-c8c4-4248-814d-2c4764620498-kube-api-access-mgqlj\") pod \"certified-operators-k58f5\" (UID: \"20837049-c8c4-4248-814d-2c4764620498\") " pod="openshift-marketplace/certified-operators-k58f5"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.360907 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k58f5"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.362414 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5bf9899-eacd-4c7e-af16-d5428cc0fb96-catalog-content\") pod \"community-operators-nt69g\" (UID: \"c5bf9899-eacd-4c7e-af16-d5428cc0fb96\") " pod="openshift-marketplace/community-operators-nt69g"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.362533 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5bf9899-eacd-4c7e-af16-d5428cc0fb96-utilities\") pod \"community-operators-nt69g\" (UID: \"c5bf9899-eacd-4c7e-af16-d5428cc0fb96\") " pod="openshift-marketplace/community-operators-nt69g"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.362637 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djg2f\" (UniqueName: \"kubernetes.io/projected/c5bf9899-eacd-4c7e-af16-d5428cc0fb96-kube-api-access-djg2f\") pod \"community-operators-nt69g\" (UID: \"c5bf9899-eacd-4c7e-af16-d5428cc0fb96\") " pod="openshift-marketplace/community-operators-nt69g"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.464585 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5bf9899-eacd-4c7e-af16-d5428cc0fb96-catalog-content\") pod \"community-operators-nt69g\" (UID: \"c5bf9899-eacd-4c7e-af16-d5428cc0fb96\") " pod="openshift-marketplace/community-operators-nt69g"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.464922 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5bf9899-eacd-4c7e-af16-d5428cc0fb96-utilities\") pod \"community-operators-nt69g\" (UID: \"c5bf9899-eacd-4c7e-af16-d5428cc0fb96\") " pod="openshift-marketplace/community-operators-nt69g"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.465019 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djg2f\" (UniqueName: \"kubernetes.io/projected/c5bf9899-eacd-4c7e-af16-d5428cc0fb96-kube-api-access-djg2f\") pod \"community-operators-nt69g\" (UID: \"c5bf9899-eacd-4c7e-af16-d5428cc0fb96\") " pod="openshift-marketplace/community-operators-nt69g"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.465798 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5bf9899-eacd-4c7e-af16-d5428cc0fb96-catalog-content\") pod \"community-operators-nt69g\" (UID: \"c5bf9899-eacd-4c7e-af16-d5428cc0fb96\") " pod="openshift-marketplace/community-operators-nt69g"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.466178 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5bf9899-eacd-4c7e-af16-d5428cc0fb96-utilities\") pod \"community-operators-nt69g\" (UID: \"c5bf9899-eacd-4c7e-af16-d5428cc0fb96\") " pod="openshift-marketplace/community-operators-nt69g"
Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.518621 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djg2f\" (UniqueName: \"kubernetes.io/projected/c5bf9899-eacd-4c7e-af16-d5428cc0fb96-kube-api-access-djg2f\") pod \"community-operators-nt69g\" (UID: \"c5bf9899-eacd-4c7e-af16-d5428cc0fb96\") " pod="openshift-marketplace/community-operators-nt69g"
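The volume lines above follow a fixed two-phase pattern: reconciler_common.go:245 first records VerifyControllerAttachedVolume for each desired volume, and only once that is done does reconciler_common.go:218 start MountVolume, with operation_generator.go:637 confirming SetUp. A minimal Go sketch of that ordering, using illustrative stand-in types rather than kubelet's real API:

    package main

    import "fmt"

    // Illustrative stand-ins for the desired-state volumes seen in the log.
    type volume struct{ name, plugin, pod string }

    // Phase 1: confirm the controller has attached the volume. This is a
    // no-op for empty-dir and projected volumes, which are node-local.
    func verifyControllerAttached(v volume) error { return nil }

    // Phase 2: mount the volume into the pod's directory.
    func mountVolume(v volume) error { return nil }

    func main() {
        vols := []volume{
            {"kube-api-access-mgqlj", "kubernetes.io/projected", "certified-operators-k58f5"},
            {"catalog-content", "kubernetes.io/empty-dir", "certified-operators-k58f5"},
            {"utilities", "kubernetes.io/empty-dir", "certified-operators-k58f5"},
        }
        for _, v := range vols {
            if err := verifyControllerAttached(v); err != nil {
                fmt.Println("verify failed:", err)
                continue // never mount a volume that is not attached
            }
            if err := mountVolume(v); err != nil {
                fmt.Println("mount failed:", err)
                continue
            }
            fmt.Printf("MountVolume.SetUp succeeded for volume %q pod %q\n", v.name, v.pod)
        }
    }

The ordering matters: mounting only begins for volumes whose attach check has passed, which is why the log shows all three "started" lines before any "succeeded" line.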
\"community-operators-nt69g\" (UID: \"c5bf9899-eacd-4c7e-af16-d5428cc0fb96\") " pod="openshift-marketplace/community-operators-nt69g" Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.537118 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nt69g" Jan 22 13:50:48 crc kubenswrapper[4773]: I0122 13:50:48.995955 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-k58f5"] Jan 22 13:50:49 crc kubenswrapper[4773]: I0122 13:50:49.110041 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nt69g"] Jan 22 13:50:49 crc kubenswrapper[4773]: I0122 13:50:49.918623 4773 generic.go:334] "Generic (PLEG): container finished" podID="20837049-c8c4-4248-814d-2c4764620498" containerID="1f85e0c2be8f9588ed7b1ba18c9d87f3ec2aef8fec17350380182b3e7239d698" exitCode=0 Jan 22 13:50:49 crc kubenswrapper[4773]: I0122 13:50:49.918697 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k58f5" event={"ID":"20837049-c8c4-4248-814d-2c4764620498","Type":"ContainerDied","Data":"1f85e0c2be8f9588ed7b1ba18c9d87f3ec2aef8fec17350380182b3e7239d698"} Jan 22 13:50:49 crc kubenswrapper[4773]: I0122 13:50:49.918993 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k58f5" event={"ID":"20837049-c8c4-4248-814d-2c4764620498","Type":"ContainerStarted","Data":"01df94aeb048234882252a4103fcbe48f9a0532df8636d02be9d29a82b82b567"} Jan 22 13:50:49 crc kubenswrapper[4773]: I0122 13:50:49.921661 4773 generic.go:334] "Generic (PLEG): container finished" podID="c5bf9899-eacd-4c7e-af16-d5428cc0fb96" containerID="16e1f664c0706fa57a95e490f31dfff5be8d24370e569a4c30cb27aa14151e9a" exitCode=0 Jan 22 13:50:49 crc kubenswrapper[4773]: I0122 13:50:49.921699 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nt69g" event={"ID":"c5bf9899-eacd-4c7e-af16-d5428cc0fb96","Type":"ContainerDied","Data":"16e1f664c0706fa57a95e490f31dfff5be8d24370e569a4c30cb27aa14151e9a"} Jan 22 13:50:49 crc kubenswrapper[4773]: I0122 13:50:49.921727 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nt69g" event={"ID":"c5bf9899-eacd-4c7e-af16-d5428cc0fb96","Type":"ContainerStarted","Data":"0ddcf0de3665babca9b3933e4c09447c71ec4c1dd2d313c2f94877528674b8c4"} Jan 22 13:50:50 crc kubenswrapper[4773]: I0122 13:50:50.937779 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k58f5" event={"ID":"20837049-c8c4-4248-814d-2c4764620498","Type":"ContainerStarted","Data":"cb36d369c76dc525dd6fb2854337cb78125b61fcfd1f646bdd4f926f2acfabfb"} Jan 22 13:50:50 crc kubenswrapper[4773]: I0122 13:50:50.947460 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nt69g" event={"ID":"c5bf9899-eacd-4c7e-af16-d5428cc0fb96","Type":"ContainerStarted","Data":"452e03e9fdb87c14ccc8755e1643663deffad2bb08624d47551434d12b90e070"} Jan 22 13:50:53 crc kubenswrapper[4773]: I0122 13:50:53.985299 4773 generic.go:334] "Generic (PLEG): container finished" podID="c5bf9899-eacd-4c7e-af16-d5428cc0fb96" containerID="452e03e9fdb87c14ccc8755e1643663deffad2bb08624d47551434d12b90e070" exitCode=0 Jan 22 13:50:53 crc kubenswrapper[4773]: I0122 13:50:53.985446 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nt69g" 
event={"ID":"c5bf9899-eacd-4c7e-af16-d5428cc0fb96","Type":"ContainerDied","Data":"452e03e9fdb87c14ccc8755e1643663deffad2bb08624d47551434d12b90e070"} Jan 22 13:50:53 crc kubenswrapper[4773]: I0122 13:50:53.988676 4773 generic.go:334] "Generic (PLEG): container finished" podID="20837049-c8c4-4248-814d-2c4764620498" containerID="cb36d369c76dc525dd6fb2854337cb78125b61fcfd1f646bdd4f926f2acfabfb" exitCode=0 Jan 22 13:50:53 crc kubenswrapper[4773]: I0122 13:50:53.988825 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k58f5" event={"ID":"20837049-c8c4-4248-814d-2c4764620498","Type":"ContainerDied","Data":"cb36d369c76dc525dd6fb2854337cb78125b61fcfd1f646bdd4f926f2acfabfb"} Jan 22 13:50:55 crc kubenswrapper[4773]: I0122 13:50:55.003344 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k58f5" event={"ID":"20837049-c8c4-4248-814d-2c4764620498","Type":"ContainerStarted","Data":"ba941582f60f3b42e8da6101025d6259b556be46cbc25f9fd3c246df19413a9d"} Jan 22 13:50:55 crc kubenswrapper[4773]: I0122 13:50:55.005232 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nt69g" event={"ID":"c5bf9899-eacd-4c7e-af16-d5428cc0fb96","Type":"ContainerStarted","Data":"11a1f825eb4e244ed052022bc302477d33b90d273c3c1e2329f2dc9e3ef740cb"} Jan 22 13:50:55 crc kubenswrapper[4773]: I0122 13:50:55.041674 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-k58f5" podStartSLOduration=3.5317371829999997 podStartE2EDuration="8.041641291s" podCreationTimestamp="2026-01-22 13:50:47 +0000 UTC" firstStartedPulling="2026-01-22 13:50:49.920998775 +0000 UTC m=+6957.499114630" lastFinishedPulling="2026-01-22 13:50:54.430902913 +0000 UTC m=+6962.009018738" observedRunningTime="2026-01-22 13:50:55.029487737 +0000 UTC m=+6962.607603602" watchObservedRunningTime="2026-01-22 13:50:55.041641291 +0000 UTC m=+6962.619757156" Jan 22 13:50:55 crc kubenswrapper[4773]: I0122 13:50:55.181623 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nt69g" podStartSLOduration=2.6824741149999998 podStartE2EDuration="7.181602628s" podCreationTimestamp="2026-01-22 13:50:48 +0000 UTC" firstStartedPulling="2026-01-22 13:50:49.924846774 +0000 UTC m=+6957.502962609" lastFinishedPulling="2026-01-22 13:50:54.423975287 +0000 UTC m=+6962.002091122" observedRunningTime="2026-01-22 13:50:55.172912312 +0000 UTC m=+6962.751028137" watchObservedRunningTime="2026-01-22 13:50:55.181602628 +0000 UTC m=+6962.759718453" Jan 22 13:50:58 crc kubenswrapper[4773]: I0122 13:50:58.361402 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-k58f5" Jan 22 13:50:58 crc kubenswrapper[4773]: I0122 13:50:58.363328 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-k58f5" Jan 22 13:50:58 crc kubenswrapper[4773]: I0122 13:50:58.412712 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-k58f5" Jan 22 13:50:58 crc kubenswrapper[4773]: I0122 13:50:58.538444 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-nt69g" Jan 22 13:50:58 crc kubenswrapper[4773]: I0122 13:50:58.538490 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/community-operators-nt69g" Jan 22 13:50:59 crc kubenswrapper[4773]: I0122 13:50:59.181557 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-k58f5" Jan 22 13:50:59 crc kubenswrapper[4773]: I0122 13:50:59.595654 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-nt69g" podUID="c5bf9899-eacd-4c7e-af16-d5428cc0fb96" containerName="registry-server" probeResult="failure" output=< Jan 22 13:50:59 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 13:50:59 crc kubenswrapper[4773]: > Jan 22 13:51:00 crc kubenswrapper[4773]: I0122 13:51:00.803494 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k58f5"] Jan 22 13:51:02 crc kubenswrapper[4773]: I0122 13:51:02.137609 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-k58f5" podUID="20837049-c8c4-4248-814d-2c4764620498" containerName="registry-server" containerID="cri-o://ba941582f60f3b42e8da6101025d6259b556be46cbc25f9fd3c246df19413a9d" gracePeriod=2 Jan 22 13:51:02 crc kubenswrapper[4773]: I0122 13:51:02.611373 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-k58f5" Jan 22 13:51:02 crc kubenswrapper[4773]: I0122 13:51:02.781225 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20837049-c8c4-4248-814d-2c4764620498-catalog-content\") pod \"20837049-c8c4-4248-814d-2c4764620498\" (UID: \"20837049-c8c4-4248-814d-2c4764620498\") " Jan 22 13:51:02 crc kubenswrapper[4773]: I0122 13:51:02.781344 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20837049-c8c4-4248-814d-2c4764620498-utilities\") pod \"20837049-c8c4-4248-814d-2c4764620498\" (UID: \"20837049-c8c4-4248-814d-2c4764620498\") " Jan 22 13:51:02 crc kubenswrapper[4773]: I0122 13:51:02.781547 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgqlj\" (UniqueName: \"kubernetes.io/projected/20837049-c8c4-4248-814d-2c4764620498-kube-api-access-mgqlj\") pod \"20837049-c8c4-4248-814d-2c4764620498\" (UID: \"20837049-c8c4-4248-814d-2c4764620498\") " Jan 22 13:51:02 crc kubenswrapper[4773]: I0122 13:51:02.783103 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20837049-c8c4-4248-814d-2c4764620498-utilities" (OuterVolumeSpecName: "utilities") pod "20837049-c8c4-4248-814d-2c4764620498" (UID: "20837049-c8c4-4248-814d-2c4764620498"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:51:02 crc kubenswrapper[4773]: I0122 13:51:02.787645 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20837049-c8c4-4248-814d-2c4764620498-kube-api-access-mgqlj" (OuterVolumeSpecName: "kube-api-access-mgqlj") pod "20837049-c8c4-4248-814d-2c4764620498" (UID: "20837049-c8c4-4248-814d-2c4764620498"). InnerVolumeSpecName "kube-api-access-mgqlj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:51:02 crc kubenswrapper[4773]: I0122 13:51:02.846187 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20837049-c8c4-4248-814d-2c4764620498-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "20837049-c8c4-4248-814d-2c4764620498" (UID: "20837049-c8c4-4248-814d-2c4764620498"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:51:02 crc kubenswrapper[4773]: I0122 13:51:02.884879 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgqlj\" (UniqueName: \"kubernetes.io/projected/20837049-c8c4-4248-814d-2c4764620498-kube-api-access-mgqlj\") on node \"crc\" DevicePath \"\"" Jan 22 13:51:02 crc kubenswrapper[4773]: I0122 13:51:02.884929 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20837049-c8c4-4248-814d-2c4764620498-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 13:51:02 crc kubenswrapper[4773]: I0122 13:51:02.884942 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20837049-c8c4-4248-814d-2c4764620498-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 13:51:03 crc kubenswrapper[4773]: I0122 13:51:03.153792 4773 generic.go:334] "Generic (PLEG): container finished" podID="20837049-c8c4-4248-814d-2c4764620498" containerID="ba941582f60f3b42e8da6101025d6259b556be46cbc25f9fd3c246df19413a9d" exitCode=0 Jan 22 13:51:03 crc kubenswrapper[4773]: I0122 13:51:03.153835 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k58f5" event={"ID":"20837049-c8c4-4248-814d-2c4764620498","Type":"ContainerDied","Data":"ba941582f60f3b42e8da6101025d6259b556be46cbc25f9fd3c246df19413a9d"} Jan 22 13:51:03 crc kubenswrapper[4773]: I0122 13:51:03.153862 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-k58f5" event={"ID":"20837049-c8c4-4248-814d-2c4764620498","Type":"ContainerDied","Data":"01df94aeb048234882252a4103fcbe48f9a0532df8636d02be9d29a82b82b567"} Jan 22 13:51:03 crc kubenswrapper[4773]: I0122 13:51:03.153879 4773 scope.go:117] "RemoveContainer" containerID="ba941582f60f3b42e8da6101025d6259b556be46cbc25f9fd3c246df19413a9d" Jan 22 13:51:03 crc kubenswrapper[4773]: I0122 13:51:03.153884 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-k58f5" Jan 22 13:51:03 crc kubenswrapper[4773]: I0122 13:51:03.188441 4773 scope.go:117] "RemoveContainer" containerID="cb36d369c76dc525dd6fb2854337cb78125b61fcfd1f646bdd4f926f2acfabfb" Jan 22 13:51:03 crc kubenswrapper[4773]: I0122 13:51:03.208235 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-k58f5"] Jan 22 13:51:03 crc kubenswrapper[4773]: I0122 13:51:03.230523 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-k58f5"] Jan 22 13:51:03 crc kubenswrapper[4773]: I0122 13:51:03.232222 4773 scope.go:117] "RemoveContainer" containerID="1f85e0c2be8f9588ed7b1ba18c9d87f3ec2aef8fec17350380182b3e7239d698" Jan 22 13:51:03 crc kubenswrapper[4773]: I0122 13:51:03.279280 4773 scope.go:117] "RemoveContainer" containerID="ba941582f60f3b42e8da6101025d6259b556be46cbc25f9fd3c246df19413a9d" Jan 22 13:51:03 crc kubenswrapper[4773]: E0122 13:51:03.280122 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba941582f60f3b42e8da6101025d6259b556be46cbc25f9fd3c246df19413a9d\": container with ID starting with ba941582f60f3b42e8da6101025d6259b556be46cbc25f9fd3c246df19413a9d not found: ID does not exist" containerID="ba941582f60f3b42e8da6101025d6259b556be46cbc25f9fd3c246df19413a9d" Jan 22 13:51:03 crc kubenswrapper[4773]: I0122 13:51:03.280187 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba941582f60f3b42e8da6101025d6259b556be46cbc25f9fd3c246df19413a9d"} err="failed to get container status \"ba941582f60f3b42e8da6101025d6259b556be46cbc25f9fd3c246df19413a9d\": rpc error: code = NotFound desc = could not find container \"ba941582f60f3b42e8da6101025d6259b556be46cbc25f9fd3c246df19413a9d\": container with ID starting with ba941582f60f3b42e8da6101025d6259b556be46cbc25f9fd3c246df19413a9d not found: ID does not exist" Jan 22 13:51:03 crc kubenswrapper[4773]: I0122 13:51:03.280231 4773 scope.go:117] "RemoveContainer" containerID="cb36d369c76dc525dd6fb2854337cb78125b61fcfd1f646bdd4f926f2acfabfb" Jan 22 13:51:03 crc kubenswrapper[4773]: E0122 13:51:03.280802 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb36d369c76dc525dd6fb2854337cb78125b61fcfd1f646bdd4f926f2acfabfb\": container with ID starting with cb36d369c76dc525dd6fb2854337cb78125b61fcfd1f646bdd4f926f2acfabfb not found: ID does not exist" containerID="cb36d369c76dc525dd6fb2854337cb78125b61fcfd1f646bdd4f926f2acfabfb" Jan 22 13:51:03 crc kubenswrapper[4773]: I0122 13:51:03.280981 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb36d369c76dc525dd6fb2854337cb78125b61fcfd1f646bdd4f926f2acfabfb"} err="failed to get container status \"cb36d369c76dc525dd6fb2854337cb78125b61fcfd1f646bdd4f926f2acfabfb\": rpc error: code = NotFound desc = could not find container \"cb36d369c76dc525dd6fb2854337cb78125b61fcfd1f646bdd4f926f2acfabfb\": container with ID starting with cb36d369c76dc525dd6fb2854337cb78125b61fcfd1f646bdd4f926f2acfabfb not found: ID does not exist" Jan 22 13:51:03 crc kubenswrapper[4773]: I0122 13:51:03.281131 4773 scope.go:117] "RemoveContainer" containerID="1f85e0c2be8f9588ed7b1ba18c9d87f3ec2aef8fec17350380182b3e7239d698" Jan 22 13:51:03 crc kubenswrapper[4773]: E0122 13:51:03.281667 4773 log.go:32] "ContainerStatus from runtime service 
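The E/I pairs above are a benign race, not a real failure: the cleanup path asks the runtime for the status of a container that a parallel deletion already removed, gets rpc code = NotFound, and logs the error while treating the container as gone. A simplified sketch of that tolerance (kubelet's actual handling lives in its kuberuntime and pod_container_deletor code; this only illustrates the idempotent pattern):

    package main

    import (
        "errors"
        "fmt"
    )

    var errNotFound = errors.New("rpc error: code = NotFound")

    // containerStatus stands in for the CRI ContainerStatus call; here it
    // always reports the container as missing, as in the log above.
    func containerStatus(id string) error { return errNotFound }

    // removeContainer treats NotFound as success: the container is already
    // gone, which is exactly the state removal was trying to reach.
    func removeContainer(id string) error {
        if err := containerStatus(id); err != nil {
            if errors.Is(err, errNotFound) {
                fmt.Printf("container %s already removed, nothing to do\n", id[:12])
                return nil
            }
            return err
        }
        // ... issue the actual RemoveContainer call here ...
        return nil
    }

    func main() {
        _ = removeContainer("ba941582f60f3b42e8da6101025d6259b556be46cbc25f9fd3c246df19413a9d")
    }

That is why each NotFound error is immediately followed by normal progress: the sync continues and the orphaned volumes directory is cleaned up a second later.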
failed" err="rpc error: code = NotFound desc = could not find container \"1f85e0c2be8f9588ed7b1ba18c9d87f3ec2aef8fec17350380182b3e7239d698\": container with ID starting with 1f85e0c2be8f9588ed7b1ba18c9d87f3ec2aef8fec17350380182b3e7239d698 not found: ID does not exist" containerID="1f85e0c2be8f9588ed7b1ba18c9d87f3ec2aef8fec17350380182b3e7239d698" Jan 22 13:51:03 crc kubenswrapper[4773]: I0122 13:51:03.281707 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f85e0c2be8f9588ed7b1ba18c9d87f3ec2aef8fec17350380182b3e7239d698"} err="failed to get container status \"1f85e0c2be8f9588ed7b1ba18c9d87f3ec2aef8fec17350380182b3e7239d698\": rpc error: code = NotFound desc = could not find container \"1f85e0c2be8f9588ed7b1ba18c9d87f3ec2aef8fec17350380182b3e7239d698\": container with ID starting with 1f85e0c2be8f9588ed7b1ba18c9d87f3ec2aef8fec17350380182b3e7239d698 not found: ID does not exist" Jan 22 13:51:04 crc kubenswrapper[4773]: I0122 13:51:04.679688 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20837049-c8c4-4248-814d-2c4764620498" path="/var/lib/kubelet/pods/20837049-c8c4-4248-814d-2c4764620498/volumes" Jan 22 13:51:08 crc kubenswrapper[4773]: I0122 13:51:08.618945 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-nt69g" Jan 22 13:51:08 crc kubenswrapper[4773]: I0122 13:51:08.687649 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-nt69g" Jan 22 13:51:08 crc kubenswrapper[4773]: I0122 13:51:08.856325 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nt69g"] Jan 22 13:51:10 crc kubenswrapper[4773]: I0122 13:51:10.241100 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-nt69g" podUID="c5bf9899-eacd-4c7e-af16-d5428cc0fb96" containerName="registry-server" containerID="cri-o://11a1f825eb4e244ed052022bc302477d33b90d273c3c1e2329f2dc9e3ef740cb" gracePeriod=2 Jan 22 13:51:11 crc kubenswrapper[4773]: I0122 13:51:11.258257 4773 generic.go:334] "Generic (PLEG): container finished" podID="c5bf9899-eacd-4c7e-af16-d5428cc0fb96" containerID="11a1f825eb4e244ed052022bc302477d33b90d273c3c1e2329f2dc9e3ef740cb" exitCode=0 Jan 22 13:51:11 crc kubenswrapper[4773]: I0122 13:51:11.258546 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nt69g" event={"ID":"c5bf9899-eacd-4c7e-af16-d5428cc0fb96","Type":"ContainerDied","Data":"11a1f825eb4e244ed052022bc302477d33b90d273c3c1e2329f2dc9e3ef740cb"} Jan 22 13:51:11 crc kubenswrapper[4773]: I0122 13:51:11.258592 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nt69g" event={"ID":"c5bf9899-eacd-4c7e-af16-d5428cc0fb96","Type":"ContainerDied","Data":"0ddcf0de3665babca9b3933e4c09447c71ec4c1dd2d313c2f94877528674b8c4"} Jan 22 13:51:11 crc kubenswrapper[4773]: I0122 13:51:11.258606 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ddcf0de3665babca9b3933e4c09447c71ec4c1dd2d313c2f94877528674b8c4" Jan 22 13:51:11 crc kubenswrapper[4773]: I0122 13:51:11.281392 4773 util.go:48] "No ready sandbox for pod can be found. 
Jan 22 13:51:11 crc kubenswrapper[4773]: I0122 13:51:11.393403 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5bf9899-eacd-4c7e-af16-d5428cc0fb96-catalog-content\") pod \"c5bf9899-eacd-4c7e-af16-d5428cc0fb96\" (UID: \"c5bf9899-eacd-4c7e-af16-d5428cc0fb96\") "
Jan 22 13:51:11 crc kubenswrapper[4773]: I0122 13:51:11.394124 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5bf9899-eacd-4c7e-af16-d5428cc0fb96-utilities\") pod \"c5bf9899-eacd-4c7e-af16-d5428cc0fb96\" (UID: \"c5bf9899-eacd-4c7e-af16-d5428cc0fb96\") "
Jan 22 13:51:11 crc kubenswrapper[4773]: I0122 13:51:11.394386 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djg2f\" (UniqueName: \"kubernetes.io/projected/c5bf9899-eacd-4c7e-af16-d5428cc0fb96-kube-api-access-djg2f\") pod \"c5bf9899-eacd-4c7e-af16-d5428cc0fb96\" (UID: \"c5bf9899-eacd-4c7e-af16-d5428cc0fb96\") "
Jan 22 13:51:11 crc kubenswrapper[4773]: I0122 13:51:11.395572 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5bf9899-eacd-4c7e-af16-d5428cc0fb96-utilities" (OuterVolumeSpecName: "utilities") pod "c5bf9899-eacd-4c7e-af16-d5428cc0fb96" (UID: "c5bf9899-eacd-4c7e-af16-d5428cc0fb96"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 13:51:11 crc kubenswrapper[4773]: I0122 13:51:11.399197 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5bf9899-eacd-4c7e-af16-d5428cc0fb96-kube-api-access-djg2f" (OuterVolumeSpecName: "kube-api-access-djg2f") pod "c5bf9899-eacd-4c7e-af16-d5428cc0fb96" (UID: "c5bf9899-eacd-4c7e-af16-d5428cc0fb96"). InnerVolumeSpecName "kube-api-access-djg2f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 13:51:11 crc kubenswrapper[4773]: I0122 13:51:11.461419 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5bf9899-eacd-4c7e-af16-d5428cc0fb96-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c5bf9899-eacd-4c7e-af16-d5428cc0fb96" (UID: "c5bf9899-eacd-4c7e-af16-d5428cc0fb96"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 13:51:11 crc kubenswrapper[4773]: I0122 13:51:11.498568 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djg2f\" (UniqueName: \"kubernetes.io/projected/c5bf9899-eacd-4c7e-af16-d5428cc0fb96-kube-api-access-djg2f\") on node \"crc\" DevicePath \"\""
Jan 22 13:51:11 crc kubenswrapper[4773]: I0122 13:51:11.498875 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5bf9899-eacd-4c7e-af16-d5428cc0fb96-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 13:51:11 crc kubenswrapper[4773]: I0122 13:51:11.498897 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5bf9899-eacd-4c7e-af16-d5428cc0fb96-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 13:51:12 crc kubenswrapper[4773]: I0122 13:51:12.272710 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nt69g"
Jan 22 13:51:12 crc kubenswrapper[4773]: I0122 13:51:12.325086 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nt69g"]
Jan 22 13:51:12 crc kubenswrapper[4773]: I0122 13:51:12.335245 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-nt69g"]
Jan 22 13:51:12 crc kubenswrapper[4773]: I0122 13:51:12.676050 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5bf9899-eacd-4c7e-af16-d5428cc0fb96" path="/var/lib/kubelet/pods/c5bf9899-eacd-4c7e-af16-d5428cc0fb96/volumes"
Jan 22 13:51:13 crc kubenswrapper[4773]: I0122 13:51:13.599273 4773 scope.go:117] "RemoveContainer" containerID="d034c0c0bc553f19b007d2313cf284b4a7d346ad689ab0720f0e8897b269c096"
Jan 22 13:52:04 crc kubenswrapper[4773]: I0122 13:52:04.074339 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 13:52:04 crc kubenswrapper[4773]: I0122 13:52:04.074891 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 13:52:16 crc kubenswrapper[4773]: I0122 13:52:16.786306 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-29z7q"]
Jan 22 13:52:16 crc kubenswrapper[4773]: E0122 13:52:16.787232 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5bf9899-eacd-4c7e-af16-d5428cc0fb96" containerName="extract-utilities"
Jan 22 13:52:16 crc kubenswrapper[4773]: I0122 13:52:16.787245 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5bf9899-eacd-4c7e-af16-d5428cc0fb96" containerName="extract-utilities"
Jan 22 13:52:16 crc kubenswrapper[4773]: E0122 13:52:16.787265 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5bf9899-eacd-4c7e-af16-d5428cc0fb96" containerName="extract-content"
Jan 22 13:52:16 crc kubenswrapper[4773]: I0122 13:52:16.787270 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5bf9899-eacd-4c7e-af16-d5428cc0fb96" containerName="extract-content"
Jan 22 13:52:16 crc kubenswrapper[4773]: E0122 13:52:16.787302 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5bf9899-eacd-4c7e-af16-d5428cc0fb96" containerName="registry-server"
Jan 22 13:52:16 crc kubenswrapper[4773]: I0122 13:52:16.787308 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5bf9899-eacd-4c7e-af16-d5428cc0fb96" containerName="registry-server"
Jan 22 13:52:16 crc kubenswrapper[4773]: E0122 13:52:16.787321 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20837049-c8c4-4248-814d-2c4764620498" containerName="extract-content"
Jan 22 13:52:16 crc kubenswrapper[4773]: I0122 13:52:16.787328 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="20837049-c8c4-4248-814d-2c4764620498" containerName="extract-content"
Jan 22 13:52:16 crc kubenswrapper[4773]: E0122 13:52:16.787351 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20837049-c8c4-4248-814d-2c4764620498" containerName="registry-server"
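Before admitting redhat-operators-29z7q, the cpu_manager, state_mem, and memory_manager entries above sweep out per-container CPU-set and memory assignments left behind by the two pods deleted earlier: the error-level cpu_manager lines flag that stale state for a dead container was found at all, and the paired state_mem lines confirm the assignment was dropped. A sketch of that kind of keyed cleanup (the real checkpointed state is more elaborate than a nested map):

    package main

    import "fmt"

    // Assignments keyed by podUID, then container name, as the log lines suggest.
    type state struct {
        assignments map[string]map[string]string // podUID -> container -> cpuset
    }

    // removeStaleState drops every assignment whose pod is no longer active.
    func (s *state) removeStaleState(active map[string]bool) {
        for podUID, containers := range s.assignments {
            if active[podUID] {
                continue
            }
            for name := range containers {
                fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", podUID, name)
                delete(containers, name)
            }
            delete(s.assignments, podUID)
        }
    }

    func main() {
        s := &state{assignments: map[string]map[string]string{
            "c5bf9899-eacd-4c7e-af16-d5428cc0fb96": {"extract-utilities": "0-3", "registry-server": "0-3"},
        }}
        s.removeStaleState(map[string]bool{}) // no active pods remain for that UID
    }

Running the sweep lazily, on the next pod admission rather than at deletion time, is why the stale entries for pods removed at 13:51 only disappear here at 13:52:16.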
containerName="registry-server" Jan 22 13:52:16 crc kubenswrapper[4773]: I0122 13:52:16.787358 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="20837049-c8c4-4248-814d-2c4764620498" containerName="registry-server" Jan 22 13:52:16 crc kubenswrapper[4773]: E0122 13:52:16.787373 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20837049-c8c4-4248-814d-2c4764620498" containerName="extract-utilities" Jan 22 13:52:16 crc kubenswrapper[4773]: I0122 13:52:16.787381 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="20837049-c8c4-4248-814d-2c4764620498" containerName="extract-utilities" Jan 22 13:52:16 crc kubenswrapper[4773]: I0122 13:52:16.787595 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="20837049-c8c4-4248-814d-2c4764620498" containerName="registry-server" Jan 22 13:52:16 crc kubenswrapper[4773]: I0122 13:52:16.787625 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5bf9899-eacd-4c7e-af16-d5428cc0fb96" containerName="registry-server" Jan 22 13:52:16 crc kubenswrapper[4773]: I0122 13:52:16.789688 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-29z7q" Jan 22 13:52:16 crc kubenswrapper[4773]: I0122 13:52:16.804237 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-29z7q"] Jan 22 13:52:16 crc kubenswrapper[4773]: I0122 13:52:16.903368 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c2c7109-994b-4714-838c-af00edf00c2a-utilities\") pod \"redhat-operators-29z7q\" (UID: \"5c2c7109-994b-4714-838c-af00edf00c2a\") " pod="openshift-marketplace/redhat-operators-29z7q" Jan 22 13:52:16 crc kubenswrapper[4773]: I0122 13:52:16.903863 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5kpj\" (UniqueName: \"kubernetes.io/projected/5c2c7109-994b-4714-838c-af00edf00c2a-kube-api-access-z5kpj\") pod \"redhat-operators-29z7q\" (UID: \"5c2c7109-994b-4714-838c-af00edf00c2a\") " pod="openshift-marketplace/redhat-operators-29z7q" Jan 22 13:52:16 crc kubenswrapper[4773]: I0122 13:52:16.903905 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c2c7109-994b-4714-838c-af00edf00c2a-catalog-content\") pod \"redhat-operators-29z7q\" (UID: \"5c2c7109-994b-4714-838c-af00edf00c2a\") " pod="openshift-marketplace/redhat-operators-29z7q" Jan 22 13:52:17 crc kubenswrapper[4773]: I0122 13:52:17.006241 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5kpj\" (UniqueName: \"kubernetes.io/projected/5c2c7109-994b-4714-838c-af00edf00c2a-kube-api-access-z5kpj\") pod \"redhat-operators-29z7q\" (UID: \"5c2c7109-994b-4714-838c-af00edf00c2a\") " pod="openshift-marketplace/redhat-operators-29z7q" Jan 22 13:52:17 crc kubenswrapper[4773]: I0122 13:52:17.006305 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c2c7109-994b-4714-838c-af00edf00c2a-catalog-content\") pod \"redhat-operators-29z7q\" (UID: \"5c2c7109-994b-4714-838c-af00edf00c2a\") " pod="openshift-marketplace/redhat-operators-29z7q" Jan 22 13:52:17 crc kubenswrapper[4773]: I0122 13:52:17.006419 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c2c7109-994b-4714-838c-af00edf00c2a-utilities\") pod \"redhat-operators-29z7q\" (UID: \"5c2c7109-994b-4714-838c-af00edf00c2a\") " pod="openshift-marketplace/redhat-operators-29z7q" Jan 22 13:52:17 crc kubenswrapper[4773]: I0122 13:52:17.006926 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c2c7109-994b-4714-838c-af00edf00c2a-utilities\") pod \"redhat-operators-29z7q\" (UID: \"5c2c7109-994b-4714-838c-af00edf00c2a\") " pod="openshift-marketplace/redhat-operators-29z7q" Jan 22 13:52:17 crc kubenswrapper[4773]: I0122 13:52:17.008667 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c2c7109-994b-4714-838c-af00edf00c2a-catalog-content\") pod \"redhat-operators-29z7q\" (UID: \"5c2c7109-994b-4714-838c-af00edf00c2a\") " pod="openshift-marketplace/redhat-operators-29z7q" Jan 22 13:52:17 crc kubenswrapper[4773]: I0122 13:52:17.029365 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5kpj\" (UniqueName: \"kubernetes.io/projected/5c2c7109-994b-4714-838c-af00edf00c2a-kube-api-access-z5kpj\") pod \"redhat-operators-29z7q\" (UID: \"5c2c7109-994b-4714-838c-af00edf00c2a\") " pod="openshift-marketplace/redhat-operators-29z7q" Jan 22 13:52:17 crc kubenswrapper[4773]: I0122 13:52:17.127227 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-29z7q" Jan 22 13:52:17 crc kubenswrapper[4773]: I0122 13:52:17.620197 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-29z7q"] Jan 22 13:52:18 crc kubenswrapper[4773]: I0122 13:52:18.053819 4773 generic.go:334] "Generic (PLEG): container finished" podID="5c2c7109-994b-4714-838c-af00edf00c2a" containerID="97267ea26ed522cd788f743b1560526b946560622e06104fe0a17a586dd01506" exitCode=0 Jan 22 13:52:18 crc kubenswrapper[4773]: I0122 13:52:18.053937 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-29z7q" event={"ID":"5c2c7109-994b-4714-838c-af00edf00c2a","Type":"ContainerDied","Data":"97267ea26ed522cd788f743b1560526b946560622e06104fe0a17a586dd01506"} Jan 22 13:52:18 crc kubenswrapper[4773]: I0122 13:52:18.054129 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-29z7q" event={"ID":"5c2c7109-994b-4714-838c-af00edf00c2a","Type":"ContainerStarted","Data":"62e740a33fdc8e3fd744377301187d1e9f31f248901adbe91f76d64f6ab08292"} Jan 22 13:52:20 crc kubenswrapper[4773]: I0122 13:52:20.078035 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-29z7q" event={"ID":"5c2c7109-994b-4714-838c-af00edf00c2a","Type":"ContainerStarted","Data":"b3d3840a034a3ff3fc84bf506ed4c717f9742133af52c36e5415001d934c854b"} Jan 22 13:52:24 crc kubenswrapper[4773]: I0122 13:52:24.124373 4773 generic.go:334] "Generic (PLEG): container finished" podID="5c2c7109-994b-4714-838c-af00edf00c2a" containerID="b3d3840a034a3ff3fc84bf506ed4c717f9742133af52c36e5415001d934c854b" exitCode=0 Jan 22 13:52:24 crc kubenswrapper[4773]: I0122 13:52:24.124479 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-29z7q" event={"ID":"5c2c7109-994b-4714-838c-af00edf00c2a","Type":"ContainerDied","Data":"b3d3840a034a3ff3fc84bf506ed4c717f9742133af52c36e5415001d934c854b"} Jan 22 
13:52:25 crc kubenswrapper[4773]: I0122 13:52:25.141653 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-29z7q" event={"ID":"5c2c7109-994b-4714-838c-af00edf00c2a","Type":"ContainerStarted","Data":"a982cd54bcf0ecd066c308aa736ff09e5c86484b9e72a96e3f89c6fa2d766fcf"} Jan 22 13:52:27 crc kubenswrapper[4773]: I0122 13:52:27.128260 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-29z7q" Jan 22 13:52:27 crc kubenswrapper[4773]: I0122 13:52:27.128873 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-29z7q" Jan 22 13:52:28 crc kubenswrapper[4773]: I0122 13:52:28.189964 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-29z7q" podUID="5c2c7109-994b-4714-838c-af00edf00c2a" containerName="registry-server" probeResult="failure" output=< Jan 22 13:52:28 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 13:52:28 crc kubenswrapper[4773]: > Jan 22 13:52:34 crc kubenswrapper[4773]: I0122 13:52:34.074473 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:52:34 crc kubenswrapper[4773]: I0122 13:52:34.075207 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:52:37 crc kubenswrapper[4773]: I0122 13:52:37.184719 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-29z7q" Jan 22 13:52:37 crc kubenswrapper[4773]: I0122 13:52:37.202767 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-29z7q" podStartSLOduration=14.675277895 podStartE2EDuration="21.202747035s" podCreationTimestamp="2026-01-22 13:52:16 +0000 UTC" firstStartedPulling="2026-01-22 13:52:18.055973291 +0000 UTC m=+7045.634089126" lastFinishedPulling="2026-01-22 13:52:24.583442431 +0000 UTC m=+7052.161558266" observedRunningTime="2026-01-22 13:52:25.17476525 +0000 UTC m=+7052.752881135" watchObservedRunningTime="2026-01-22 13:52:37.202747035 +0000 UTC m=+7064.780862860" Jan 22 13:52:37 crc kubenswrapper[4773]: I0122 13:52:37.236331 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-29z7q" Jan 22 13:52:37 crc kubenswrapper[4773]: I0122 13:52:37.425840 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-29z7q"] Jan 22 13:52:38 crc kubenswrapper[4773]: I0122 13:52:38.281132 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-29z7q" podUID="5c2c7109-994b-4714-838c-af00edf00c2a" containerName="registry-server" containerID="cri-o://a982cd54bcf0ecd066c308aa736ff09e5c86484b9e72a96e3f89c6fa2d766fcf" gracePeriod=2 Jan 22 13:52:38 crc kubenswrapper[4773]: I0122 13:52:38.809235 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-29z7q" Jan 22 13:52:38 crc kubenswrapper[4773]: I0122 13:52:38.884203 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c2c7109-994b-4714-838c-af00edf00c2a-utilities\") pod \"5c2c7109-994b-4714-838c-af00edf00c2a\" (UID: \"5c2c7109-994b-4714-838c-af00edf00c2a\") " Jan 22 13:52:38 crc kubenswrapper[4773]: I0122 13:52:38.884265 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c2c7109-994b-4714-838c-af00edf00c2a-catalog-content\") pod \"5c2c7109-994b-4714-838c-af00edf00c2a\" (UID: \"5c2c7109-994b-4714-838c-af00edf00c2a\") " Jan 22 13:52:38 crc kubenswrapper[4773]: I0122 13:52:38.884383 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5kpj\" (UniqueName: \"kubernetes.io/projected/5c2c7109-994b-4714-838c-af00edf00c2a-kube-api-access-z5kpj\") pod \"5c2c7109-994b-4714-838c-af00edf00c2a\" (UID: \"5c2c7109-994b-4714-838c-af00edf00c2a\") " Jan 22 13:52:38 crc kubenswrapper[4773]: I0122 13:52:38.886981 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c2c7109-994b-4714-838c-af00edf00c2a-utilities" (OuterVolumeSpecName: "utilities") pod "5c2c7109-994b-4714-838c-af00edf00c2a" (UID: "5c2c7109-994b-4714-838c-af00edf00c2a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:52:38 crc kubenswrapper[4773]: I0122 13:52:38.905772 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c2c7109-994b-4714-838c-af00edf00c2a-kube-api-access-z5kpj" (OuterVolumeSpecName: "kube-api-access-z5kpj") pod "5c2c7109-994b-4714-838c-af00edf00c2a" (UID: "5c2c7109-994b-4714-838c-af00edf00c2a"). InnerVolumeSpecName "kube-api-access-z5kpj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:52:38 crc kubenswrapper[4773]: I0122 13:52:38.987794 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5kpj\" (UniqueName: \"kubernetes.io/projected/5c2c7109-994b-4714-838c-af00edf00c2a-kube-api-access-z5kpj\") on node \"crc\" DevicePath \"\"" Jan 22 13:52:38 crc kubenswrapper[4773]: I0122 13:52:38.987839 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c2c7109-994b-4714-838c-af00edf00c2a-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 13:52:39 crc kubenswrapper[4773]: I0122 13:52:39.035689 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c2c7109-994b-4714-838c-af00edf00c2a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5c2c7109-994b-4714-838c-af00edf00c2a" (UID: "5c2c7109-994b-4714-838c-af00edf00c2a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 13:52:39 crc kubenswrapper[4773]: I0122 13:52:39.090375 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c2c7109-994b-4714-838c-af00edf00c2a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 13:52:39 crc kubenswrapper[4773]: I0122 13:52:39.293407 4773 generic.go:334] "Generic (PLEG): container finished" podID="5c2c7109-994b-4714-838c-af00edf00c2a" containerID="a982cd54bcf0ecd066c308aa736ff09e5c86484b9e72a96e3f89c6fa2d766fcf" exitCode=0 Jan 22 13:52:39 crc kubenswrapper[4773]: I0122 13:52:39.293524 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-29z7q" Jan 22 13:52:39 crc kubenswrapper[4773]: I0122 13:52:39.293651 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-29z7q" event={"ID":"5c2c7109-994b-4714-838c-af00edf00c2a","Type":"ContainerDied","Data":"a982cd54bcf0ecd066c308aa736ff09e5c86484b9e72a96e3f89c6fa2d766fcf"} Jan 22 13:52:39 crc kubenswrapper[4773]: I0122 13:52:39.293693 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-29z7q" event={"ID":"5c2c7109-994b-4714-838c-af00edf00c2a","Type":"ContainerDied","Data":"62e740a33fdc8e3fd744377301187d1e9f31f248901adbe91f76d64f6ab08292"} Jan 22 13:52:39 crc kubenswrapper[4773]: I0122 13:52:39.293717 4773 scope.go:117] "RemoveContainer" containerID="a982cd54bcf0ecd066c308aa736ff09e5c86484b9e72a96e3f89c6fa2d766fcf" Jan 22 13:52:39 crc kubenswrapper[4773]: I0122 13:52:39.322841 4773 scope.go:117] "RemoveContainer" containerID="b3d3840a034a3ff3fc84bf506ed4c717f9742133af52c36e5415001d934c854b" Jan 22 13:52:39 crc kubenswrapper[4773]: I0122 13:52:39.340700 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-29z7q"] Jan 22 13:52:39 crc kubenswrapper[4773]: I0122 13:52:39.346930 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-29z7q"] Jan 22 13:52:39 crc kubenswrapper[4773]: I0122 13:52:39.365209 4773 scope.go:117] "RemoveContainer" containerID="97267ea26ed522cd788f743b1560526b946560622e06104fe0a17a586dd01506" Jan 22 13:52:39 crc kubenswrapper[4773]: I0122 13:52:39.454538 4773 scope.go:117] "RemoveContainer" containerID="a982cd54bcf0ecd066c308aa736ff09e5c86484b9e72a96e3f89c6fa2d766fcf" Jan 22 13:52:39 crc kubenswrapper[4773]: E0122 13:52:39.463767 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a982cd54bcf0ecd066c308aa736ff09e5c86484b9e72a96e3f89c6fa2d766fcf\": container with ID starting with a982cd54bcf0ecd066c308aa736ff09e5c86484b9e72a96e3f89c6fa2d766fcf not found: ID does not exist" containerID="a982cd54bcf0ecd066c308aa736ff09e5c86484b9e72a96e3f89c6fa2d766fcf" Jan 22 13:52:39 crc kubenswrapper[4773]: I0122 13:52:39.463819 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a982cd54bcf0ecd066c308aa736ff09e5c86484b9e72a96e3f89c6fa2d766fcf"} err="failed to get container status \"a982cd54bcf0ecd066c308aa736ff09e5c86484b9e72a96e3f89c6fa2d766fcf\": rpc error: code = NotFound desc = could not find container \"a982cd54bcf0ecd066c308aa736ff09e5c86484b9e72a96e3f89c6fa2d766fcf\": container with ID starting with a982cd54bcf0ecd066c308aa736ff09e5c86484b9e72a96e3f89c6fa2d766fcf not found: ID does not exist" Jan 22 13:52:39 crc 
kubenswrapper[4773]: I0122 13:52:39.463875 4773 scope.go:117] "RemoveContainer" containerID="b3d3840a034a3ff3fc84bf506ed4c717f9742133af52c36e5415001d934c854b" Jan 22 13:52:39 crc kubenswrapper[4773]: E0122 13:52:39.480413 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3d3840a034a3ff3fc84bf506ed4c717f9742133af52c36e5415001d934c854b\": container with ID starting with b3d3840a034a3ff3fc84bf506ed4c717f9742133af52c36e5415001d934c854b not found: ID does not exist" containerID="b3d3840a034a3ff3fc84bf506ed4c717f9742133af52c36e5415001d934c854b" Jan 22 13:52:39 crc kubenswrapper[4773]: I0122 13:52:39.480993 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3d3840a034a3ff3fc84bf506ed4c717f9742133af52c36e5415001d934c854b"} err="failed to get container status \"b3d3840a034a3ff3fc84bf506ed4c717f9742133af52c36e5415001d934c854b\": rpc error: code = NotFound desc = could not find container \"b3d3840a034a3ff3fc84bf506ed4c717f9742133af52c36e5415001d934c854b\": container with ID starting with b3d3840a034a3ff3fc84bf506ed4c717f9742133af52c36e5415001d934c854b not found: ID does not exist" Jan 22 13:52:39 crc kubenswrapper[4773]: I0122 13:52:39.481105 4773 scope.go:117] "RemoveContainer" containerID="97267ea26ed522cd788f743b1560526b946560622e06104fe0a17a586dd01506" Jan 22 13:52:39 crc kubenswrapper[4773]: E0122 13:52:39.481615 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97267ea26ed522cd788f743b1560526b946560622e06104fe0a17a586dd01506\": container with ID starting with 97267ea26ed522cd788f743b1560526b946560622e06104fe0a17a586dd01506 not found: ID does not exist" containerID="97267ea26ed522cd788f743b1560526b946560622e06104fe0a17a586dd01506" Jan 22 13:52:39 crc kubenswrapper[4773]: I0122 13:52:39.481676 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97267ea26ed522cd788f743b1560526b946560622e06104fe0a17a586dd01506"} err="failed to get container status \"97267ea26ed522cd788f743b1560526b946560622e06104fe0a17a586dd01506\": rpc error: code = NotFound desc = could not find container \"97267ea26ed522cd788f743b1560526b946560622e06104fe0a17a586dd01506\": container with ID starting with 97267ea26ed522cd788f743b1560526b946560622e06104fe0a17a586dd01506 not found: ID does not exist" Jan 22 13:52:40 crc kubenswrapper[4773]: I0122 13:52:40.682612 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c2c7109-994b-4714-838c-af00edf00c2a" path="/var/lib/kubelet/pods/5c2c7109-994b-4714-838c-af00edf00c2a/volumes" Jan 22 13:53:01 crc kubenswrapper[4773]: I0122 13:53:01.067159 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-fc02-account-create-update-l6hb2"] Jan 22 13:53:01 crc kubenswrapper[4773]: I0122 13:53:01.081204 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-create-gwgqx"] Jan 22 13:53:01 crc kubenswrapper[4773]: I0122 13:53:01.093726 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-fc02-account-create-update-l6hb2"] Jan 22 13:53:01 crc kubenswrapper[4773]: I0122 13:53:01.102499 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-gwgqx"] Jan 22 13:53:02 crc kubenswrapper[4773]: I0122 13:53:02.679723 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="476de395-9692-4d5d-9283-91f71b6d049b" 
path="/var/lib/kubelet/pods/476de395-9692-4d5d-9283-91f71b6d049b/volumes" Jan 22 13:53:02 crc kubenswrapper[4773]: I0122 13:53:02.681399 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="768bc147-1241-4613-a1c0-13b218690eb8" path="/var/lib/kubelet/pods/768bc147-1241-4613-a1c0-13b218690eb8/volumes" Jan 22 13:53:04 crc kubenswrapper[4773]: I0122 13:53:04.074238 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 13:53:04 crc kubenswrapper[4773]: I0122 13:53:04.075242 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 13:53:04 crc kubenswrapper[4773]: I0122 13:53:04.075344 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 13:53:04 crc kubenswrapper[4773]: I0122 13:53:04.076503 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 13:53:04 crc kubenswrapper[4773]: I0122 13:53:04.076598 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96" gracePeriod=600 Jan 22 13:53:04 crc kubenswrapper[4773]: E0122 13:53:04.202582 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:53:04 crc kubenswrapper[4773]: I0122 13:53:04.588759 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96" exitCode=0 Jan 22 13:53:04 crc kubenswrapper[4773]: I0122 13:53:04.588830 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96"} Jan 22 13:53:04 crc kubenswrapper[4773]: I0122 13:53:04.588894 4773 scope.go:117] "RemoveContainer" containerID="b47baa0164b891015177a82736873324a0cec4115ca361eb866de456f186e0b2" Jan 22 13:53:04 crc kubenswrapper[4773]: I0122 13:53:04.590323 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96" Jan 22 
13:53:04 crc kubenswrapper[4773]: E0122 13:53:04.591108 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:53:11 crc kubenswrapper[4773]: I0122 13:53:11.033572 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-sn4wr"] Jan 22 13:53:11 crc kubenswrapper[4773]: I0122 13:53:11.044897 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-sn4wr"] Jan 22 13:53:12 crc kubenswrapper[4773]: I0122 13:53:12.675007 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e072ba5-f7d6-4378-9f71-b283a1a37bc0" path="/var/lib/kubelet/pods/5e072ba5-f7d6-4378-9f71-b283a1a37bc0/volumes" Jan 22 13:53:13 crc kubenswrapper[4773]: I0122 13:53:13.711948 4773 scope.go:117] "RemoveContainer" containerID="d7acd7d4a3ee3f37fc2c39f7160bcfba6fae907b5c51a1f62eb3369393100de5" Jan 22 13:53:13 crc kubenswrapper[4773]: I0122 13:53:13.784693 4773 scope.go:117] "RemoveContainer" containerID="36741715a434ecd8c59a9ec8c0735d450ec657fd242d2e1b0d998bcddf22a548" Jan 22 13:53:13 crc kubenswrapper[4773]: I0122 13:53:13.835974 4773 scope.go:117] "RemoveContainer" containerID="d5983520ccfe71aeeef3f25b4ee0de79de89ac13a1aaac17deaa2426eda22d76" Jan 22 13:53:15 crc kubenswrapper[4773]: I0122 13:53:15.658890 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96" Jan 22 13:53:15 crc kubenswrapper[4773]: E0122 13:53:15.659509 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:53:26 crc kubenswrapper[4773]: I0122 13:53:26.658789 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96" Jan 22 13:53:26 crc kubenswrapper[4773]: E0122 13:53:26.659618 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:53:38 crc kubenswrapper[4773]: I0122 13:53:38.659152 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96" Jan 22 13:53:38 crc kubenswrapper[4773]: E0122 13:53:38.660159 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" 
podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:53:51 crc kubenswrapper[4773]: I0122 13:53:51.658404 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96" Jan 22 13:53:51 crc kubenswrapper[4773]: E0122 13:53:51.659210 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:54:04 crc kubenswrapper[4773]: I0122 13:54:04.658650 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96" Jan 22 13:54:04 crc kubenswrapper[4773]: E0122 13:54:04.659631 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:54:19 crc kubenswrapper[4773]: I0122 13:54:19.660417 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96" Jan 22 13:54:19 crc kubenswrapper[4773]: E0122 13:54:19.661130 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:54:30 crc kubenswrapper[4773]: I0122 13:54:30.658374 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96" Jan 22 13:54:30 crc kubenswrapper[4773]: E0122 13:54:30.659521 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:54:45 crc kubenswrapper[4773]: I0122 13:54:45.659176 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96" Jan 22 13:54:45 crc kubenswrapper[4773]: E0122 13:54:45.661335 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:54:59 crc kubenswrapper[4773]: I0122 13:54:59.657889 4773 scope.go:117] "RemoveContainer" 
containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96" Jan 22 13:54:59 crc kubenswrapper[4773]: E0122 13:54:59.659101 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:55:11 crc kubenswrapper[4773]: I0122 13:55:11.658337 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96" Jan 22 13:55:11 crc kubenswrapper[4773]: E0122 13:55:11.659055 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:55:25 crc kubenswrapper[4773]: I0122 13:55:25.659634 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96" Jan 22 13:55:25 crc kubenswrapper[4773]: E0122 13:55:25.661237 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:55:39 crc kubenswrapper[4773]: I0122 13:55:39.659156 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96" Jan 22 13:55:39 crc kubenswrapper[4773]: E0122 13:55:39.660324 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:55:54 crc kubenswrapper[4773]: I0122 13:55:54.658416 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96" Jan 22 13:55:54 crc kubenswrapper[4773]: E0122 13:55:54.659247 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 13:56:00 crc kubenswrapper[4773]: I0122 13:56:00.727428 4773 generic.go:334] "Generic (PLEG): container finished" podID="69b32bbb-8065-4c41-91bb-3f13e9e321d0" containerID="d635c41e00c085db88c3c69a166485f45d9263dfedc7cbed265571c53aab3abc" exitCode=0 Jan 22 13:56:00 crc 
kubenswrapper[4773]: I0122 13:56:00.727629 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" event={"ID":"69b32bbb-8065-4c41-91bb-3f13e9e321d0","Type":"ContainerDied","Data":"d635c41e00c085db88c3c69a166485f45d9263dfedc7cbed265571c53aab3abc"} Jan 22 13:56:02 crc kubenswrapper[4773]: I0122 13:56:02.221588 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" Jan 22 13:56:02 crc kubenswrapper[4773]: I0122 13:56:02.424894 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8b2mv\" (UniqueName: \"kubernetes.io/projected/69b32bbb-8065-4c41-91bb-3f13e9e321d0-kube-api-access-8b2mv\") pod \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\" (UID: \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\") " Jan 22 13:56:02 crc kubenswrapper[4773]: I0122 13:56:02.425155 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/69b32bbb-8065-4c41-91bb-3f13e9e321d0-ssh-key-openstack-cell1\") pod \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\" (UID: \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\") " Jan 22 13:56:02 crc kubenswrapper[4773]: I0122 13:56:02.425240 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69b32bbb-8065-4c41-91bb-3f13e9e321d0-inventory\") pod \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\" (UID: \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\") " Jan 22 13:56:02 crc kubenswrapper[4773]: I0122 13:56:02.426167 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69b32bbb-8065-4c41-91bb-3f13e9e321d0-tripleo-cleanup-combined-ca-bundle\") pod \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\" (UID: \"69b32bbb-8065-4c41-91bb-3f13e9e321d0\") " Jan 22 13:56:02 crc kubenswrapper[4773]: I0122 13:56:02.435589 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69b32bbb-8065-4c41-91bb-3f13e9e321d0-tripleo-cleanup-combined-ca-bundle" (OuterVolumeSpecName: "tripleo-cleanup-combined-ca-bundle") pod "69b32bbb-8065-4c41-91bb-3f13e9e321d0" (UID: "69b32bbb-8065-4c41-91bb-3f13e9e321d0"). InnerVolumeSpecName "tripleo-cleanup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:56:02 crc kubenswrapper[4773]: I0122 13:56:02.436294 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69b32bbb-8065-4c41-91bb-3f13e9e321d0-kube-api-access-8b2mv" (OuterVolumeSpecName: "kube-api-access-8b2mv") pod "69b32bbb-8065-4c41-91bb-3f13e9e321d0" (UID: "69b32bbb-8065-4c41-91bb-3f13e9e321d0"). InnerVolumeSpecName "kube-api-access-8b2mv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 13:56:02 crc kubenswrapper[4773]: I0122 13:56:02.452879 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69b32bbb-8065-4c41-91bb-3f13e9e321d0-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "69b32bbb-8065-4c41-91bb-3f13e9e321d0" (UID: "69b32bbb-8065-4c41-91bb-3f13e9e321d0"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:56:02 crc kubenswrapper[4773]: I0122 13:56:02.455146 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69b32bbb-8065-4c41-91bb-3f13e9e321d0-inventory" (OuterVolumeSpecName: "inventory") pod "69b32bbb-8065-4c41-91bb-3f13e9e321d0" (UID: "69b32bbb-8065-4c41-91bb-3f13e9e321d0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 13:56:02 crc kubenswrapper[4773]: I0122 13:56:02.528783 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8b2mv\" (UniqueName: \"kubernetes.io/projected/69b32bbb-8065-4c41-91bb-3f13e9e321d0-kube-api-access-8b2mv\") on node \"crc\" DevicePath \"\"" Jan 22 13:56:02 crc kubenswrapper[4773]: I0122 13:56:02.528823 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/69b32bbb-8065-4c41-91bb-3f13e9e321d0-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 13:56:02 crc kubenswrapper[4773]: I0122 13:56:02.528833 4773 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69b32bbb-8065-4c41-91bb-3f13e9e321d0-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 13:56:02 crc kubenswrapper[4773]: I0122 13:56:02.528843 4773 reconciler_common.go:293] "Volume detached for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69b32bbb-8065-4c41-91bb-3f13e9e321d0-tripleo-cleanup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 13:56:02 crc kubenswrapper[4773]: I0122 13:56:02.759154 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s" event={"ID":"69b32bbb-8065-4c41-91bb-3f13e9e321d0","Type":"ContainerDied","Data":"0118fdb1f75003b6d85cc48dc54ef6b1b7ac77f1c8f34e0ef5d18e962ab3a020"} Jan 22 13:56:02 crc kubenswrapper[4773]: I0122 13:56:02.759218 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0118fdb1f75003b6d85cc48dc54ef6b1b7ac77f1c8f34e0ef5d18e962ab3a020" Jan 22 13:56:02 crc kubenswrapper[4773]: I0122 13:56:02.759249 4773 util.go:48] "No ready sandbox for pod can be found. 
Jan 22 13:56:08 crc kubenswrapper[4773]: I0122 13:56:08.658566 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96"
Jan 22 13:56:08 crc kubenswrapper[4773]: E0122 13:56:08.659309 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:56:08 crc kubenswrapper[4773]: I0122 13:56:08.980448 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-r6264"]
Jan 22 13:56:08 crc kubenswrapper[4773]: E0122 13:56:08.981001 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c2c7109-994b-4714-838c-af00edf00c2a" containerName="extract-content"
Jan 22 13:56:08 crc kubenswrapper[4773]: I0122 13:56:08.981024 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c2c7109-994b-4714-838c-af00edf00c2a" containerName="extract-content"
Jan 22 13:56:08 crc kubenswrapper[4773]: E0122 13:56:08.981049 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69b32bbb-8065-4c41-91bb-3f13e9e321d0" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1"
Jan 22 13:56:08 crc kubenswrapper[4773]: I0122 13:56:08.981059 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="69b32bbb-8065-4c41-91bb-3f13e9e321d0" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1"
Jan 22 13:56:08 crc kubenswrapper[4773]: E0122 13:56:08.981080 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c2c7109-994b-4714-838c-af00edf00c2a" containerName="registry-server"
Jan 22 13:56:08 crc kubenswrapper[4773]: I0122 13:56:08.981088 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c2c7109-994b-4714-838c-af00edf00c2a" containerName="registry-server"
Jan 22 13:56:08 crc kubenswrapper[4773]: E0122 13:56:08.981111 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c2c7109-994b-4714-838c-af00edf00c2a" containerName="extract-utilities"
Jan 22 13:56:08 crc kubenswrapper[4773]: I0122 13:56:08.981121 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c2c7109-994b-4714-838c-af00edf00c2a" containerName="extract-utilities"
Jan 22 13:56:08 crc kubenswrapper[4773]: I0122 13:56:08.981384 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="69b32bbb-8065-4c41-91bb-3f13e9e321d0" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1"
Jan 22 13:56:08 crc kubenswrapper[4773]: I0122 13:56:08.981427 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c2c7109-994b-4714-838c-af00edf00c2a" containerName="registry-server"
Jan 22 13:56:08 crc kubenswrapper[4773]: I0122 13:56:08.982306 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-r6264"
Jan 22 13:56:08 crc kubenswrapper[4773]: I0122 13:56:08.984733 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 22 13:56:08 crc kubenswrapper[4773]: I0122 13:56:08.984933 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Jan 22 13:56:08 crc kubenswrapper[4773]: I0122 13:56:08.985340 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Jan 22 13:56:08 crc kubenswrapper[4773]: I0122 13:56:08.985462 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-bzqnq"
Jan 22 13:56:08 crc kubenswrapper[4773]: I0122 13:56:08.992408 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-r6264"]
Jan 22 13:56:09 crc kubenswrapper[4773]: I0122 13:56:09.171993 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk27z\" (UniqueName: \"kubernetes.io/projected/a4e419ee-625b-4922-b9d7-4aefc6eae65b-kube-api-access-zk27z\") pod \"bootstrap-openstack-openstack-cell1-r6264\" (UID: \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\") " pod="openstack/bootstrap-openstack-openstack-cell1-r6264"
Jan 22 13:56:09 crc kubenswrapper[4773]: I0122 13:56:09.172159 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/a4e419ee-625b-4922-b9d7-4aefc6eae65b-ssh-key-openstack-cell1\") pod \"bootstrap-openstack-openstack-cell1-r6264\" (UID: \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\") " pod="openstack/bootstrap-openstack-openstack-cell1-r6264"
Jan 22 13:56:09 crc kubenswrapper[4773]: I0122 13:56:09.172235 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4e419ee-625b-4922-b9d7-4aefc6eae65b-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-r6264\" (UID: \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\") " pod="openstack/bootstrap-openstack-openstack-cell1-r6264"
Jan 22 13:56:09 crc kubenswrapper[4773]: I0122 13:56:09.172319 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4e419ee-625b-4922-b9d7-4aefc6eae65b-inventory\") pod \"bootstrap-openstack-openstack-cell1-r6264\" (UID: \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\") " pod="openstack/bootstrap-openstack-openstack-cell1-r6264"
Jan 22 13:56:09 crc kubenswrapper[4773]: I0122 13:56:09.274794 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk27z\" (UniqueName: \"kubernetes.io/projected/a4e419ee-625b-4922-b9d7-4aefc6eae65b-kube-api-access-zk27z\") pod \"bootstrap-openstack-openstack-cell1-r6264\" (UID: \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\") " pod="openstack/bootstrap-openstack-openstack-cell1-r6264"
Jan 22 13:56:09 crc kubenswrapper[4773]: I0122 13:56:09.274857 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/a4e419ee-625b-4922-b9d7-4aefc6eae65b-ssh-key-openstack-cell1\") pod \"bootstrap-openstack-openstack-cell1-r6264\" (UID: \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\") " pod="openstack/bootstrap-openstack-openstack-cell1-r6264"
Jan 22 13:56:09 crc kubenswrapper[4773]: I0122 13:56:09.274887 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4e419ee-625b-4922-b9d7-4aefc6eae65b-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-r6264\" (UID: \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\") " pod="openstack/bootstrap-openstack-openstack-cell1-r6264"
Jan 22 13:56:09 crc kubenswrapper[4773]: I0122 13:56:09.274943 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4e419ee-625b-4922-b9d7-4aefc6eae65b-inventory\") pod \"bootstrap-openstack-openstack-cell1-r6264\" (UID: \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\") " pod="openstack/bootstrap-openstack-openstack-cell1-r6264"
Jan 22 13:56:09 crc kubenswrapper[4773]: I0122 13:56:09.281248 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4e419ee-625b-4922-b9d7-4aefc6eae65b-inventory\") pod \"bootstrap-openstack-openstack-cell1-r6264\" (UID: \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\") " pod="openstack/bootstrap-openstack-openstack-cell1-r6264"
Jan 22 13:56:09 crc kubenswrapper[4773]: I0122 13:56:09.290490 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/a4e419ee-625b-4922-b9d7-4aefc6eae65b-ssh-key-openstack-cell1\") pod \"bootstrap-openstack-openstack-cell1-r6264\" (UID: \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\") " pod="openstack/bootstrap-openstack-openstack-cell1-r6264"
Jan 22 13:56:09 crc kubenswrapper[4773]: I0122 13:56:09.291456 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4e419ee-625b-4922-b9d7-4aefc6eae65b-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-r6264\" (UID: \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\") " pod="openstack/bootstrap-openstack-openstack-cell1-r6264"
Jan 22 13:56:09 crc kubenswrapper[4773]: I0122 13:56:09.294249 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk27z\" (UniqueName: \"kubernetes.io/projected/a4e419ee-625b-4922-b9d7-4aefc6eae65b-kube-api-access-zk27z\") pod \"bootstrap-openstack-openstack-cell1-r6264\" (UID: \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\") " pod="openstack/bootstrap-openstack-openstack-cell1-r6264"
Jan 22 13:56:09 crc kubenswrapper[4773]: I0122 13:56:09.309397 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-r6264"
Jan 22 13:56:09 crc kubenswrapper[4773]: I0122 13:56:09.888904 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-r6264"]
Jan 22 13:56:09 crc kubenswrapper[4773]: I0122 13:56:09.899770 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 22 13:56:10 crc kubenswrapper[4773]: I0122 13:56:10.839034 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-r6264" event={"ID":"a4e419ee-625b-4922-b9d7-4aefc6eae65b","Type":"ContainerStarted","Data":"f33236b7de70601dd45c3dd98ff381c18ab0483d91e02ab4c5269a4f4cf55a6d"}
Jan 22 13:56:10 crc kubenswrapper[4773]: I0122 13:56:10.839579 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-r6264" event={"ID":"a4e419ee-625b-4922-b9d7-4aefc6eae65b","Type":"ContainerStarted","Data":"7aad2a646d282221c9b60e0eae3429c4a2335e22b307dddbed04e5450454ee43"}
Jan 22 13:56:10 crc kubenswrapper[4773]: I0122 13:56:10.860451 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-openstack-openstack-cell1-r6264" podStartSLOduration=2.220187217 podStartE2EDuration="2.860423928s" podCreationTimestamp="2026-01-22 13:56:08 +0000 UTC" firstStartedPulling="2026-01-22 13:56:09.89948916 +0000 UTC m=+7277.477604985" lastFinishedPulling="2026-01-22 13:56:10.539725871 +0000 UTC m=+7278.117841696" observedRunningTime="2026-01-22 13:56:10.858833593 +0000 UTC m=+7278.436949438" watchObservedRunningTime="2026-01-22 13:56:10.860423928 +0000 UTC m=+7278.438539753"
Jan 22 13:56:21 crc kubenswrapper[4773]: I0122 13:56:21.659515 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96"
Jan 22 13:56:21 crc kubenswrapper[4773]: E0122 13:56:21.660376 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:56:33 crc kubenswrapper[4773]: I0122 13:56:33.658154 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96"
Jan 22 13:56:33 crc kubenswrapper[4773]: E0122 13:56:33.659181 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:56:44 crc kubenswrapper[4773]: I0122 13:56:44.658908 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96"
Jan 22 13:56:44 crc kubenswrapper[4773]: E0122 13:56:44.660182 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
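The startup-latency entry for bootstrap-openstack-openstack-cell1-r6264 decomposes cleanly: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that E2E figure minus the image-pull window (lastFinishedPulling minus firstStartedPulling). Re-doing the arithmetic with the numbers from the entry (a check on the logged values, not kubelet code):

    package main

    import "fmt"

    func main() {
            // Monotonic clock offsets (the m=+... values) from the entry above.
            firstStartedPulling := 7277.477604985
            lastFinishedPulling := 7278.117841696
            e2e := 2.860423928 // podStartE2EDuration as logged

            pull := lastFinishedPulling - firstStartedPulling
            fmt.Printf("image pull: %.9fs\n", pull)       // 0.640236711s
            fmt.Printf("SLO duration: %.9fs\n", e2e-pull) // 2.220187217s, matching the log
    }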
Jan 22 13:56:56 crc kubenswrapper[4773]: I0122 13:56:56.658988 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96"
Jan 22 13:56:56 crc kubenswrapper[4773]: E0122 13:56:56.660607 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:57:10 crc kubenswrapper[4773]: I0122 13:57:10.659216 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96"
Jan 22 13:57:10 crc kubenswrapper[4773]: E0122 13:57:10.660593 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:57:14 crc kubenswrapper[4773]: I0122 13:57:14.034079 4773 scope.go:117] "RemoveContainer" containerID="452e03e9fdb87c14ccc8755e1643663deffad2bb08624d47551434d12b90e070"
Jan 22 13:57:14 crc kubenswrapper[4773]: I0122 13:57:14.084329 4773 scope.go:117] "RemoveContainer" containerID="16e1f664c0706fa57a95e490f31dfff5be8d24370e569a4c30cb27aa14151e9a"
Jan 22 13:57:14 crc kubenswrapper[4773]: I0122 13:57:14.129576 4773 scope.go:117] "RemoveContainer" containerID="11a1f825eb4e244ed052022bc302477d33b90d273c3c1e2329f2dc9e3ef740cb"
Jan 22 13:57:22 crc kubenswrapper[4773]: I0122 13:57:22.667685 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96"
Jan 22 13:57:22 crc kubenswrapper[4773]: E0122 13:57:22.668675 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:57:37 crc kubenswrapper[4773]: I0122 13:57:37.658591 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96"
Jan 22 13:57:37 crc kubenswrapper[4773]: E0122 13:57:37.659501 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:57:51 crc kubenswrapper[4773]: I0122 13:57:51.658355 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96"
Jan 22 13:57:51 crc kubenswrapper[4773]: E0122 13:57:51.659265 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 13:58:04 crc kubenswrapper[4773]: I0122 13:58:04.658323 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96"
Jan 22 13:58:05 crc kubenswrapper[4773]: I0122 13:58:05.398768 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"3ed07aff4ff5fa2ed28234057c36f8999a1e1cd3208798180a3cb17cdd63263f"}
Jan 22 13:59:25 crc kubenswrapper[4773]: I0122 13:59:25.702664 4773 generic.go:334] "Generic (PLEG): container finished" podID="a4e419ee-625b-4922-b9d7-4aefc6eae65b" containerID="f33236b7de70601dd45c3dd98ff381c18ab0483d91e02ab4c5269a4f4cf55a6d" exitCode=0
Jan 22 13:59:25 crc kubenswrapper[4773]: I0122 13:59:25.702730 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-r6264" event={"ID":"a4e419ee-625b-4922-b9d7-4aefc6eae65b","Type":"ContainerDied","Data":"f33236b7de70601dd45c3dd98ff381c18ab0483d91e02ab4c5269a4f4cf55a6d"}
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.147248 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-r6264"
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.169122 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4e419ee-625b-4922-b9d7-4aefc6eae65b-inventory\") pod \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\" (UID: \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\") "
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.169364 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4e419ee-625b-4922-b9d7-4aefc6eae65b-bootstrap-combined-ca-bundle\") pod \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\" (UID: \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\") "
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.169421 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zk27z\" (UniqueName: \"kubernetes.io/projected/a4e419ee-625b-4922-b9d7-4aefc6eae65b-kube-api-access-zk27z\") pod \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\" (UID: \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\") "
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.169539 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/a4e419ee-625b-4922-b9d7-4aefc6eae65b-ssh-key-openstack-cell1\") pod \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\" (UID: \"a4e419ee-625b-4922-b9d7-4aefc6eae65b\") "
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.183746 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4e419ee-625b-4922-b9d7-4aefc6eae65b-kube-api-access-zk27z" (OuterVolumeSpecName: "kube-api-access-zk27z") pod "a4e419ee-625b-4922-b9d7-4aefc6eae65b" (UID: "a4e419ee-625b-4922-b9d7-4aefc6eae65b"). InnerVolumeSpecName "kube-api-access-zk27z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.196204 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4e419ee-625b-4922-b9d7-4aefc6eae65b-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "a4e419ee-625b-4922-b9d7-4aefc6eae65b" (UID: "a4e419ee-625b-4922-b9d7-4aefc6eae65b"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.207376 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4e419ee-625b-4922-b9d7-4aefc6eae65b-inventory" (OuterVolumeSpecName: "inventory") pod "a4e419ee-625b-4922-b9d7-4aefc6eae65b" (UID: "a4e419ee-625b-4922-b9d7-4aefc6eae65b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.220744 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4e419ee-625b-4922-b9d7-4aefc6eae65b-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "a4e419ee-625b-4922-b9d7-4aefc6eae65b" (UID: "a4e419ee-625b-4922-b9d7-4aefc6eae65b"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.273270 4773 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4e419ee-625b-4922-b9d7-4aefc6eae65b-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.273322 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zk27z\" (UniqueName: \"kubernetes.io/projected/a4e419ee-625b-4922-b9d7-4aefc6eae65b-kube-api-access-zk27z\") on node \"crc\" DevicePath \"\""
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.273334 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/a4e419ee-625b-4922-b9d7-4aefc6eae65b-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\""
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.273346 4773 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4e419ee-625b-4922-b9d7-4aefc6eae65b-inventory\") on node \"crc\" DevicePath \"\""
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.724043 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-r6264" event={"ID":"a4e419ee-625b-4922-b9d7-4aefc6eae65b","Type":"ContainerDied","Data":"7aad2a646d282221c9b60e0eae3429c4a2335e22b307dddbed04e5450454ee43"}
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.724126 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7aad2a646d282221c9b60e0eae3429c4a2335e22b307dddbed04e5450454ee43"
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.724190 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-r6264"
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.823930 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-pg6sn"]
Jan 22 13:59:27 crc kubenswrapper[4773]: E0122 13:59:27.824652 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4e419ee-625b-4922-b9d7-4aefc6eae65b" containerName="bootstrap-openstack-openstack-cell1"
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.824670 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4e419ee-625b-4922-b9d7-4aefc6eae65b" containerName="bootstrap-openstack-openstack-cell1"
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.824886 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4e419ee-625b-4922-b9d7-4aefc6eae65b" containerName="bootstrap-openstack-openstack-cell1"
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.825760 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-pg6sn"
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.827869 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.833630 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-pg6sn"]
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.836564 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.836696 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-bzqnq"
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.838210 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.891426 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/537d68a1-fe75-4e50-8c4b-c9e4c0ab7899-inventory\") pod \"download-cache-openstack-openstack-cell1-pg6sn\" (UID: \"537d68a1-fe75-4e50-8c4b-c9e4c0ab7899\") " pod="openstack/download-cache-openstack-openstack-cell1-pg6sn"
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.891538 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/537d68a1-fe75-4e50-8c4b-c9e4c0ab7899-ssh-key-openstack-cell1\") pod \"download-cache-openstack-openstack-cell1-pg6sn\" (UID: \"537d68a1-fe75-4e50-8c4b-c9e4c0ab7899\") " pod="openstack/download-cache-openstack-openstack-cell1-pg6sn"
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.891614 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqgnf\" (UniqueName: \"kubernetes.io/projected/537d68a1-fe75-4e50-8c4b-c9e4c0ab7899-kube-api-access-pqgnf\") pod \"download-cache-openstack-openstack-cell1-pg6sn\" (UID: \"537d68a1-fe75-4e50-8c4b-c9e4c0ab7899\") " pod="openstack/download-cache-openstack-openstack-cell1-pg6sn"
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.993998 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/537d68a1-fe75-4e50-8c4b-c9e4c0ab7899-inventory\") pod \"download-cache-openstack-openstack-cell1-pg6sn\" (UID: \"537d68a1-fe75-4e50-8c4b-c9e4c0ab7899\") " pod="openstack/download-cache-openstack-openstack-cell1-pg6sn"
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.994113 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/537d68a1-fe75-4e50-8c4b-c9e4c0ab7899-ssh-key-openstack-cell1\") pod \"download-cache-openstack-openstack-cell1-pg6sn\" (UID: \"537d68a1-fe75-4e50-8c4b-c9e4c0ab7899\") " pod="openstack/download-cache-openstack-openstack-cell1-pg6sn"
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.994187 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqgnf\" (UniqueName: \"kubernetes.io/projected/537d68a1-fe75-4e50-8c4b-c9e4c0ab7899-kube-api-access-pqgnf\") pod \"download-cache-openstack-openstack-cell1-pg6sn\" (UID: \"537d68a1-fe75-4e50-8c4b-c9e4c0ab7899\") " pod="openstack/download-cache-openstack-openstack-cell1-pg6sn"
Jan 22 13:59:27 crc kubenswrapper[4773]: I0122 13:59:27.998447 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/537d68a1-fe75-4e50-8c4b-c9e4c0ab7899-inventory\") pod \"download-cache-openstack-openstack-cell1-pg6sn\" (UID: \"537d68a1-fe75-4e50-8c4b-c9e4c0ab7899\") " pod="openstack/download-cache-openstack-openstack-cell1-pg6sn"
Jan 22 13:59:28 crc kubenswrapper[4773]: I0122 13:59:28.000553 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/537d68a1-fe75-4e50-8c4b-c9e4c0ab7899-ssh-key-openstack-cell1\") pod \"download-cache-openstack-openstack-cell1-pg6sn\" (UID: \"537d68a1-fe75-4e50-8c4b-c9e4c0ab7899\") " pod="openstack/download-cache-openstack-openstack-cell1-pg6sn"
Jan 22 13:59:28 crc kubenswrapper[4773]: I0122 13:59:28.012485 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqgnf\" (UniqueName: \"kubernetes.io/projected/537d68a1-fe75-4e50-8c4b-c9e4c0ab7899-kube-api-access-pqgnf\") pod \"download-cache-openstack-openstack-cell1-pg6sn\" (UID: \"537d68a1-fe75-4e50-8c4b-c9e4c0ab7899\") " pod="openstack/download-cache-openstack-openstack-cell1-pg6sn"
Jan 22 13:59:28 crc kubenswrapper[4773]: I0122 13:59:28.150863 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-pg6sn"
Jan 22 13:59:28 crc kubenswrapper[4773]: I0122 13:59:28.687857 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-pg6sn"]
Jan 22 13:59:28 crc kubenswrapper[4773]: I0122 13:59:28.738220 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-pg6sn" event={"ID":"537d68a1-fe75-4e50-8c4b-c9e4c0ab7899","Type":"ContainerStarted","Data":"a35fc63979dd35f42d2880a56c7ee280f2c48cf869648626ee9b3d97b956b63d"}
Jan 22 13:59:30 crc kubenswrapper[4773]: I0122 13:59:30.769309 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-pg6sn" event={"ID":"537d68a1-fe75-4e50-8c4b-c9e4c0ab7899","Type":"ContainerStarted","Data":"6521a9fcfca4d849f97544386872b1c6e197af43ff905d3a9b7dfb3d87d17f0f"}
Jan 22 13:59:30 crc kubenswrapper[4773]: I0122 13:59:30.781277 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-prqss"]
Jan 22 13:59:30 crc kubenswrapper[4773]: I0122 13:59:30.784449 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-prqss"
Jan 22 13:59:30 crc kubenswrapper[4773]: I0122 13:59:30.814420 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-prqss"]
Jan 22 13:59:30 crc kubenswrapper[4773]: I0122 13:59:30.833807 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-openstack-openstack-cell1-pg6sn" podStartSLOduration=2.485627581 podStartE2EDuration="3.82677851s" podCreationTimestamp="2026-01-22 13:59:27 +0000 UTC" firstStartedPulling="2026-01-22 13:59:28.6940108 +0000 UTC m=+7476.272126635" lastFinishedPulling="2026-01-22 13:59:30.035161739 +0000 UTC m=+7477.613277564" observedRunningTime="2026-01-22 13:59:30.800510318 +0000 UTC m=+7478.378626143" watchObservedRunningTime="2026-01-22 13:59:30.82677851 +0000 UTC m=+7478.404894335"
Jan 22 13:59:30 crc kubenswrapper[4773]: I0122 13:59:30.863582 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22f30e95-21f3-48fa-9d21-b669aab7bc06-utilities\") pod \"redhat-marketplace-prqss\" (UID: \"22f30e95-21f3-48fa-9d21-b669aab7bc06\") " pod="openshift-marketplace/redhat-marketplace-prqss"
Jan 22 13:59:30 crc kubenswrapper[4773]: I0122 13:59:30.864091 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5j4z\" (UniqueName: \"kubernetes.io/projected/22f30e95-21f3-48fa-9d21-b669aab7bc06-kube-api-access-g5j4z\") pod \"redhat-marketplace-prqss\" (UID: \"22f30e95-21f3-48fa-9d21-b669aab7bc06\") " pod="openshift-marketplace/redhat-marketplace-prqss"
Jan 22 13:59:30 crc kubenswrapper[4773]: I0122 13:59:30.864144 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22f30e95-21f3-48fa-9d21-b669aab7bc06-catalog-content\") pod \"redhat-marketplace-prqss\" (UID: \"22f30e95-21f3-48fa-9d21-b669aab7bc06\") " pod="openshift-marketplace/redhat-marketplace-prqss"
Jan 22 13:59:30 crc kubenswrapper[4773]: I0122 13:59:30.966456 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22f30e95-21f3-48fa-9d21-b669aab7bc06-catalog-content\") pod \"redhat-marketplace-prqss\" (UID: \"22f30e95-21f3-48fa-9d21-b669aab7bc06\") " pod="openshift-marketplace/redhat-marketplace-prqss"
Jan 22 13:59:30 crc kubenswrapper[4773]: I0122 13:59:30.966680 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22f30e95-21f3-48fa-9d21-b669aab7bc06-utilities\") pod \"redhat-marketplace-prqss\" (UID: \"22f30e95-21f3-48fa-9d21-b669aab7bc06\") " pod="openshift-marketplace/redhat-marketplace-prqss"
Jan 22 13:59:30 crc kubenswrapper[4773]: I0122 13:59:30.966754 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5j4z\" (UniqueName: \"kubernetes.io/projected/22f30e95-21f3-48fa-9d21-b669aab7bc06-kube-api-access-g5j4z\") pod \"redhat-marketplace-prqss\" (UID: \"22f30e95-21f3-48fa-9d21-b669aab7bc06\") " pod="openshift-marketplace/redhat-marketplace-prqss"
Jan 22 13:59:30 crc kubenswrapper[4773]: I0122 13:59:30.967086 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22f30e95-21f3-48fa-9d21-b669aab7bc06-catalog-content\") pod \"redhat-marketplace-prqss\" (UID: \"22f30e95-21f3-48fa-9d21-b669aab7bc06\") " pod="openshift-marketplace/redhat-marketplace-prqss"
Jan 22 13:59:30 crc kubenswrapper[4773]: I0122 13:59:30.967242 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22f30e95-21f3-48fa-9d21-b669aab7bc06-utilities\") pod \"redhat-marketplace-prqss\" (UID: \"22f30e95-21f3-48fa-9d21-b669aab7bc06\") " pod="openshift-marketplace/redhat-marketplace-prqss"
Jan 22 13:59:30 crc kubenswrapper[4773]: I0122 13:59:30.985463 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5j4z\" (UniqueName: \"kubernetes.io/projected/22f30e95-21f3-48fa-9d21-b669aab7bc06-kube-api-access-g5j4z\") pod \"redhat-marketplace-prqss\" (UID: \"22f30e95-21f3-48fa-9d21-b669aab7bc06\") " pod="openshift-marketplace/redhat-marketplace-prqss"
Jan 22 13:59:31 crc kubenswrapper[4773]: I0122 13:59:31.132045 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-prqss"
Jan 22 13:59:31 crc kubenswrapper[4773]: I0122 13:59:31.927865 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-prqss"]
Jan 22 13:59:32 crc kubenswrapper[4773]: I0122 13:59:32.788104 4773 generic.go:334] "Generic (PLEG): container finished" podID="22f30e95-21f3-48fa-9d21-b669aab7bc06" containerID="26f0f45025b2a59aabf0b9863b8db1dfdb0ef330bed46cd381f62167650d757b" exitCode=0
Jan 22 13:59:32 crc kubenswrapper[4773]: I0122 13:59:32.788423 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-prqss" event={"ID":"22f30e95-21f3-48fa-9d21-b669aab7bc06","Type":"ContainerDied","Data":"26f0f45025b2a59aabf0b9863b8db1dfdb0ef330bed46cd381f62167650d757b"}
Jan 22 13:59:32 crc kubenswrapper[4773]: I0122 13:59:32.788456 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-prqss" event={"ID":"22f30e95-21f3-48fa-9d21-b669aab7bc06","Type":"ContainerStarted","Data":"15abb6341ed0cf602a1b3281e1b1acc4f2247f6240d8d7703eac6ed3845d9f4a"}
Jan 22 13:59:33 crc kubenswrapper[4773]: I0122 13:59:33.800269 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-prqss" event={"ID":"22f30e95-21f3-48fa-9d21-b669aab7bc06","Type":"ContainerStarted","Data":"483dbe88557bae9f4dd3fe52ab08c84a464be2dc0a2236eee4f171d9a905e3d2"}
Jan 22 13:59:34 crc kubenswrapper[4773]: I0122 13:59:34.815552 4773 generic.go:334] "Generic (PLEG): container finished" podID="22f30e95-21f3-48fa-9d21-b669aab7bc06" containerID="483dbe88557bae9f4dd3fe52ab08c84a464be2dc0a2236eee4f171d9a905e3d2" exitCode=0
Jan 22 13:59:34 crc kubenswrapper[4773]: I0122 13:59:34.815644 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-prqss" event={"ID":"22f30e95-21f3-48fa-9d21-b669aab7bc06","Type":"ContainerDied","Data":"483dbe88557bae9f4dd3fe52ab08c84a464be2dc0a2236eee4f171d9a905e3d2"}
Jan 22 13:59:36 crc kubenswrapper[4773]: I0122 13:59:36.837540 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-prqss" event={"ID":"22f30e95-21f3-48fa-9d21-b669aab7bc06","Type":"ContainerStarted","Data":"d1dbc7d7f6a733fecb6d59988bf08f5665bf9dea9521b69c576a8cfb762ae759"}
Jan 22 13:59:36 crc kubenswrapper[4773]: I0122 13:59:36.862140 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-prqss" podStartSLOduration=4.137362729 podStartE2EDuration="6.862117387s" podCreationTimestamp="2026-01-22 13:59:30 +0000 UTC" firstStartedPulling="2026-01-22 13:59:32.795769139 +0000 UTC m=+7480.373884964" lastFinishedPulling="2026-01-22 13:59:35.520523777 +0000 UTC m=+7483.098639622" observedRunningTime="2026-01-22 13:59:36.854198143 +0000 UTC m=+7484.432313968" watchObservedRunningTime="2026-01-22 13:59:36.862117387 +0000 UTC m=+7484.440233212"
Jan 22 13:59:41 crc kubenswrapper[4773]: I0122 13:59:41.133521 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-prqss"
Jan 22 13:59:41 crc kubenswrapper[4773]: I0122 13:59:41.134144 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-prqss"
Jan 22 13:59:41 crc kubenswrapper[4773]: I0122 13:59:41.190715 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-prqss"
Jan 22 13:59:41 crc kubenswrapper[4773]: I0122 13:59:41.966729 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-prqss"
Jan 22 13:59:42 crc kubenswrapper[4773]: I0122 13:59:42.020390 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-prqss"]
Jan 22 13:59:43 crc kubenswrapper[4773]: I0122 13:59:43.911205 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-prqss" podUID="22f30e95-21f3-48fa-9d21-b669aab7bc06" containerName="registry-server" containerID="cri-o://d1dbc7d7f6a733fecb6d59988bf08f5665bf9dea9521b69c576a8cfb762ae759" gracePeriod=2
Jan 22 13:59:44 crc kubenswrapper[4773]: I0122 13:59:44.896355 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-prqss"
Jan 22 13:59:44 crc kubenswrapper[4773]: I0122 13:59:44.928360 4773 generic.go:334] "Generic (PLEG): container finished" podID="22f30e95-21f3-48fa-9d21-b669aab7bc06" containerID="d1dbc7d7f6a733fecb6d59988bf08f5665bf9dea9521b69c576a8cfb762ae759" exitCode=0
Jan 22 13:59:44 crc kubenswrapper[4773]: I0122 13:59:44.928419 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-prqss" event={"ID":"22f30e95-21f3-48fa-9d21-b669aab7bc06","Type":"ContainerDied","Data":"d1dbc7d7f6a733fecb6d59988bf08f5665bf9dea9521b69c576a8cfb762ae759"}
Jan 22 13:59:44 crc kubenswrapper[4773]: I0122 13:59:44.928455 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-prqss" event={"ID":"22f30e95-21f3-48fa-9d21-b669aab7bc06","Type":"ContainerDied","Data":"15abb6341ed0cf602a1b3281e1b1acc4f2247f6240d8d7703eac6ed3845d9f4a"}
Jan 22 13:59:44 crc kubenswrapper[4773]: I0122 13:59:44.928477 4773 scope.go:117] "RemoveContainer" containerID="d1dbc7d7f6a733fecb6d59988bf08f5665bf9dea9521b69c576a8cfb762ae759"
Jan 22 13:59:44 crc kubenswrapper[4773]: I0122 13:59:44.928655 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-prqss"
Jan 22 13:59:44 crc kubenswrapper[4773]: I0122 13:59:44.965946 4773 scope.go:117] "RemoveContainer" containerID="483dbe88557bae9f4dd3fe52ab08c84a464be2dc0a2236eee4f171d9a905e3d2"
Jan 22 13:59:44 crc kubenswrapper[4773]: I0122 13:59:44.989341 4773 scope.go:117] "RemoveContainer" containerID="26f0f45025b2a59aabf0b9863b8db1dfdb0ef330bed46cd381f62167650d757b"
Jan 22 13:59:45 crc kubenswrapper[4773]: I0122 13:59:45.002473 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5j4z\" (UniqueName: \"kubernetes.io/projected/22f30e95-21f3-48fa-9d21-b669aab7bc06-kube-api-access-g5j4z\") pod \"22f30e95-21f3-48fa-9d21-b669aab7bc06\" (UID: \"22f30e95-21f3-48fa-9d21-b669aab7bc06\") "
Jan 22 13:59:45 crc kubenswrapper[4773]: I0122 13:59:45.002571 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22f30e95-21f3-48fa-9d21-b669aab7bc06-utilities\") pod \"22f30e95-21f3-48fa-9d21-b669aab7bc06\" (UID: \"22f30e95-21f3-48fa-9d21-b669aab7bc06\") "
Jan 22 13:59:45 crc kubenswrapper[4773]: I0122 13:59:45.002599 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22f30e95-21f3-48fa-9d21-b669aab7bc06-catalog-content\") pod \"22f30e95-21f3-48fa-9d21-b669aab7bc06\" (UID: \"22f30e95-21f3-48fa-9d21-b669aab7bc06\") "
Jan 22 13:59:45 crc kubenswrapper[4773]: I0122 13:59:45.004247 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22f30e95-21f3-48fa-9d21-b669aab7bc06-utilities" (OuterVolumeSpecName: "utilities") pod "22f30e95-21f3-48fa-9d21-b669aab7bc06" (UID: "22f30e95-21f3-48fa-9d21-b669aab7bc06"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 13:59:45 crc kubenswrapper[4773]: I0122 13:59:45.009130 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22f30e95-21f3-48fa-9d21-b669aab7bc06-kube-api-access-g5j4z" (OuterVolumeSpecName: "kube-api-access-g5j4z") pod "22f30e95-21f3-48fa-9d21-b669aab7bc06" (UID: "22f30e95-21f3-48fa-9d21-b669aab7bc06"). InnerVolumeSpecName "kube-api-access-g5j4z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 13:59:45 crc kubenswrapper[4773]: I0122 13:59:45.027654 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22f30e95-21f3-48fa-9d21-b669aab7bc06-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "22f30e95-21f3-48fa-9d21-b669aab7bc06" (UID: "22f30e95-21f3-48fa-9d21-b669aab7bc06"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 13:59:45 crc kubenswrapper[4773]: I0122 13:59:45.098157 4773 scope.go:117] "RemoveContainer" containerID="d1dbc7d7f6a733fecb6d59988bf08f5665bf9dea9521b69c576a8cfb762ae759"
Jan 22 13:59:45 crc kubenswrapper[4773]: E0122 13:59:45.099139 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1dbc7d7f6a733fecb6d59988bf08f5665bf9dea9521b69c576a8cfb762ae759\": container with ID starting with d1dbc7d7f6a733fecb6d59988bf08f5665bf9dea9521b69c576a8cfb762ae759 not found: ID does not exist" containerID="d1dbc7d7f6a733fecb6d59988bf08f5665bf9dea9521b69c576a8cfb762ae759"
Jan 22 13:59:45 crc kubenswrapper[4773]: I0122 13:59:45.099217 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1dbc7d7f6a733fecb6d59988bf08f5665bf9dea9521b69c576a8cfb762ae759"} err="failed to get container status \"d1dbc7d7f6a733fecb6d59988bf08f5665bf9dea9521b69c576a8cfb762ae759\": rpc error: code = NotFound desc = could not find container \"d1dbc7d7f6a733fecb6d59988bf08f5665bf9dea9521b69c576a8cfb762ae759\": container with ID starting with d1dbc7d7f6a733fecb6d59988bf08f5665bf9dea9521b69c576a8cfb762ae759 not found: ID does not exist"
Jan 22 13:59:45 crc kubenswrapper[4773]: I0122 13:59:45.099258 4773 scope.go:117] "RemoveContainer" containerID="483dbe88557bae9f4dd3fe52ab08c84a464be2dc0a2236eee4f171d9a905e3d2"
Jan 22 13:59:45 crc kubenswrapper[4773]: E0122 13:59:45.101648 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"483dbe88557bae9f4dd3fe52ab08c84a464be2dc0a2236eee4f171d9a905e3d2\": container with ID starting with 483dbe88557bae9f4dd3fe52ab08c84a464be2dc0a2236eee4f171d9a905e3d2 not found: ID does not exist" containerID="483dbe88557bae9f4dd3fe52ab08c84a464be2dc0a2236eee4f171d9a905e3d2"
Jan 22 13:59:45 crc kubenswrapper[4773]: I0122 13:59:45.101926 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"483dbe88557bae9f4dd3fe52ab08c84a464be2dc0a2236eee4f171d9a905e3d2"} err="failed to get container status \"483dbe88557bae9f4dd3fe52ab08c84a464be2dc0a2236eee4f171d9a905e3d2\": rpc error: code = NotFound desc = could not find container \"483dbe88557bae9f4dd3fe52ab08c84a464be2dc0a2236eee4f171d9a905e3d2\": container with ID starting with 483dbe88557bae9f4dd3fe52ab08c84a464be2dc0a2236eee4f171d9a905e3d2 not found: ID does not exist"
Jan 22 13:59:45 crc kubenswrapper[4773]: I0122 13:59:45.101964 4773 scope.go:117] "RemoveContainer" containerID="26f0f45025b2a59aabf0b9863b8db1dfdb0ef330bed46cd381f62167650d757b"
Jan 22 13:59:45 crc kubenswrapper[4773]: E0122 13:59:45.102427 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26f0f45025b2a59aabf0b9863b8db1dfdb0ef330bed46cd381f62167650d757b\": container with ID starting with 26f0f45025b2a59aabf0b9863b8db1dfdb0ef330bed46cd381f62167650d757b not found: ID does not exist" containerID="26f0f45025b2a59aabf0b9863b8db1dfdb0ef330bed46cd381f62167650d757b"
Jan 22 13:59:45 crc kubenswrapper[4773]: I0122 13:59:45.102457 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26f0f45025b2a59aabf0b9863b8db1dfdb0ef330bed46cd381f62167650d757b"} err="failed to get container status \"26f0f45025b2a59aabf0b9863b8db1dfdb0ef330bed46cd381f62167650d757b\": rpc error: code = NotFound desc = could not find container \"26f0f45025b2a59aabf0b9863b8db1dfdb0ef330bed46cd381f62167650d757b\": container with ID starting with 26f0f45025b2a59aabf0b9863b8db1dfdb0ef330bed46cd381f62167650d757b not found: ID does not exist"
find container \"26f0f45025b2a59aabf0b9863b8db1dfdb0ef330bed46cd381f62167650d757b\": container with ID starting with 26f0f45025b2a59aabf0b9863b8db1dfdb0ef330bed46cd381f62167650d757b not found: ID does not exist" Jan 22 13:59:45 crc kubenswrapper[4773]: I0122 13:59:45.105632 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5j4z\" (UniqueName: \"kubernetes.io/projected/22f30e95-21f3-48fa-9d21-b669aab7bc06-kube-api-access-g5j4z\") on node \"crc\" DevicePath \"\"" Jan 22 13:59:45 crc kubenswrapper[4773]: I0122 13:59:45.105669 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22f30e95-21f3-48fa-9d21-b669aab7bc06-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 13:59:45 crc kubenswrapper[4773]: I0122 13:59:45.105687 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22f30e95-21f3-48fa-9d21-b669aab7bc06-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 13:59:45 crc kubenswrapper[4773]: I0122 13:59:45.266909 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-prqss"] Jan 22 13:59:45 crc kubenswrapper[4773]: I0122 13:59:45.278555 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-prqss"] Jan 22 13:59:46 crc kubenswrapper[4773]: I0122 13:59:46.670326 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22f30e95-21f3-48fa-9d21-b669aab7bc06" path="/var/lib/kubelet/pods/22f30e95-21f3-48fa-9d21-b669aab7bc06/volumes" Jan 22 14:00:00 crc kubenswrapper[4773]: I0122 14:00:00.166108 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp"] Jan 22 14:00:00 crc kubenswrapper[4773]: E0122 14:00:00.167113 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22f30e95-21f3-48fa-9d21-b669aab7bc06" containerName="registry-server" Jan 22 14:00:00 crc kubenswrapper[4773]: I0122 14:00:00.167126 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="22f30e95-21f3-48fa-9d21-b669aab7bc06" containerName="registry-server" Jan 22 14:00:00 crc kubenswrapper[4773]: E0122 14:00:00.167155 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22f30e95-21f3-48fa-9d21-b669aab7bc06" containerName="extract-utilities" Jan 22 14:00:00 crc kubenswrapper[4773]: I0122 14:00:00.167161 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="22f30e95-21f3-48fa-9d21-b669aab7bc06" containerName="extract-utilities" Jan 22 14:00:00 crc kubenswrapper[4773]: E0122 14:00:00.167181 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22f30e95-21f3-48fa-9d21-b669aab7bc06" containerName="extract-content" Jan 22 14:00:00 crc kubenswrapper[4773]: I0122 14:00:00.167189 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="22f30e95-21f3-48fa-9d21-b669aab7bc06" containerName="extract-content" Jan 22 14:00:00 crc kubenswrapper[4773]: I0122 14:00:00.167467 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="22f30e95-21f3-48fa-9d21-b669aab7bc06" containerName="registry-server" Jan 22 14:00:00 crc kubenswrapper[4773]: I0122 14:00:00.168315 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp" Jan 22 14:00:00 crc kubenswrapper[4773]: I0122 14:00:00.170632 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 14:00:00 crc kubenswrapper[4773]: I0122 14:00:00.172740 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 14:00:00 crc kubenswrapper[4773]: I0122 14:00:00.178505 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp"] Jan 22 14:00:00 crc kubenswrapper[4773]: I0122 14:00:00.268071 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpccn\" (UniqueName: \"kubernetes.io/projected/994bc68e-a0f4-4085-a0df-f647dc8c462c-kube-api-access-kpccn\") pod \"collect-profiles-29484840-q28qp\" (UID: \"994bc68e-a0f4-4085-a0df-f647dc8c462c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp" Jan 22 14:00:00 crc kubenswrapper[4773]: I0122 14:00:00.268526 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/994bc68e-a0f4-4085-a0df-f647dc8c462c-config-volume\") pod \"collect-profiles-29484840-q28qp\" (UID: \"994bc68e-a0f4-4085-a0df-f647dc8c462c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp" Jan 22 14:00:00 crc kubenswrapper[4773]: I0122 14:00:00.268815 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/994bc68e-a0f4-4085-a0df-f647dc8c462c-secret-volume\") pod \"collect-profiles-29484840-q28qp\" (UID: \"994bc68e-a0f4-4085-a0df-f647dc8c462c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp" Jan 22 14:00:00 crc kubenswrapper[4773]: I0122 14:00:00.371334 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/994bc68e-a0f4-4085-a0df-f647dc8c462c-config-volume\") pod \"collect-profiles-29484840-q28qp\" (UID: \"994bc68e-a0f4-4085-a0df-f647dc8c462c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp" Jan 22 14:00:00 crc kubenswrapper[4773]: I0122 14:00:00.371414 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/994bc68e-a0f4-4085-a0df-f647dc8c462c-secret-volume\") pod \"collect-profiles-29484840-q28qp\" (UID: \"994bc68e-a0f4-4085-a0df-f647dc8c462c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp" Jan 22 14:00:00 crc kubenswrapper[4773]: I0122 14:00:00.371460 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpccn\" (UniqueName: \"kubernetes.io/projected/994bc68e-a0f4-4085-a0df-f647dc8c462c-kube-api-access-kpccn\") pod \"collect-profiles-29484840-q28qp\" (UID: \"994bc68e-a0f4-4085-a0df-f647dc8c462c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp" Jan 22 14:00:00 crc kubenswrapper[4773]: I0122 14:00:00.372332 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/994bc68e-a0f4-4085-a0df-f647dc8c462c-config-volume\") pod 
\"collect-profiles-29484840-q28qp\" (UID: \"994bc68e-a0f4-4085-a0df-f647dc8c462c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp" Jan 22 14:00:00 crc kubenswrapper[4773]: I0122 14:00:00.387320 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/994bc68e-a0f4-4085-a0df-f647dc8c462c-secret-volume\") pod \"collect-profiles-29484840-q28qp\" (UID: \"994bc68e-a0f4-4085-a0df-f647dc8c462c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp" Jan 22 14:00:00 crc kubenswrapper[4773]: I0122 14:00:00.391689 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpccn\" (UniqueName: \"kubernetes.io/projected/994bc68e-a0f4-4085-a0df-f647dc8c462c-kube-api-access-kpccn\") pod \"collect-profiles-29484840-q28qp\" (UID: \"994bc68e-a0f4-4085-a0df-f647dc8c462c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp" Jan 22 14:00:00 crc kubenswrapper[4773]: I0122 14:00:00.504223 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp" Jan 22 14:00:01 crc kubenswrapper[4773]: I0122 14:00:01.038376 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp"] Jan 22 14:00:01 crc kubenswrapper[4773]: I0122 14:00:01.094276 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp" event={"ID":"994bc68e-a0f4-4085-a0df-f647dc8c462c","Type":"ContainerStarted","Data":"5497f483d5f4ee58aad44129ba863b9cc50cdd2a67299aa3febc4a196226ed20"} Jan 22 14:00:02 crc kubenswrapper[4773]: I0122 14:00:02.104762 4773 generic.go:334] "Generic (PLEG): container finished" podID="994bc68e-a0f4-4085-a0df-f647dc8c462c" containerID="3d7e4dbaa2ed016e4d71f63074864e1db6834f31f392eb6317abfedba9f24df7" exitCode=0 Jan 22 14:00:02 crc kubenswrapper[4773]: I0122 14:00:02.104875 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp" event={"ID":"994bc68e-a0f4-4085-a0df-f647dc8c462c","Type":"ContainerDied","Data":"3d7e4dbaa2ed016e4d71f63074864e1db6834f31f392eb6317abfedba9f24df7"} Jan 22 14:00:03 crc kubenswrapper[4773]: I0122 14:00:03.524726 4773 util.go:48] "No ready sandbox for pod can be found. 
Jan 22 14:00:03 crc kubenswrapper[4773]: I0122 14:00:03.661365 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/994bc68e-a0f4-4085-a0df-f647dc8c462c-secret-volume\") pod \"994bc68e-a0f4-4085-a0df-f647dc8c462c\" (UID: \"994bc68e-a0f4-4085-a0df-f647dc8c462c\") "
Jan 22 14:00:03 crc kubenswrapper[4773]: I0122 14:00:03.661512 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpccn\" (UniqueName: \"kubernetes.io/projected/994bc68e-a0f4-4085-a0df-f647dc8c462c-kube-api-access-kpccn\") pod \"994bc68e-a0f4-4085-a0df-f647dc8c462c\" (UID: \"994bc68e-a0f4-4085-a0df-f647dc8c462c\") "
Jan 22 14:00:03 crc kubenswrapper[4773]: I0122 14:00:03.661580 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/994bc68e-a0f4-4085-a0df-f647dc8c462c-config-volume\") pod \"994bc68e-a0f4-4085-a0df-f647dc8c462c\" (UID: \"994bc68e-a0f4-4085-a0df-f647dc8c462c\") "
Jan 22 14:00:03 crc kubenswrapper[4773]: I0122 14:00:03.662171 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/994bc68e-a0f4-4085-a0df-f647dc8c462c-config-volume" (OuterVolumeSpecName: "config-volume") pod "994bc68e-a0f4-4085-a0df-f647dc8c462c" (UID: "994bc68e-a0f4-4085-a0df-f647dc8c462c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 14:00:03 crc kubenswrapper[4773]: I0122 14:00:03.662602 4773 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/994bc68e-a0f4-4085-a0df-f647dc8c462c-config-volume\") on node \"crc\" DevicePath \"\""
Jan 22 14:00:03 crc kubenswrapper[4773]: I0122 14:00:03.670279 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/994bc68e-a0f4-4085-a0df-f647dc8c462c-kube-api-access-kpccn" (OuterVolumeSpecName: "kube-api-access-kpccn") pod "994bc68e-a0f4-4085-a0df-f647dc8c462c" (UID: "994bc68e-a0f4-4085-a0df-f647dc8c462c"). InnerVolumeSpecName "kube-api-access-kpccn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 14:00:03 crc kubenswrapper[4773]: I0122 14:00:03.670552 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/994bc68e-a0f4-4085-a0df-f647dc8c462c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "994bc68e-a0f4-4085-a0df-f647dc8c462c" (UID: "994bc68e-a0f4-4085-a0df-f647dc8c462c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 14:00:03 crc kubenswrapper[4773]: I0122 14:00:03.764802 4773 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/994bc68e-a0f4-4085-a0df-f647dc8c462c-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 22 14:00:03 crc kubenswrapper[4773]: I0122 14:00:03.764863 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpccn\" (UniqueName: \"kubernetes.io/projected/994bc68e-a0f4-4085-a0df-f647dc8c462c-kube-api-access-kpccn\") on node \"crc\" DevicePath \"\""
Jan 22 14:00:04 crc kubenswrapper[4773]: I0122 14:00:04.074535 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 14:00:04 crc kubenswrapper[4773]: I0122 14:00:04.074827 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 14:00:04 crc kubenswrapper[4773]: I0122 14:00:04.126754 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp" event={"ID":"994bc68e-a0f4-4085-a0df-f647dc8c462c","Type":"ContainerDied","Data":"5497f483d5f4ee58aad44129ba863b9cc50cdd2a67299aa3febc4a196226ed20"}
Jan 22 14:00:04 crc kubenswrapper[4773]: I0122 14:00:04.126810 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5497f483d5f4ee58aad44129ba863b9cc50cdd2a67299aa3febc4a196226ed20"
Jan 22 14:00:04 crc kubenswrapper[4773]: I0122 14:00:04.126821 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp"
Jan 22 14:00:04 crc kubenswrapper[4773]: I0122 14:00:04.606028 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs"]
Jan 22 14:00:04 crc kubenswrapper[4773]: I0122 14:00:04.617090 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484795-p5vcs"]
Jan 22 14:00:04 crc kubenswrapper[4773]: I0122 14:00:04.670244 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eaa1bb3f-ee17-49fc-b936-2059f38308a1" path="/var/lib/kubelet/pods/eaa1bb3f-ee17-49fc-b936-2059f38308a1/volumes"
Jan 22 14:00:14 crc kubenswrapper[4773]: I0122 14:00:14.285616 4773 scope.go:117] "RemoveContainer" containerID="7e5a6cbfd6311a46afa3f8a28578f2cb4da026ca6b84322a25210a3dcd9f8b37"
Jan 22 14:00:34 crc kubenswrapper[4773]: I0122 14:00:34.074563 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 14:00:34 crc kubenswrapper[4773]: I0122 14:00:34.075082 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 14:01:00 crc kubenswrapper[4773]: I0122 14:01:00.159984 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29484841-g7cvq"]
Jan 22 14:01:00 crc kubenswrapper[4773]: E0122 14:01:00.161278 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="994bc68e-a0f4-4085-a0df-f647dc8c462c" containerName="collect-profiles"
Jan 22 14:01:00 crc kubenswrapper[4773]: I0122 14:01:00.161312 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="994bc68e-a0f4-4085-a0df-f647dc8c462c" containerName="collect-profiles"
Jan 22 14:01:00 crc kubenswrapper[4773]: I0122 14:01:00.161667 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="994bc68e-a0f4-4085-a0df-f647dc8c462c" containerName="collect-profiles"
Jan 22 14:01:00 crc kubenswrapper[4773]: I0122 14:01:00.162806 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29484841-g7cvq"
Jan 22 14:01:00 crc kubenswrapper[4773]: I0122 14:01:00.178254 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29484841-g7cvq"]
Jan 22 14:01:00 crc kubenswrapper[4773]: I0122 14:01:00.239201 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4269\" (UniqueName: \"kubernetes.io/projected/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-kube-api-access-k4269\") pod \"keystone-cron-29484841-g7cvq\" (UID: \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\") " pod="openstack/keystone-cron-29484841-g7cvq"
Jan 22 14:01:00 crc kubenswrapper[4773]: I0122 14:01:00.239360 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-combined-ca-bundle\") pod \"keystone-cron-29484841-g7cvq\" (UID: \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\") " pod="openstack/keystone-cron-29484841-g7cvq"
Jan 22 14:01:00 crc kubenswrapper[4773]: I0122 14:01:00.239803 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-config-data\") pod \"keystone-cron-29484841-g7cvq\" (UID: \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\") " pod="openstack/keystone-cron-29484841-g7cvq"
Jan 22 14:01:00 crc kubenswrapper[4773]: I0122 14:01:00.239890 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-fernet-keys\") pod \"keystone-cron-29484841-g7cvq\" (UID: \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\") " pod="openstack/keystone-cron-29484841-g7cvq"
Jan 22 14:01:00 crc kubenswrapper[4773]: I0122 14:01:00.340895 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-config-data\") pod \"keystone-cron-29484841-g7cvq\" (UID: \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\") " pod="openstack/keystone-cron-29484841-g7cvq"
Jan 22 14:01:00 crc kubenswrapper[4773]: I0122 14:01:00.340958 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-fernet-keys\") pod \"keystone-cron-29484841-g7cvq\" (UID: \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\") " pod="openstack/keystone-cron-29484841-g7cvq"
Jan 22 14:01:00 crc kubenswrapper[4773]: I0122 14:01:00.341014 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4269\" (UniqueName: \"kubernetes.io/projected/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-kube-api-access-k4269\") pod \"keystone-cron-29484841-g7cvq\" (UID: \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\") " pod="openstack/keystone-cron-29484841-g7cvq"
Jan 22 14:01:00 crc kubenswrapper[4773]: I0122 14:01:00.341088 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-combined-ca-bundle\") pod \"keystone-cron-29484841-g7cvq\" (UID: \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\") " pod="openstack/keystone-cron-29484841-g7cvq"
Jan 22 14:01:00 crc kubenswrapper[4773]: I0122 14:01:00.348817 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-fernet-keys\") pod \"keystone-cron-29484841-g7cvq\" (UID: \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\") " pod="openstack/keystone-cron-29484841-g7cvq"
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-fernet-keys\") pod \"keystone-cron-29484841-g7cvq\" (UID: \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\") " pod="openstack/keystone-cron-29484841-g7cvq" Jan 22 14:01:00 crc kubenswrapper[4773]: I0122 14:01:00.350032 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-combined-ca-bundle\") pod \"keystone-cron-29484841-g7cvq\" (UID: \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\") " pod="openstack/keystone-cron-29484841-g7cvq" Jan 22 14:01:00 crc kubenswrapper[4773]: I0122 14:01:00.358312 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-config-data\") pod \"keystone-cron-29484841-g7cvq\" (UID: \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\") " pod="openstack/keystone-cron-29484841-g7cvq" Jan 22 14:01:00 crc kubenswrapper[4773]: I0122 14:01:00.361962 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4269\" (UniqueName: \"kubernetes.io/projected/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-kube-api-access-k4269\") pod \"keystone-cron-29484841-g7cvq\" (UID: \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\") " pod="openstack/keystone-cron-29484841-g7cvq" Jan 22 14:01:00 crc kubenswrapper[4773]: I0122 14:01:00.499069 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29484841-g7cvq" Jan 22 14:01:00 crc kubenswrapper[4773]: I0122 14:01:00.974603 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29484841-g7cvq"] Jan 22 14:01:00 crc kubenswrapper[4773]: W0122 14:01:00.977714 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podadfe4107_4d17_40d6_8531_9e1a2a2f7ec8.slice/crio-661851d845fb0b06582214068ef3d750152b34563d17e49be482168f1978e306 WatchSource:0}: Error finding container 661851d845fb0b06582214068ef3d750152b34563d17e49be482168f1978e306: Status 404 returned error can't find the container with id 661851d845fb0b06582214068ef3d750152b34563d17e49be482168f1978e306 Jan 22 14:01:01 crc kubenswrapper[4773]: I0122 14:01:01.723924 4773 generic.go:334] "Generic (PLEG): container finished" podID="537d68a1-fe75-4e50-8c4b-c9e4c0ab7899" containerID="6521a9fcfca4d849f97544386872b1c6e197af43ff905d3a9b7dfb3d87d17f0f" exitCode=0 Jan 22 14:01:01 crc kubenswrapper[4773]: I0122 14:01:01.724064 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-pg6sn" event={"ID":"537d68a1-fe75-4e50-8c4b-c9e4c0ab7899","Type":"ContainerDied","Data":"6521a9fcfca4d849f97544386872b1c6e197af43ff905d3a9b7dfb3d87d17f0f"} Jan 22 14:01:01 crc kubenswrapper[4773]: I0122 14:01:01.726879 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29484841-g7cvq" event={"ID":"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8","Type":"ContainerStarted","Data":"22d1dcccb262de3d20a797aeb33f7822005e3047dd46a745032a0079bf9e97c2"} Jan 22 14:01:01 crc kubenswrapper[4773]: I0122 14:01:01.726911 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29484841-g7cvq" event={"ID":"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8","Type":"ContainerStarted","Data":"661851d845fb0b06582214068ef3d750152b34563d17e49be482168f1978e306"} Jan 22 14:01:01 crc kubenswrapper[4773]: I0122 
14:01:01.781859 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29484841-g7cvq" podStartSLOduration=1.781836645 podStartE2EDuration="1.781836645s" podCreationTimestamp="2026-01-22 14:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:01:01.774904889 +0000 UTC m=+7569.353020714" watchObservedRunningTime="2026-01-22 14:01:01.781836645 +0000 UTC m=+7569.359952470" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.272227 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-pg6sn" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.413195 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pqgnf\" (UniqueName: \"kubernetes.io/projected/537d68a1-fe75-4e50-8c4b-c9e4c0ab7899-kube-api-access-pqgnf\") pod \"537d68a1-fe75-4e50-8c4b-c9e4c0ab7899\" (UID: \"537d68a1-fe75-4e50-8c4b-c9e4c0ab7899\") " Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.413259 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/537d68a1-fe75-4e50-8c4b-c9e4c0ab7899-ssh-key-openstack-cell1\") pod \"537d68a1-fe75-4e50-8c4b-c9e4c0ab7899\" (UID: \"537d68a1-fe75-4e50-8c4b-c9e4c0ab7899\") " Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.413347 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/537d68a1-fe75-4e50-8c4b-c9e4c0ab7899-inventory\") pod \"537d68a1-fe75-4e50-8c4b-c9e4c0ab7899\" (UID: \"537d68a1-fe75-4e50-8c4b-c9e4c0ab7899\") " Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.436676 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/537d68a1-fe75-4e50-8c4b-c9e4c0ab7899-kube-api-access-pqgnf" (OuterVolumeSpecName: "kube-api-access-pqgnf") pod "537d68a1-fe75-4e50-8c4b-c9e4c0ab7899" (UID: "537d68a1-fe75-4e50-8c4b-c9e4c0ab7899"). InnerVolumeSpecName "kube-api-access-pqgnf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.445235 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/537d68a1-fe75-4e50-8c4b-c9e4c0ab7899-inventory" (OuterVolumeSpecName: "inventory") pod "537d68a1-fe75-4e50-8c4b-c9e4c0ab7899" (UID: "537d68a1-fe75-4e50-8c4b-c9e4c0ab7899"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.449862 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/537d68a1-fe75-4e50-8c4b-c9e4c0ab7899-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "537d68a1-fe75-4e50-8c4b-c9e4c0ab7899" (UID: "537d68a1-fe75-4e50-8c4b-c9e4c0ab7899"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
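
The pod_startup_latency_tracker entry for keystone-cron reports two durations. Here no image pull happened (both pull timestamps are the zero time), so podStartSLOduration equals podStartE2EDuration, and the 1.781836645s figure is watchObservedRunningTime minus podCreationTimestamp. Reproducing that arithmetic with the timestamps copied from the entry (a sketch; the layout string is ours):

    package main

    import (
    	"fmt"
    	"time"
    )

    func main() {
    	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
    	created, _ := time.Parse(layout, "2026-01-22 14:01:00 +0000 UTC")
    	running, _ := time.Parse(layout, "2026-01-22 14:01:01.781836645 +0000 UTC")
    	fmt.Println(running.Sub(created)) // 1.781836645s, the reported duration
    }
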
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.515193 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pqgnf\" (UniqueName: \"kubernetes.io/projected/537d68a1-fe75-4e50-8c4b-c9e4c0ab7899-kube-api-access-pqgnf\") on node \"crc\" DevicePath \"\"" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.515225 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/537d68a1-fe75-4e50-8c4b-c9e4c0ab7899-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.515235 4773 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/537d68a1-fe75-4e50-8c4b-c9e4c0ab7899-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.750475 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-pg6sn" event={"ID":"537d68a1-fe75-4e50-8c4b-c9e4c0ab7899","Type":"ContainerDied","Data":"a35fc63979dd35f42d2880a56c7ee280f2c48cf869648626ee9b3d97b956b63d"} Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.750537 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a35fc63979dd35f42d2880a56c7ee280f2c48cf869648626ee9b3d97b956b63d" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.750497 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-pg6sn" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.753876 4773 generic.go:334] "Generic (PLEG): container finished" podID="adfe4107-4d17-40d6-8531-9e1a2a2f7ec8" containerID="22d1dcccb262de3d20a797aeb33f7822005e3047dd46a745032a0079bf9e97c2" exitCode=0 Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.753932 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29484841-g7cvq" event={"ID":"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8","Type":"ContainerDied","Data":"22d1dcccb262de3d20a797aeb33f7822005e3047dd46a745032a0079bf9e97c2"} Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.869504 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-lpznb"] Jan 22 14:01:03 crc kubenswrapper[4773]: E0122 14:01:03.870046 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="537d68a1-fe75-4e50-8c4b-c9e4c0ab7899" containerName="download-cache-openstack-openstack-cell1" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.870066 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="537d68a1-fe75-4e50-8c4b-c9e4c0ab7899" containerName="download-cache-openstack-openstack-cell1" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.870246 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="537d68a1-fe75-4e50-8c4b-c9e4c0ab7899" containerName="download-cache-openstack-openstack-cell1" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.871200 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-lpznb" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.878968 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-lpznb"] Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.908493 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.908817 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.908999 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.909206 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-bzqnq" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.932563 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fl7jv\" (UniqueName: \"kubernetes.io/projected/df86d55f-325a-4df7-9275-8b64fa37a759-kube-api-access-fl7jv\") pod \"configure-network-openstack-openstack-cell1-lpznb\" (UID: \"df86d55f-325a-4df7-9275-8b64fa37a759\") " pod="openstack/configure-network-openstack-openstack-cell1-lpznb" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.932721 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df86d55f-325a-4df7-9275-8b64fa37a759-inventory\") pod \"configure-network-openstack-openstack-cell1-lpznb\" (UID: \"df86d55f-325a-4df7-9275-8b64fa37a759\") " pod="openstack/configure-network-openstack-openstack-cell1-lpznb" Jan 22 14:01:03 crc kubenswrapper[4773]: I0122 14:01:03.932766 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/df86d55f-325a-4df7-9275-8b64fa37a759-ssh-key-openstack-cell1\") pod \"configure-network-openstack-openstack-cell1-lpznb\" (UID: \"df86d55f-325a-4df7-9275-8b64fa37a759\") " pod="openstack/configure-network-openstack-openstack-cell1-lpznb" Jan 22 14:01:04 crc kubenswrapper[4773]: I0122 14:01:04.035063 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df86d55f-325a-4df7-9275-8b64fa37a759-inventory\") pod \"configure-network-openstack-openstack-cell1-lpznb\" (UID: \"df86d55f-325a-4df7-9275-8b64fa37a759\") " pod="openstack/configure-network-openstack-openstack-cell1-lpznb" Jan 22 14:01:04 crc kubenswrapper[4773]: I0122 14:01:04.035151 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/df86d55f-325a-4df7-9275-8b64fa37a759-ssh-key-openstack-cell1\") pod \"configure-network-openstack-openstack-cell1-lpznb\" (UID: \"df86d55f-325a-4df7-9275-8b64fa37a759\") " pod="openstack/configure-network-openstack-openstack-cell1-lpznb" Jan 22 14:01:04 crc kubenswrapper[4773]: I0122 14:01:04.035515 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fl7jv\" (UniqueName: \"kubernetes.io/projected/df86d55f-325a-4df7-9275-8b64fa37a759-kube-api-access-fl7jv\") pod \"configure-network-openstack-openstack-cell1-lpznb\" (UID: 
\"df86d55f-325a-4df7-9275-8b64fa37a759\") " pod="openstack/configure-network-openstack-openstack-cell1-lpznb" Jan 22 14:01:04 crc kubenswrapper[4773]: I0122 14:01:04.039729 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df86d55f-325a-4df7-9275-8b64fa37a759-inventory\") pod \"configure-network-openstack-openstack-cell1-lpznb\" (UID: \"df86d55f-325a-4df7-9275-8b64fa37a759\") " pod="openstack/configure-network-openstack-openstack-cell1-lpznb" Jan 22 14:01:04 crc kubenswrapper[4773]: I0122 14:01:04.039799 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/df86d55f-325a-4df7-9275-8b64fa37a759-ssh-key-openstack-cell1\") pod \"configure-network-openstack-openstack-cell1-lpznb\" (UID: \"df86d55f-325a-4df7-9275-8b64fa37a759\") " pod="openstack/configure-network-openstack-openstack-cell1-lpznb" Jan 22 14:01:04 crc kubenswrapper[4773]: I0122 14:01:04.056188 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fl7jv\" (UniqueName: \"kubernetes.io/projected/df86d55f-325a-4df7-9275-8b64fa37a759-kube-api-access-fl7jv\") pod \"configure-network-openstack-openstack-cell1-lpznb\" (UID: \"df86d55f-325a-4df7-9275-8b64fa37a759\") " pod="openstack/configure-network-openstack-openstack-cell1-lpznb" Jan 22 14:01:04 crc kubenswrapper[4773]: I0122 14:01:04.074225 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:01:04 crc kubenswrapper[4773]: I0122 14:01:04.074500 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:01:04 crc kubenswrapper[4773]: I0122 14:01:04.074541 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 14:01:04 crc kubenswrapper[4773]: I0122 14:01:04.075502 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3ed07aff4ff5fa2ed28234057c36f8999a1e1cd3208798180a3cb17cdd63263f"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 14:01:04 crc kubenswrapper[4773]: I0122 14:01:04.075562 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://3ed07aff4ff5fa2ed28234057c36f8999a1e1cd3208798180a3cb17cdd63263f" gracePeriod=600 Jan 22 14:01:04 crc kubenswrapper[4773]: I0122 14:01:04.223310 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-lpznb" Jan 22 14:01:04 crc kubenswrapper[4773]: I0122 14:01:04.769027 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="3ed07aff4ff5fa2ed28234057c36f8999a1e1cd3208798180a3cb17cdd63263f" exitCode=0 Jan 22 14:01:04 crc kubenswrapper[4773]: I0122 14:01:04.769343 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"3ed07aff4ff5fa2ed28234057c36f8999a1e1cd3208798180a3cb17cdd63263f"} Jan 22 14:01:04 crc kubenswrapper[4773]: I0122 14:01:04.769722 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e"} Jan 22 14:01:04 crc kubenswrapper[4773]: I0122 14:01:04.769751 4773 scope.go:117] "RemoveContainer" containerID="9df3f828f01dced3a430d20bf1fd25694c6c11c9cb58e9e3cca0d42595547c96" Jan 22 14:01:04 crc kubenswrapper[4773]: I0122 14:01:04.837623 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-lpznb"] Jan 22 14:01:05 crc kubenswrapper[4773]: I0122 14:01:05.195745 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29484841-g7cvq" Jan 22 14:01:05 crc kubenswrapper[4773]: I0122 14:01:05.362952 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k4269\" (UniqueName: \"kubernetes.io/projected/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-kube-api-access-k4269\") pod \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\" (UID: \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\") " Jan 22 14:01:05 crc kubenswrapper[4773]: I0122 14:01:05.363018 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-combined-ca-bundle\") pod \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\" (UID: \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\") " Jan 22 14:01:05 crc kubenswrapper[4773]: I0122 14:01:05.363062 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-config-data\") pod \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\" (UID: \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\") " Jan 22 14:01:05 crc kubenswrapper[4773]: I0122 14:01:05.363275 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-fernet-keys\") pod \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\" (UID: \"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8\") " Jan 22 14:01:05 crc kubenswrapper[4773]: I0122 14:01:05.383677 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "adfe4107-4d17-40d6-8531-9e1a2a2f7ec8" (UID: "adfe4107-4d17-40d6-8531-9e1a2a2f7ec8"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:01:05 crc kubenswrapper[4773]: I0122 14:01:05.384638 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-kube-api-access-k4269" (OuterVolumeSpecName: "kube-api-access-k4269") pod "adfe4107-4d17-40d6-8531-9e1a2a2f7ec8" (UID: "adfe4107-4d17-40d6-8531-9e1a2a2f7ec8"). InnerVolumeSpecName "kube-api-access-k4269". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:01:05 crc kubenswrapper[4773]: I0122 14:01:05.398893 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "adfe4107-4d17-40d6-8531-9e1a2a2f7ec8" (UID: "adfe4107-4d17-40d6-8531-9e1a2a2f7ec8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:01:05 crc kubenswrapper[4773]: I0122 14:01:05.433203 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-config-data" (OuterVolumeSpecName: "config-data") pod "adfe4107-4d17-40d6-8531-9e1a2a2f7ec8" (UID: "adfe4107-4d17-40d6-8531-9e1a2a2f7ec8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:01:05 crc kubenswrapper[4773]: I0122 14:01:05.465656 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k4269\" (UniqueName: \"kubernetes.io/projected/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-kube-api-access-k4269\") on node \"crc\" DevicePath \"\"" Jan 22 14:01:05 crc kubenswrapper[4773]: I0122 14:01:05.465912 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:01:05 crc kubenswrapper[4773]: I0122 14:01:05.465927 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 14:01:05 crc kubenswrapper[4773]: I0122 14:01:05.465938 4773 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/adfe4107-4d17-40d6-8531-9e1a2a2f7ec8-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 22 14:01:05 crc kubenswrapper[4773]: I0122 14:01:05.786445 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29484841-g7cvq" event={"ID":"adfe4107-4d17-40d6-8531-9e1a2a2f7ec8","Type":"ContainerDied","Data":"661851d845fb0b06582214068ef3d750152b34563d17e49be482168f1978e306"} Jan 22 14:01:05 crc kubenswrapper[4773]: I0122 14:01:05.786782 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="661851d845fb0b06582214068ef3d750152b34563d17e49be482168f1978e306" Jan 22 14:01:05 crc kubenswrapper[4773]: I0122 14:01:05.786474 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29484841-g7cvq" Jan 22 14:01:05 crc kubenswrapper[4773]: I0122 14:01:05.788763 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-lpznb" event={"ID":"df86d55f-325a-4df7-9275-8b64fa37a759","Type":"ContainerStarted","Data":"eaed428ed73b5116b91b676050ebb1a95716f946260e0b39d69e68ce8fa1fb10"} Jan 22 14:01:05 crc kubenswrapper[4773]: I0122 14:01:05.788812 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-lpznb" event={"ID":"df86d55f-325a-4df7-9275-8b64fa37a759","Type":"ContainerStarted","Data":"bb6a48cc602ad0918c195410033b34c5256260c5c454054c3b096de912a2b344"} Jan 22 14:01:05 crc kubenswrapper[4773]: I0122 14:01:05.812349 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-openstack-openstack-cell1-lpznb" podStartSLOduration=2.249498687 podStartE2EDuration="2.812322689s" podCreationTimestamp="2026-01-22 14:01:03 +0000 UTC" firstStartedPulling="2026-01-22 14:01:04.840534204 +0000 UTC m=+7572.418650029" lastFinishedPulling="2026-01-22 14:01:05.403358216 +0000 UTC m=+7572.981474031" observedRunningTime="2026-01-22 14:01:05.806645618 +0000 UTC m=+7573.384761443" watchObservedRunningTime="2026-01-22 14:01:05.812322689 +0000 UTC m=+7573.390438514" Jan 22 14:02:22 crc kubenswrapper[4773]: I0122 14:02:22.809545 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fng96"] Jan 22 14:02:22 crc kubenswrapper[4773]: E0122 14:02:22.811457 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adfe4107-4d17-40d6-8531-9e1a2a2f7ec8" containerName="keystone-cron" Jan 22 14:02:22 crc kubenswrapper[4773]: I0122 14:02:22.811481 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="adfe4107-4d17-40d6-8531-9e1a2a2f7ec8" containerName="keystone-cron" Jan 22 14:02:22 crc kubenswrapper[4773]: I0122 14:02:22.811664 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="adfe4107-4d17-40d6-8531-9e1a2a2f7ec8" containerName="keystone-cron" Jan 22 14:02:22 crc kubenswrapper[4773]: I0122 14:02:22.813525 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fng96" Jan 22 14:02:22 crc kubenswrapper[4773]: I0122 14:02:22.825844 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fng96"] Jan 22 14:02:22 crc kubenswrapper[4773]: I0122 14:02:22.981065 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwfwc\" (UniqueName: \"kubernetes.io/projected/b29a0d48-e08e-44e3-914a-983f8e3dff79-kube-api-access-zwfwc\") pod \"redhat-operators-fng96\" (UID: \"b29a0d48-e08e-44e3-914a-983f8e3dff79\") " pod="openshift-marketplace/redhat-operators-fng96" Jan 22 14:02:22 crc kubenswrapper[4773]: I0122 14:02:22.981353 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b29a0d48-e08e-44e3-914a-983f8e3dff79-utilities\") pod \"redhat-operators-fng96\" (UID: \"b29a0d48-e08e-44e3-914a-983f8e3dff79\") " pod="openshift-marketplace/redhat-operators-fng96" Jan 22 14:02:22 crc kubenswrapper[4773]: I0122 14:02:22.981750 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b29a0d48-e08e-44e3-914a-983f8e3dff79-catalog-content\") pod \"redhat-operators-fng96\" (UID: \"b29a0d48-e08e-44e3-914a-983f8e3dff79\") " pod="openshift-marketplace/redhat-operators-fng96" Jan 22 14:02:23 crc kubenswrapper[4773]: I0122 14:02:23.084161 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwfwc\" (UniqueName: \"kubernetes.io/projected/b29a0d48-e08e-44e3-914a-983f8e3dff79-kube-api-access-zwfwc\") pod \"redhat-operators-fng96\" (UID: \"b29a0d48-e08e-44e3-914a-983f8e3dff79\") " pod="openshift-marketplace/redhat-operators-fng96" Jan 22 14:02:23 crc kubenswrapper[4773]: I0122 14:02:23.084668 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b29a0d48-e08e-44e3-914a-983f8e3dff79-utilities\") pod \"redhat-operators-fng96\" (UID: \"b29a0d48-e08e-44e3-914a-983f8e3dff79\") " pod="openshift-marketplace/redhat-operators-fng96" Jan 22 14:02:23 crc kubenswrapper[4773]: I0122 14:02:23.084889 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b29a0d48-e08e-44e3-914a-983f8e3dff79-catalog-content\") pod \"redhat-operators-fng96\" (UID: \"b29a0d48-e08e-44e3-914a-983f8e3dff79\") " pod="openshift-marketplace/redhat-operators-fng96" Jan 22 14:02:23 crc kubenswrapper[4773]: I0122 14:02:23.085780 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b29a0d48-e08e-44e3-914a-983f8e3dff79-utilities\") pod \"redhat-operators-fng96\" (UID: \"b29a0d48-e08e-44e3-914a-983f8e3dff79\") " pod="openshift-marketplace/redhat-operators-fng96" Jan 22 14:02:23 crc kubenswrapper[4773]: I0122 14:02:23.085995 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b29a0d48-e08e-44e3-914a-983f8e3dff79-catalog-content\") pod \"redhat-operators-fng96\" (UID: \"b29a0d48-e08e-44e3-914a-983f8e3dff79\") " pod="openshift-marketplace/redhat-operators-fng96" Jan 22 14:02:23 crc kubenswrapper[4773]: I0122 14:02:23.108791 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-zwfwc\" (UniqueName: \"kubernetes.io/projected/b29a0d48-e08e-44e3-914a-983f8e3dff79-kube-api-access-zwfwc\") pod \"redhat-operators-fng96\" (UID: \"b29a0d48-e08e-44e3-914a-983f8e3dff79\") " pod="openshift-marketplace/redhat-operators-fng96" Jan 22 14:02:23 crc kubenswrapper[4773]: I0122 14:02:23.184006 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fng96" Jan 22 14:02:23 crc kubenswrapper[4773]: I0122 14:02:23.650812 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fng96"] Jan 22 14:02:23 crc kubenswrapper[4773]: I0122 14:02:23.670798 4773 generic.go:334] "Generic (PLEG): container finished" podID="df86d55f-325a-4df7-9275-8b64fa37a759" containerID="eaed428ed73b5116b91b676050ebb1a95716f946260e0b39d69e68ce8fa1fb10" exitCode=0 Jan 22 14:02:23 crc kubenswrapper[4773]: I0122 14:02:23.670870 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-lpznb" event={"ID":"df86d55f-325a-4df7-9275-8b64fa37a759","Type":"ContainerDied","Data":"eaed428ed73b5116b91b676050ebb1a95716f946260e0b39d69e68ce8fa1fb10"} Jan 22 14:02:23 crc kubenswrapper[4773]: I0122 14:02:23.672576 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fng96" event={"ID":"b29a0d48-e08e-44e3-914a-983f8e3dff79","Type":"ContainerStarted","Data":"7791b101eb38cb3f1fd5b7d4d72623219a70dde38da378850c09a07f25856dd9"} Jan 22 14:02:24 crc kubenswrapper[4773]: I0122 14:02:24.682163 4773 generic.go:334] "Generic (PLEG): container finished" podID="b29a0d48-e08e-44e3-914a-983f8e3dff79" containerID="7e3de5def13a098a41c6de83c5507d7a6ee1b6b60ad2435d879ee5f6ee112de4" exitCode=0 Jan 22 14:02:24 crc kubenswrapper[4773]: I0122 14:02:24.682325 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fng96" event={"ID":"b29a0d48-e08e-44e3-914a-983f8e3dff79","Type":"ContainerDied","Data":"7e3de5def13a098a41c6de83c5507d7a6ee1b6b60ad2435d879ee5f6ee112de4"} Jan 22 14:02:24 crc kubenswrapper[4773]: I0122 14:02:24.685023 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.180712 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-lpznb" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.343509 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/df86d55f-325a-4df7-9275-8b64fa37a759-ssh-key-openstack-cell1\") pod \"df86d55f-325a-4df7-9275-8b64fa37a759\" (UID: \"df86d55f-325a-4df7-9275-8b64fa37a759\") " Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.344090 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fl7jv\" (UniqueName: \"kubernetes.io/projected/df86d55f-325a-4df7-9275-8b64fa37a759-kube-api-access-fl7jv\") pod \"df86d55f-325a-4df7-9275-8b64fa37a759\" (UID: \"df86d55f-325a-4df7-9275-8b64fa37a759\") " Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.344565 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df86d55f-325a-4df7-9275-8b64fa37a759-inventory\") pod \"df86d55f-325a-4df7-9275-8b64fa37a759\" (UID: \"df86d55f-325a-4df7-9275-8b64fa37a759\") " Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.349619 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df86d55f-325a-4df7-9275-8b64fa37a759-kube-api-access-fl7jv" (OuterVolumeSpecName: "kube-api-access-fl7jv") pod "df86d55f-325a-4df7-9275-8b64fa37a759" (UID: "df86d55f-325a-4df7-9275-8b64fa37a759"). InnerVolumeSpecName "kube-api-access-fl7jv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.379745 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df86d55f-325a-4df7-9275-8b64fa37a759-inventory" (OuterVolumeSpecName: "inventory") pod "df86d55f-325a-4df7-9275-8b64fa37a759" (UID: "df86d55f-325a-4df7-9275-8b64fa37a759"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.385463 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df86d55f-325a-4df7-9275-8b64fa37a759-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "df86d55f-325a-4df7-9275-8b64fa37a759" (UID: "df86d55f-325a-4df7-9275-8b64fa37a759"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.449968 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fl7jv\" (UniqueName: \"kubernetes.io/projected/df86d55f-325a-4df7-9275-8b64fa37a759-kube-api-access-fl7jv\") on node \"crc\" DevicePath \"\"" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.450023 4773 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df86d55f-325a-4df7-9275-8b64fa37a759-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.450044 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/df86d55f-325a-4df7-9275-8b64fa37a759-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.702263 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-lpznb" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.702275 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-lpznb" event={"ID":"df86d55f-325a-4df7-9275-8b64fa37a759","Type":"ContainerDied","Data":"bb6a48cc602ad0918c195410033b34c5256260c5c454054c3b096de912a2b344"} Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.702422 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb6a48cc602ad0918c195410033b34c5256260c5c454054c3b096de912a2b344" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.708256 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fng96" event={"ID":"b29a0d48-e08e-44e3-914a-983f8e3dff79","Type":"ContainerStarted","Data":"c9b1c3dc6d712f94f2d3cdc2a49562c62f03544369e7a8e6f8d031d946f56a35"} Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.790210 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-w5d4l"] Jan 22 14:02:25 crc kubenswrapper[4773]: E0122 14:02:25.790974 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df86d55f-325a-4df7-9275-8b64fa37a759" containerName="configure-network-openstack-openstack-cell1" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.790991 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="df86d55f-325a-4df7-9275-8b64fa37a759" containerName="configure-network-openstack-openstack-cell1" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.791188 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="df86d55f-325a-4df7-9275-8b64fa37a759" containerName="configure-network-openstack-openstack-cell1" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.791934 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-w5d4l" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.798297 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-bzqnq" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.798775 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.799321 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.832973 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.845531 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-w5d4l"] Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.861872 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2thqp\" (UniqueName: \"kubernetes.io/projected/35ac9085-7569-42c7-a13f-a4c1101dc438-kube-api-access-2thqp\") pod \"validate-network-openstack-openstack-cell1-w5d4l\" (UID: \"35ac9085-7569-42c7-a13f-a4c1101dc438\") " pod="openstack/validate-network-openstack-openstack-cell1-w5d4l" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.862015 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/35ac9085-7569-42c7-a13f-a4c1101dc438-inventory\") pod \"validate-network-openstack-openstack-cell1-w5d4l\" (UID: \"35ac9085-7569-42c7-a13f-a4c1101dc438\") " pod="openstack/validate-network-openstack-openstack-cell1-w5d4l" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.862083 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/35ac9085-7569-42c7-a13f-a4c1101dc438-ssh-key-openstack-cell1\") pod \"validate-network-openstack-openstack-cell1-w5d4l\" (UID: \"35ac9085-7569-42c7-a13f-a4c1101dc438\") " pod="openstack/validate-network-openstack-openstack-cell1-w5d4l" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.964915 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/35ac9085-7569-42c7-a13f-a4c1101dc438-inventory\") pod \"validate-network-openstack-openstack-cell1-w5d4l\" (UID: \"35ac9085-7569-42c7-a13f-a4c1101dc438\") " pod="openstack/validate-network-openstack-openstack-cell1-w5d4l" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.965020 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/35ac9085-7569-42c7-a13f-a4c1101dc438-ssh-key-openstack-cell1\") pod \"validate-network-openstack-openstack-cell1-w5d4l\" (UID: \"35ac9085-7569-42c7-a13f-a4c1101dc438\") " pod="openstack/validate-network-openstack-openstack-cell1-w5d4l" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.965117 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2thqp\" (UniqueName: \"kubernetes.io/projected/35ac9085-7569-42c7-a13f-a4c1101dc438-kube-api-access-2thqp\") pod \"validate-network-openstack-openstack-cell1-w5d4l\" (UID: 
\"35ac9085-7569-42c7-a13f-a4c1101dc438\") " pod="openstack/validate-network-openstack-openstack-cell1-w5d4l" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.972094 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/35ac9085-7569-42c7-a13f-a4c1101dc438-ssh-key-openstack-cell1\") pod \"validate-network-openstack-openstack-cell1-w5d4l\" (UID: \"35ac9085-7569-42c7-a13f-a4c1101dc438\") " pod="openstack/validate-network-openstack-openstack-cell1-w5d4l" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.974322 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/35ac9085-7569-42c7-a13f-a4c1101dc438-inventory\") pod \"validate-network-openstack-openstack-cell1-w5d4l\" (UID: \"35ac9085-7569-42c7-a13f-a4c1101dc438\") " pod="openstack/validate-network-openstack-openstack-cell1-w5d4l" Jan 22 14:02:25 crc kubenswrapper[4773]: I0122 14:02:25.980711 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2thqp\" (UniqueName: \"kubernetes.io/projected/35ac9085-7569-42c7-a13f-a4c1101dc438-kube-api-access-2thqp\") pod \"validate-network-openstack-openstack-cell1-w5d4l\" (UID: \"35ac9085-7569-42c7-a13f-a4c1101dc438\") " pod="openstack/validate-network-openstack-openstack-cell1-w5d4l" Jan 22 14:02:26 crc kubenswrapper[4773]: I0122 14:02:26.147875 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-w5d4l" Jan 22 14:02:26 crc kubenswrapper[4773]: I0122 14:02:26.762556 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-w5d4l"] Jan 22 14:02:26 crc kubenswrapper[4773]: W0122 14:02:26.772955 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35ac9085_7569_42c7_a13f_a4c1101dc438.slice/crio-1fb0c63382c6521e2ecb251f8c50b02eb183a4385e7689d511fc56720b142dff WatchSource:0}: Error finding container 1fb0c63382c6521e2ecb251f8c50b02eb183a4385e7689d511fc56720b142dff: Status 404 returned error can't find the container with id 1fb0c63382c6521e2ecb251f8c50b02eb183a4385e7689d511fc56720b142dff Jan 22 14:02:27 crc kubenswrapper[4773]: I0122 14:02:27.730607 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-w5d4l" event={"ID":"35ac9085-7569-42c7-a13f-a4c1101dc438","Type":"ContainerStarted","Data":"1fb0c63382c6521e2ecb251f8c50b02eb183a4385e7689d511fc56720b142dff"} Jan 22 14:02:28 crc kubenswrapper[4773]: I0122 14:02:28.754199 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-w5d4l" event={"ID":"35ac9085-7569-42c7-a13f-a4c1101dc438","Type":"ContainerStarted","Data":"07308281d46bdcdb634ed6c8c856e2d41ea4b2d31aa4614b0682bce506abcde6"} Jan 22 14:02:28 crc kubenswrapper[4773]: I0122 14:02:28.782682 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-openstack-openstack-cell1-w5d4l" podStartSLOduration=3.140447028 podStartE2EDuration="3.782657956s" podCreationTimestamp="2026-01-22 14:02:25 +0000 UTC" firstStartedPulling="2026-01-22 14:02:26.776024953 +0000 UTC m=+7654.354140808" lastFinishedPulling="2026-01-22 14:02:27.418235921 +0000 UTC m=+7654.996351736" observedRunningTime="2026-01-22 14:02:28.773783965 +0000 UTC m=+7656.351899790" 
watchObservedRunningTime="2026-01-22 14:02:28.782657956 +0000 UTC m=+7656.360773781" Jan 22 14:02:29 crc kubenswrapper[4773]: I0122 14:02:29.764875 4773 generic.go:334] "Generic (PLEG): container finished" podID="b29a0d48-e08e-44e3-914a-983f8e3dff79" containerID="c9b1c3dc6d712f94f2d3cdc2a49562c62f03544369e7a8e6f8d031d946f56a35" exitCode=0 Jan 22 14:02:29 crc kubenswrapper[4773]: I0122 14:02:29.765918 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fng96" event={"ID":"b29a0d48-e08e-44e3-914a-983f8e3dff79","Type":"ContainerDied","Data":"c9b1c3dc6d712f94f2d3cdc2a49562c62f03544369e7a8e6f8d031d946f56a35"} Jan 22 14:02:30 crc kubenswrapper[4773]: I0122 14:02:30.777913 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fng96" event={"ID":"b29a0d48-e08e-44e3-914a-983f8e3dff79","Type":"ContainerStarted","Data":"8126070288b63c5b80c67f38f37d72dbbcf843372f71f951ef6567dcc8011408"} Jan 22 14:02:30 crc kubenswrapper[4773]: I0122 14:02:30.797681 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fng96" podStartSLOduration=3.260330599 podStartE2EDuration="8.797646106s" podCreationTimestamp="2026-01-22 14:02:22 +0000 UTC" firstStartedPulling="2026-01-22 14:02:24.684679794 +0000 UTC m=+7652.262795629" lastFinishedPulling="2026-01-22 14:02:30.221995311 +0000 UTC m=+7657.800111136" observedRunningTime="2026-01-22 14:02:30.795668091 +0000 UTC m=+7658.373783916" watchObservedRunningTime="2026-01-22 14:02:30.797646106 +0000 UTC m=+7658.375761931" Jan 22 14:02:33 crc kubenswrapper[4773]: I0122 14:02:33.184378 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fng96" Jan 22 14:02:33 crc kubenswrapper[4773]: I0122 14:02:33.184696 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fng96" Jan 22 14:02:33 crc kubenswrapper[4773]: I0122 14:02:33.808916 4773 generic.go:334] "Generic (PLEG): container finished" podID="35ac9085-7569-42c7-a13f-a4c1101dc438" containerID="07308281d46bdcdb634ed6c8c856e2d41ea4b2d31aa4614b0682bce506abcde6" exitCode=0 Jan 22 14:02:33 crc kubenswrapper[4773]: I0122 14:02:33.809172 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-w5d4l" event={"ID":"35ac9085-7569-42c7-a13f-a4c1101dc438","Type":"ContainerDied","Data":"07308281d46bdcdb634ed6c8c856e2d41ea4b2d31aa4614b0682bce506abcde6"} Jan 22 14:02:34 crc kubenswrapper[4773]: I0122 14:02:34.250457 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fng96" podUID="b29a0d48-e08e-44e3-914a-983f8e3dff79" containerName="registry-server" probeResult="failure" output=< Jan 22 14:02:34 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 14:02:34 crc kubenswrapper[4773]: > Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.419845 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-w5d4l" Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.578015 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2thqp\" (UniqueName: \"kubernetes.io/projected/35ac9085-7569-42c7-a13f-a4c1101dc438-kube-api-access-2thqp\") pod \"35ac9085-7569-42c7-a13f-a4c1101dc438\" (UID: \"35ac9085-7569-42c7-a13f-a4c1101dc438\") " Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.578184 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/35ac9085-7569-42c7-a13f-a4c1101dc438-ssh-key-openstack-cell1\") pod \"35ac9085-7569-42c7-a13f-a4c1101dc438\" (UID: \"35ac9085-7569-42c7-a13f-a4c1101dc438\") " Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.578240 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/35ac9085-7569-42c7-a13f-a4c1101dc438-inventory\") pod \"35ac9085-7569-42c7-a13f-a4c1101dc438\" (UID: \"35ac9085-7569-42c7-a13f-a4c1101dc438\") " Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.589524 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35ac9085-7569-42c7-a13f-a4c1101dc438-kube-api-access-2thqp" (OuterVolumeSpecName: "kube-api-access-2thqp") pod "35ac9085-7569-42c7-a13f-a4c1101dc438" (UID: "35ac9085-7569-42c7-a13f-a4c1101dc438"). InnerVolumeSpecName "kube-api-access-2thqp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.610740 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35ac9085-7569-42c7-a13f-a4c1101dc438-inventory" (OuterVolumeSpecName: "inventory") pod "35ac9085-7569-42c7-a13f-a4c1101dc438" (UID: "35ac9085-7569-42c7-a13f-a4c1101dc438"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.633673 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35ac9085-7569-42c7-a13f-a4c1101dc438-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "35ac9085-7569-42c7-a13f-a4c1101dc438" (UID: "35ac9085-7569-42c7-a13f-a4c1101dc438"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.683600 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2thqp\" (UniqueName: \"kubernetes.io/projected/35ac9085-7569-42c7-a13f-a4c1101dc438-kube-api-access-2thqp\") on node \"crc\" DevicePath \"\"" Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.683677 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/35ac9085-7569-42c7-a13f-a4c1101dc438-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.683690 4773 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/35ac9085-7569-42c7-a13f-a4c1101dc438-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.836355 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-w5d4l" Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.836452 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-w5d4l" event={"ID":"35ac9085-7569-42c7-a13f-a4c1101dc438","Type":"ContainerDied","Data":"1fb0c63382c6521e2ecb251f8c50b02eb183a4385e7689d511fc56720b142dff"} Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.836887 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1fb0c63382c6521e2ecb251f8c50b02eb183a4385e7689d511fc56720b142dff" Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.941560 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-openstack-openstack-cell1-hbg4m"] Jan 22 14:02:35 crc kubenswrapper[4773]: E0122 14:02:35.942175 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35ac9085-7569-42c7-a13f-a4c1101dc438" containerName="validate-network-openstack-openstack-cell1" Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.942195 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="35ac9085-7569-42c7-a13f-a4c1101dc438" containerName="validate-network-openstack-openstack-cell1" Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.942484 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="35ac9085-7569-42c7-a13f-a4c1101dc438" containerName="validate-network-openstack-openstack-cell1" Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.943349 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-hbg4m" Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.945664 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.946896 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.947076 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-bzqnq" Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.947107 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 14:02:35 crc kubenswrapper[4773]: I0122 14:02:35.969196 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-hbg4m"] Jan 22 14:02:36 crc kubenswrapper[4773]: I0122 14:02:36.094225 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/648e60a4-965f-456b-8b70-927b15c1a692-ssh-key-openstack-cell1\") pod \"install-os-openstack-openstack-cell1-hbg4m\" (UID: \"648e60a4-965f-456b-8b70-927b15c1a692\") " pod="openstack/install-os-openstack-openstack-cell1-hbg4m" Jan 22 14:02:36 crc kubenswrapper[4773]: I0122 14:02:36.094357 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krkgp\" (UniqueName: \"kubernetes.io/projected/648e60a4-965f-456b-8b70-927b15c1a692-kube-api-access-krkgp\") pod \"install-os-openstack-openstack-cell1-hbg4m\" (UID: \"648e60a4-965f-456b-8b70-927b15c1a692\") " pod="openstack/install-os-openstack-openstack-cell1-hbg4m" Jan 22 14:02:36 crc kubenswrapper[4773]: I0122 14:02:36.094498 4773 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/648e60a4-965f-456b-8b70-927b15c1a692-inventory\") pod \"install-os-openstack-openstack-cell1-hbg4m\" (UID: \"648e60a4-965f-456b-8b70-927b15c1a692\") " pod="openstack/install-os-openstack-openstack-cell1-hbg4m" Jan 22 14:02:36 crc kubenswrapper[4773]: I0122 14:02:36.196691 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/648e60a4-965f-456b-8b70-927b15c1a692-inventory\") pod \"install-os-openstack-openstack-cell1-hbg4m\" (UID: \"648e60a4-965f-456b-8b70-927b15c1a692\") " pod="openstack/install-os-openstack-openstack-cell1-hbg4m" Jan 22 14:02:36 crc kubenswrapper[4773]: I0122 14:02:36.196935 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/648e60a4-965f-456b-8b70-927b15c1a692-ssh-key-openstack-cell1\") pod \"install-os-openstack-openstack-cell1-hbg4m\" (UID: \"648e60a4-965f-456b-8b70-927b15c1a692\") " pod="openstack/install-os-openstack-openstack-cell1-hbg4m" Jan 22 14:02:36 crc kubenswrapper[4773]: I0122 14:02:36.197003 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krkgp\" (UniqueName: \"kubernetes.io/projected/648e60a4-965f-456b-8b70-927b15c1a692-kube-api-access-krkgp\") pod \"install-os-openstack-openstack-cell1-hbg4m\" (UID: \"648e60a4-965f-456b-8b70-927b15c1a692\") " pod="openstack/install-os-openstack-openstack-cell1-hbg4m" Jan 22 14:02:36 crc kubenswrapper[4773]: I0122 14:02:36.202019 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/648e60a4-965f-456b-8b70-927b15c1a692-inventory\") pod \"install-os-openstack-openstack-cell1-hbg4m\" (UID: \"648e60a4-965f-456b-8b70-927b15c1a692\") " pod="openstack/install-os-openstack-openstack-cell1-hbg4m" Jan 22 14:02:36 crc kubenswrapper[4773]: I0122 14:02:36.202085 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/648e60a4-965f-456b-8b70-927b15c1a692-ssh-key-openstack-cell1\") pod \"install-os-openstack-openstack-cell1-hbg4m\" (UID: \"648e60a4-965f-456b-8b70-927b15c1a692\") " pod="openstack/install-os-openstack-openstack-cell1-hbg4m" Jan 22 14:02:36 crc kubenswrapper[4773]: I0122 14:02:36.221610 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krkgp\" (UniqueName: \"kubernetes.io/projected/648e60a4-965f-456b-8b70-927b15c1a692-kube-api-access-krkgp\") pod \"install-os-openstack-openstack-cell1-hbg4m\" (UID: \"648e60a4-965f-456b-8b70-927b15c1a692\") " pod="openstack/install-os-openstack-openstack-cell1-hbg4m" Jan 22 14:02:36 crc kubenswrapper[4773]: I0122 14:02:36.274756 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-hbg4m" Jan 22 14:02:36 crc kubenswrapper[4773]: I0122 14:02:36.830486 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-hbg4m"] Jan 22 14:02:36 crc kubenswrapper[4773]: W0122 14:02:36.832516 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod648e60a4_965f_456b_8b70_927b15c1a692.slice/crio-d966c8534c5c6a3753041c18db06c7ffe7169e8b93fe75142acf0babdaf24b10 WatchSource:0}: Error finding container d966c8534c5c6a3753041c18db06c7ffe7169e8b93fe75142acf0babdaf24b10: Status 404 returned error can't find the container with id d966c8534c5c6a3753041c18db06c7ffe7169e8b93fe75142acf0babdaf24b10 Jan 22 14:02:36 crc kubenswrapper[4773]: I0122 14:02:36.850799 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-hbg4m" event={"ID":"648e60a4-965f-456b-8b70-927b15c1a692","Type":"ContainerStarted","Data":"d966c8534c5c6a3753041c18db06c7ffe7169e8b93fe75142acf0babdaf24b10"} Jan 22 14:02:37 crc kubenswrapper[4773]: I0122 14:02:37.868205 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-hbg4m" event={"ID":"648e60a4-965f-456b-8b70-927b15c1a692","Type":"ContainerStarted","Data":"5d0673a923a4f98a06d7bef7b39ea696a5de3098890709bd5f2f378291cc622f"} Jan 22 14:02:37 crc kubenswrapper[4773]: I0122 14:02:37.889327 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-openstack-openstack-cell1-hbg4m" podStartSLOduration=2.383981241 podStartE2EDuration="2.889301598s" podCreationTimestamp="2026-01-22 14:02:35 +0000 UTC" firstStartedPulling="2026-01-22 14:02:36.835168424 +0000 UTC m=+7664.413284249" lastFinishedPulling="2026-01-22 14:02:37.340488771 +0000 UTC m=+7664.918604606" observedRunningTime="2026-01-22 14:02:37.882729882 +0000 UTC m=+7665.460845727" watchObservedRunningTime="2026-01-22 14:02:37.889301598 +0000 UTC m=+7665.467417433" Jan 22 14:02:43 crc kubenswrapper[4773]: I0122 14:02:43.235363 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fng96" Jan 22 14:02:43 crc kubenswrapper[4773]: I0122 14:02:43.292750 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fng96" Jan 22 14:02:43 crc kubenswrapper[4773]: I0122 14:02:43.492492 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fng96"] Jan 22 14:02:44 crc kubenswrapper[4773]: I0122 14:02:44.969772 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fng96" podUID="b29a0d48-e08e-44e3-914a-983f8e3dff79" containerName="registry-server" containerID="cri-o://8126070288b63c5b80c67f38f37d72dbbcf843372f71f951ef6567dcc8011408" gracePeriod=2 Jan 22 14:02:45 crc kubenswrapper[4773]: I0122 14:02:45.471957 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fng96" Jan 22 14:02:45 crc kubenswrapper[4773]: I0122 14:02:45.540213 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b29a0d48-e08e-44e3-914a-983f8e3dff79-catalog-content\") pod \"b29a0d48-e08e-44e3-914a-983f8e3dff79\" (UID: \"b29a0d48-e08e-44e3-914a-983f8e3dff79\") " Jan 22 14:02:45 crc kubenswrapper[4773]: I0122 14:02:45.540491 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b29a0d48-e08e-44e3-914a-983f8e3dff79-utilities\") pod \"b29a0d48-e08e-44e3-914a-983f8e3dff79\" (UID: \"b29a0d48-e08e-44e3-914a-983f8e3dff79\") " Jan 22 14:02:45 crc kubenswrapper[4773]: I0122 14:02:45.540623 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwfwc\" (UniqueName: \"kubernetes.io/projected/b29a0d48-e08e-44e3-914a-983f8e3dff79-kube-api-access-zwfwc\") pod \"b29a0d48-e08e-44e3-914a-983f8e3dff79\" (UID: \"b29a0d48-e08e-44e3-914a-983f8e3dff79\") " Jan 22 14:02:45 crc kubenswrapper[4773]: I0122 14:02:45.541115 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b29a0d48-e08e-44e3-914a-983f8e3dff79-utilities" (OuterVolumeSpecName: "utilities") pod "b29a0d48-e08e-44e3-914a-983f8e3dff79" (UID: "b29a0d48-e08e-44e3-914a-983f8e3dff79"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:02:45 crc kubenswrapper[4773]: I0122 14:02:45.553697 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b29a0d48-e08e-44e3-914a-983f8e3dff79-kube-api-access-zwfwc" (OuterVolumeSpecName: "kube-api-access-zwfwc") pod "b29a0d48-e08e-44e3-914a-983f8e3dff79" (UID: "b29a0d48-e08e-44e3-914a-983f8e3dff79"). InnerVolumeSpecName "kube-api-access-zwfwc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:02:45 crc kubenswrapper[4773]: I0122 14:02:45.643577 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b29a0d48-e08e-44e3-914a-983f8e3dff79-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:02:45 crc kubenswrapper[4773]: I0122 14:02:45.643609 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwfwc\" (UniqueName: \"kubernetes.io/projected/b29a0d48-e08e-44e3-914a-983f8e3dff79-kube-api-access-zwfwc\") on node \"crc\" DevicePath \"\"" Jan 22 14:02:45 crc kubenswrapper[4773]: I0122 14:02:45.671143 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b29a0d48-e08e-44e3-914a-983f8e3dff79-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b29a0d48-e08e-44e3-914a-983f8e3dff79" (UID: "b29a0d48-e08e-44e3-914a-983f8e3dff79"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:02:45 crc kubenswrapper[4773]: I0122 14:02:45.747799 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b29a0d48-e08e-44e3-914a-983f8e3dff79-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:02:45 crc kubenswrapper[4773]: I0122 14:02:45.980144 4773 generic.go:334] "Generic (PLEG): container finished" podID="b29a0d48-e08e-44e3-914a-983f8e3dff79" containerID="8126070288b63c5b80c67f38f37d72dbbcf843372f71f951ef6567dcc8011408" exitCode=0 Jan 22 14:02:45 crc kubenswrapper[4773]: I0122 14:02:45.980506 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fng96" event={"ID":"b29a0d48-e08e-44e3-914a-983f8e3dff79","Type":"ContainerDied","Data":"8126070288b63c5b80c67f38f37d72dbbcf843372f71f951ef6567dcc8011408"} Jan 22 14:02:45 crc kubenswrapper[4773]: I0122 14:02:45.981461 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fng96" event={"ID":"b29a0d48-e08e-44e3-914a-983f8e3dff79","Type":"ContainerDied","Data":"7791b101eb38cb3f1fd5b7d4d72623219a70dde38da378850c09a07f25856dd9"} Jan 22 14:02:45 crc kubenswrapper[4773]: I0122 14:02:45.981489 4773 scope.go:117] "RemoveContainer" containerID="8126070288b63c5b80c67f38f37d72dbbcf843372f71f951ef6567dcc8011408" Jan 22 14:02:45 crc kubenswrapper[4773]: I0122 14:02:45.980619 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fng96" Jan 22 14:02:46 crc kubenswrapper[4773]: I0122 14:02:46.004321 4773 scope.go:117] "RemoveContainer" containerID="c9b1c3dc6d712f94f2d3cdc2a49562c62f03544369e7a8e6f8d031d946f56a35" Jan 22 14:02:46 crc kubenswrapper[4773]: I0122 14:02:46.024046 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fng96"] Jan 22 14:02:46 crc kubenswrapper[4773]: I0122 14:02:46.034895 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fng96"] Jan 22 14:02:46 crc kubenswrapper[4773]: I0122 14:02:46.039057 4773 scope.go:117] "RemoveContainer" containerID="7e3de5def13a098a41c6de83c5507d7a6ee1b6b60ad2435d879ee5f6ee112de4" Jan 22 14:02:46 crc kubenswrapper[4773]: I0122 14:02:46.095634 4773 scope.go:117] "RemoveContainer" containerID="8126070288b63c5b80c67f38f37d72dbbcf843372f71f951ef6567dcc8011408" Jan 22 14:02:46 crc kubenswrapper[4773]: E0122 14:02:46.096239 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8126070288b63c5b80c67f38f37d72dbbcf843372f71f951ef6567dcc8011408\": container with ID starting with 8126070288b63c5b80c67f38f37d72dbbcf843372f71f951ef6567dcc8011408 not found: ID does not exist" containerID="8126070288b63c5b80c67f38f37d72dbbcf843372f71f951ef6567dcc8011408" Jan 22 14:02:46 crc kubenswrapper[4773]: I0122 14:02:46.096330 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8126070288b63c5b80c67f38f37d72dbbcf843372f71f951ef6567dcc8011408"} err="failed to get container status \"8126070288b63c5b80c67f38f37d72dbbcf843372f71f951ef6567dcc8011408\": rpc error: code = NotFound desc = could not find container \"8126070288b63c5b80c67f38f37d72dbbcf843372f71f951ef6567dcc8011408\": container with ID starting with 8126070288b63c5b80c67f38f37d72dbbcf843372f71f951ef6567dcc8011408 not found: ID does not exist" Jan 22 14:02:46 crc 
Jan 22 14:02:46 crc kubenswrapper[4773]: E0122 14:02:46.096761 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9b1c3dc6d712f94f2d3cdc2a49562c62f03544369e7a8e6f8d031d946f56a35\": container with ID starting with c9b1c3dc6d712f94f2d3cdc2a49562c62f03544369e7a8e6f8d031d946f56a35 not found: ID does not exist" containerID="c9b1c3dc6d712f94f2d3cdc2a49562c62f03544369e7a8e6f8d031d946f56a35"
Jan 22 14:02:46 crc kubenswrapper[4773]: I0122 14:02:46.096790 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9b1c3dc6d712f94f2d3cdc2a49562c62f03544369e7a8e6f8d031d946f56a35"} err="failed to get container status \"c9b1c3dc6d712f94f2d3cdc2a49562c62f03544369e7a8e6f8d031d946f56a35\": rpc error: code = NotFound desc = could not find container \"c9b1c3dc6d712f94f2d3cdc2a49562c62f03544369e7a8e6f8d031d946f56a35\": container with ID starting with c9b1c3dc6d712f94f2d3cdc2a49562c62f03544369e7a8e6f8d031d946f56a35 not found: ID does not exist"
Jan 22 14:02:46 crc kubenswrapper[4773]: I0122 14:02:46.096811 4773 scope.go:117] "RemoveContainer" containerID="7e3de5def13a098a41c6de83c5507d7a6ee1b6b60ad2435d879ee5f6ee112de4"
Jan 22 14:02:46 crc kubenswrapper[4773]: E0122 14:02:46.097341 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e3de5def13a098a41c6de83c5507d7a6ee1b6b60ad2435d879ee5f6ee112de4\": container with ID starting with 7e3de5def13a098a41c6de83c5507d7a6ee1b6b60ad2435d879ee5f6ee112de4 not found: ID does not exist" containerID="7e3de5def13a098a41c6de83c5507d7a6ee1b6b60ad2435d879ee5f6ee112de4"
Jan 22 14:02:46 crc kubenswrapper[4773]: I0122 14:02:46.097393 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e3de5def13a098a41c6de83c5507d7a6ee1b6b60ad2435d879ee5f6ee112de4"} err="failed to get container status \"7e3de5def13a098a41c6de83c5507d7a6ee1b6b60ad2435d879ee5f6ee112de4\": rpc error: code = NotFound desc = could not find container \"7e3de5def13a098a41c6de83c5507d7a6ee1b6b60ad2435d879ee5f6ee112de4\": container with ID starting with 7e3de5def13a098a41c6de83c5507d7a6ee1b6b60ad2435d879ee5f6ee112de4 not found: ID does not exist"
Jan 22 14:02:46 crc kubenswrapper[4773]: I0122 14:02:46.695835 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b29a0d48-e08e-44e3-914a-983f8e3dff79" path="/var/lib/kubelet/pods/b29a0d48-e08e-44e3-914a-983f8e3dff79/volumes"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.052787 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4wx6n"]
Jan 22 14:02:56 crc kubenswrapper[4773]: E0122 14:02:56.053987 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b29a0d48-e08e-44e3-914a-983f8e3dff79" containerName="extract-content"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.054009 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b29a0d48-e08e-44e3-914a-983f8e3dff79" containerName="extract-content"
Jan 22 14:02:56 crc kubenswrapper[4773]: E0122 14:02:56.054062 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b29a0d48-e08e-44e3-914a-983f8e3dff79" containerName="registry-server"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.054071 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b29a0d48-e08e-44e3-914a-983f8e3dff79" containerName="registry-server"
Jan 22 14:02:56 crc kubenswrapper[4773]: E0122 14:02:56.054089 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b29a0d48-e08e-44e3-914a-983f8e3dff79" containerName="extract-utilities"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.054099 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b29a0d48-e08e-44e3-914a-983f8e3dff79" containerName="extract-utilities"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.054389 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="b29a0d48-e08e-44e3-914a-983f8e3dff79" containerName="registry-server"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.056645 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4wx6n"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.067133 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4wx6n"]
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.210366 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1904a1c-bba2-4422-897b-2a382aa645b2-utilities\") pod \"certified-operators-4wx6n\" (UID: \"d1904a1c-bba2-4422-897b-2a382aa645b2\") " pod="openshift-marketplace/certified-operators-4wx6n"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.210719 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5x2d\" (UniqueName: \"kubernetes.io/projected/d1904a1c-bba2-4422-897b-2a382aa645b2-kube-api-access-l5x2d\") pod \"certified-operators-4wx6n\" (UID: \"d1904a1c-bba2-4422-897b-2a382aa645b2\") " pod="openshift-marketplace/certified-operators-4wx6n"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.210793 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1904a1c-bba2-4422-897b-2a382aa645b2-catalog-content\") pod \"certified-operators-4wx6n\" (UID: \"d1904a1c-bba2-4422-897b-2a382aa645b2\") " pod="openshift-marketplace/certified-operators-4wx6n"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.241342 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8h59d"]
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.244502 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8h59d"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.260082 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8h59d"]
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.312453 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1904a1c-bba2-4422-897b-2a382aa645b2-utilities\") pod \"certified-operators-4wx6n\" (UID: \"d1904a1c-bba2-4422-897b-2a382aa645b2\") " pod="openshift-marketplace/certified-operators-4wx6n"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.312517 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5x2d\" (UniqueName: \"kubernetes.io/projected/d1904a1c-bba2-4422-897b-2a382aa645b2-kube-api-access-l5x2d\") pod \"certified-operators-4wx6n\" (UID: \"d1904a1c-bba2-4422-897b-2a382aa645b2\") " pod="openshift-marketplace/certified-operators-4wx6n"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.312561 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1904a1c-bba2-4422-897b-2a382aa645b2-catalog-content\") pod \"certified-operators-4wx6n\" (UID: \"d1904a1c-bba2-4422-897b-2a382aa645b2\") " pod="openshift-marketplace/certified-operators-4wx6n"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.313051 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1904a1c-bba2-4422-897b-2a382aa645b2-utilities\") pod \"certified-operators-4wx6n\" (UID: \"d1904a1c-bba2-4422-897b-2a382aa645b2\") " pod="openshift-marketplace/certified-operators-4wx6n"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.313123 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1904a1c-bba2-4422-897b-2a382aa645b2-catalog-content\") pod \"certified-operators-4wx6n\" (UID: \"d1904a1c-bba2-4422-897b-2a382aa645b2\") " pod="openshift-marketplace/certified-operators-4wx6n"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.342234 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5x2d\" (UniqueName: \"kubernetes.io/projected/d1904a1c-bba2-4422-897b-2a382aa645b2-kube-api-access-l5x2d\") pod \"certified-operators-4wx6n\" (UID: \"d1904a1c-bba2-4422-897b-2a382aa645b2\") " pod="openshift-marketplace/certified-operators-4wx6n"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.414980 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4a4b475-9fdb-464b-bf05-e52b9fef91d4-utilities\") pod \"community-operators-8h59d\" (UID: \"d4a4b475-9fdb-464b-bf05-e52b9fef91d4\") " pod="openshift-marketplace/community-operators-8h59d"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.415065 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rg52c\" (UniqueName: \"kubernetes.io/projected/d4a4b475-9fdb-464b-bf05-e52b9fef91d4-kube-api-access-rg52c\") pod \"community-operators-8h59d\" (UID: \"d4a4b475-9fdb-464b-bf05-e52b9fef91d4\") " pod="openshift-marketplace/community-operators-8h59d"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.415084 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4a4b475-9fdb-464b-bf05-e52b9fef91d4-catalog-content\") pod \"community-operators-8h59d\" (UID: \"d4a4b475-9fdb-464b-bf05-e52b9fef91d4\") " pod="openshift-marketplace/community-operators-8h59d"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.432912 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4wx6n"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.517630 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4a4b475-9fdb-464b-bf05-e52b9fef91d4-utilities\") pod \"community-operators-8h59d\" (UID: \"d4a4b475-9fdb-464b-bf05-e52b9fef91d4\") " pod="openshift-marketplace/community-operators-8h59d"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.517756 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rg52c\" (UniqueName: \"kubernetes.io/projected/d4a4b475-9fdb-464b-bf05-e52b9fef91d4-kube-api-access-rg52c\") pod \"community-operators-8h59d\" (UID: \"d4a4b475-9fdb-464b-bf05-e52b9fef91d4\") " pod="openshift-marketplace/community-operators-8h59d"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.517785 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4a4b475-9fdb-464b-bf05-e52b9fef91d4-catalog-content\") pod \"community-operators-8h59d\" (UID: \"d4a4b475-9fdb-464b-bf05-e52b9fef91d4\") " pod="openshift-marketplace/community-operators-8h59d"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.518689 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4a4b475-9fdb-464b-bf05-e52b9fef91d4-catalog-content\") pod \"community-operators-8h59d\" (UID: \"d4a4b475-9fdb-464b-bf05-e52b9fef91d4\") " pod="openshift-marketplace/community-operators-8h59d"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.518759 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4a4b475-9fdb-464b-bf05-e52b9fef91d4-utilities\") pod \"community-operators-8h59d\" (UID: \"d4a4b475-9fdb-464b-bf05-e52b9fef91d4\") " pod="openshift-marketplace/community-operators-8h59d"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.540028 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rg52c\" (UniqueName: \"kubernetes.io/projected/d4a4b475-9fdb-464b-bf05-e52b9fef91d4-kube-api-access-rg52c\") pod \"community-operators-8h59d\" (UID: \"d4a4b475-9fdb-464b-bf05-e52b9fef91d4\") " pod="openshift-marketplace/community-operators-8h59d"
Jan 22 14:02:56 crc kubenswrapper[4773]: I0122 14:02:56.564625 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8h59d"
Jan 22 14:02:57 crc kubenswrapper[4773]: I0122 14:02:57.075442 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4wx6n"]
Jan 22 14:02:57 crc kubenswrapper[4773]: I0122 14:02:57.144209 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wx6n" event={"ID":"d1904a1c-bba2-4422-897b-2a382aa645b2","Type":"ContainerStarted","Data":"02ee940a693406d9826e25b8821a6c9908b071b1980a2996f3530e617210a0c3"}
Jan 22 14:02:57 crc kubenswrapper[4773]: I0122 14:02:57.242642 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8h59d"]
Jan 22 14:02:57 crc kubenswrapper[4773]: W0122 14:02:57.253684 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd4a4b475_9fdb_464b_bf05_e52b9fef91d4.slice/crio-16b6dd14a55723934af460860291d4decc3c767483528e082d681b22f26f8b46 WatchSource:0}: Error finding container 16b6dd14a55723934af460860291d4decc3c767483528e082d681b22f26f8b46: Status 404 returned error can't find the container with id 16b6dd14a55723934af460860291d4decc3c767483528e082d681b22f26f8b46
Jan 22 14:02:58 crc kubenswrapper[4773]: I0122 14:02:58.156786 4773 generic.go:334] "Generic (PLEG): container finished" podID="d4a4b475-9fdb-464b-bf05-e52b9fef91d4" containerID="e97752ca2470a8ced2ce1e230a4b466e53ecf18617a5330c88a6e7163b917c9e" exitCode=0
Jan 22 14:02:58 crc kubenswrapper[4773]: I0122 14:02:58.156876 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8h59d" event={"ID":"d4a4b475-9fdb-464b-bf05-e52b9fef91d4","Type":"ContainerDied","Data":"e97752ca2470a8ced2ce1e230a4b466e53ecf18617a5330c88a6e7163b917c9e"}
Jan 22 14:02:58 crc kubenswrapper[4773]: I0122 14:02:58.157227 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8h59d" event={"ID":"d4a4b475-9fdb-464b-bf05-e52b9fef91d4","Type":"ContainerStarted","Data":"16b6dd14a55723934af460860291d4decc3c767483528e082d681b22f26f8b46"}
Jan 22 14:02:58 crc kubenswrapper[4773]: I0122 14:02:58.161460 4773 generic.go:334] "Generic (PLEG): container finished" podID="d1904a1c-bba2-4422-897b-2a382aa645b2" containerID="bdc6a226050d09f6ad7be0e2b3049bda8f998915e51aa6fb07b6980459321f5a" exitCode=0
Jan 22 14:02:58 crc kubenswrapper[4773]: I0122 14:02:58.161507 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wx6n" event={"ID":"d1904a1c-bba2-4422-897b-2a382aa645b2","Type":"ContainerDied","Data":"bdc6a226050d09f6ad7be0e2b3049bda8f998915e51aa6fb07b6980459321f5a"}
Jan 22 14:03:00 crc kubenswrapper[4773]: I0122 14:03:00.182887 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8h59d" event={"ID":"d4a4b475-9fdb-464b-bf05-e52b9fef91d4","Type":"ContainerStarted","Data":"564d6f135c31ab1bd2000db89868e4f3e2458ecaa15cf3f686ed4a2f4e8f7222"}
Jan 22 14:03:00 crc kubenswrapper[4773]: I0122 14:03:00.186858 4773 generic.go:334] "Generic (PLEG): container finished" podID="d1904a1c-bba2-4422-897b-2a382aa645b2" containerID="a8c7fc79f83aeade8338bb4057b0a0062089e275bfcf548108cf7997d6ef1998" exitCode=0
Jan 22 14:03:00 crc kubenswrapper[4773]: I0122 14:03:00.186905 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wx6n" event={"ID":"d1904a1c-bba2-4422-897b-2a382aa645b2","Type":"ContainerDied","Data":"a8c7fc79f83aeade8338bb4057b0a0062089e275bfcf548108cf7997d6ef1998"}
event={"ID":"d1904a1c-bba2-4422-897b-2a382aa645b2","Type":"ContainerDied","Data":"a8c7fc79f83aeade8338bb4057b0a0062089e275bfcf548108cf7997d6ef1998"} Jan 22 14:03:01 crc kubenswrapper[4773]: I0122 14:03:01.199126 4773 generic.go:334] "Generic (PLEG): container finished" podID="d4a4b475-9fdb-464b-bf05-e52b9fef91d4" containerID="564d6f135c31ab1bd2000db89868e4f3e2458ecaa15cf3f686ed4a2f4e8f7222" exitCode=0 Jan 22 14:03:01 crc kubenswrapper[4773]: I0122 14:03:01.199190 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8h59d" event={"ID":"d4a4b475-9fdb-464b-bf05-e52b9fef91d4","Type":"ContainerDied","Data":"564d6f135c31ab1bd2000db89868e4f3e2458ecaa15cf3f686ed4a2f4e8f7222"} Jan 22 14:03:01 crc kubenswrapper[4773]: I0122 14:03:01.213469 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wx6n" event={"ID":"d1904a1c-bba2-4422-897b-2a382aa645b2","Type":"ContainerStarted","Data":"4dafe5ae10c4f316d4825155fb1584702cad9b25685dba9344d77faec4f6007c"} Jan 22 14:03:01 crc kubenswrapper[4773]: I0122 14:03:01.257350 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4wx6n" podStartSLOduration=2.7792076789999998 podStartE2EDuration="5.25726295s" podCreationTimestamp="2026-01-22 14:02:56 +0000 UTC" firstStartedPulling="2026-01-22 14:02:58.163798149 +0000 UTC m=+7685.741913974" lastFinishedPulling="2026-01-22 14:03:00.64185342 +0000 UTC m=+7688.219969245" observedRunningTime="2026-01-22 14:03:01.248668427 +0000 UTC m=+7688.826784312" watchObservedRunningTime="2026-01-22 14:03:01.25726295 +0000 UTC m=+7688.835378865" Jan 22 14:03:02 crc kubenswrapper[4773]: I0122 14:03:02.225922 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8h59d" event={"ID":"d4a4b475-9fdb-464b-bf05-e52b9fef91d4","Type":"ContainerStarted","Data":"5dabff2c4d2a56d644d6cc5a4703bfe5b3b3302e0a5f1b4fb0a2c9f859e87db9"} Jan 22 14:03:02 crc kubenswrapper[4773]: I0122 14:03:02.250003 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8h59d" podStartSLOduration=2.791126094 podStartE2EDuration="6.249979285s" podCreationTimestamp="2026-01-22 14:02:56 +0000 UTC" firstStartedPulling="2026-01-22 14:02:58.159090306 +0000 UTC m=+7685.737206131" lastFinishedPulling="2026-01-22 14:03:01.617943497 +0000 UTC m=+7689.196059322" observedRunningTime="2026-01-22 14:03:02.24312036 +0000 UTC m=+7689.821236195" watchObservedRunningTime="2026-01-22 14:03:02.249979285 +0000 UTC m=+7689.828095110" Jan 22 14:03:04 crc kubenswrapper[4773]: I0122 14:03:04.074412 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:03:04 crc kubenswrapper[4773]: I0122 14:03:04.074841 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:03:06 crc kubenswrapper[4773]: I0122 14:03:06.433531 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/certified-operators-4wx6n" Jan 22 14:03:06 crc kubenswrapper[4773]: I0122 14:03:06.433848 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4wx6n" Jan 22 14:03:06 crc kubenswrapper[4773]: I0122 14:03:06.482642 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4wx6n" Jan 22 14:03:06 crc kubenswrapper[4773]: I0122 14:03:06.564811 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8h59d" Jan 22 14:03:06 crc kubenswrapper[4773]: I0122 14:03:06.565965 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8h59d" Jan 22 14:03:06 crc kubenswrapper[4773]: I0122 14:03:06.619351 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8h59d" Jan 22 14:03:07 crc kubenswrapper[4773]: I0122 14:03:07.340261 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8h59d" Jan 22 14:03:07 crc kubenswrapper[4773]: I0122 14:03:07.349749 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4wx6n" Jan 22 14:03:08 crc kubenswrapper[4773]: I0122 14:03:08.231673 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8h59d"] Jan 22 14:03:09 crc kubenswrapper[4773]: I0122 14:03:09.305536 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8h59d" podUID="d4a4b475-9fdb-464b-bf05-e52b9fef91d4" containerName="registry-server" containerID="cri-o://5dabff2c4d2a56d644d6cc5a4703bfe5b3b3302e0a5f1b4fb0a2c9f859e87db9" gracePeriod=2 Jan 22 14:03:09 crc kubenswrapper[4773]: I0122 14:03:09.631769 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4wx6n"] Jan 22 14:03:09 crc kubenswrapper[4773]: I0122 14:03:09.632009 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4wx6n" podUID="d1904a1c-bba2-4422-897b-2a382aa645b2" containerName="registry-server" containerID="cri-o://4dafe5ae10c4f316d4825155fb1584702cad9b25685dba9344d77faec4f6007c" gracePeriod=2 Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.139164 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4wx6n" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.228710 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8h59d" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.277421 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5x2d\" (UniqueName: \"kubernetes.io/projected/d1904a1c-bba2-4422-897b-2a382aa645b2-kube-api-access-l5x2d\") pod \"d1904a1c-bba2-4422-897b-2a382aa645b2\" (UID: \"d1904a1c-bba2-4422-897b-2a382aa645b2\") " Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.277584 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1904a1c-bba2-4422-897b-2a382aa645b2-utilities\") pod \"d1904a1c-bba2-4422-897b-2a382aa645b2\" (UID: \"d1904a1c-bba2-4422-897b-2a382aa645b2\") " Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.277605 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1904a1c-bba2-4422-897b-2a382aa645b2-catalog-content\") pod \"d1904a1c-bba2-4422-897b-2a382aa645b2\" (UID: \"d1904a1c-bba2-4422-897b-2a382aa645b2\") " Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.278554 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1904a1c-bba2-4422-897b-2a382aa645b2-utilities" (OuterVolumeSpecName: "utilities") pod "d1904a1c-bba2-4422-897b-2a382aa645b2" (UID: "d1904a1c-bba2-4422-897b-2a382aa645b2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.285661 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1904a1c-bba2-4422-897b-2a382aa645b2-kube-api-access-l5x2d" (OuterVolumeSpecName: "kube-api-access-l5x2d") pod "d1904a1c-bba2-4422-897b-2a382aa645b2" (UID: "d1904a1c-bba2-4422-897b-2a382aa645b2"). InnerVolumeSpecName "kube-api-access-l5x2d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.326914 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1904a1c-bba2-4422-897b-2a382aa645b2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d1904a1c-bba2-4422-897b-2a382aa645b2" (UID: "d1904a1c-bba2-4422-897b-2a382aa645b2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.335144 4773 generic.go:334] "Generic (PLEG): container finished" podID="d4a4b475-9fdb-464b-bf05-e52b9fef91d4" containerID="5dabff2c4d2a56d644d6cc5a4703bfe5b3b3302e0a5f1b4fb0a2c9f859e87db9" exitCode=0 Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.335205 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8h59d" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.335205 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8h59d" event={"ID":"d4a4b475-9fdb-464b-bf05-e52b9fef91d4","Type":"ContainerDied","Data":"5dabff2c4d2a56d644d6cc5a4703bfe5b3b3302e0a5f1b4fb0a2c9f859e87db9"} Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.335478 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8h59d" event={"ID":"d4a4b475-9fdb-464b-bf05-e52b9fef91d4","Type":"ContainerDied","Data":"16b6dd14a55723934af460860291d4decc3c767483528e082d681b22f26f8b46"} Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.335512 4773 scope.go:117] "RemoveContainer" containerID="5dabff2c4d2a56d644d6cc5a4703bfe5b3b3302e0a5f1b4fb0a2c9f859e87db9" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.339518 4773 generic.go:334] "Generic (PLEG): container finished" podID="d1904a1c-bba2-4422-897b-2a382aa645b2" containerID="4dafe5ae10c4f316d4825155fb1584702cad9b25685dba9344d77faec4f6007c" exitCode=0 Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.339564 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wx6n" event={"ID":"d1904a1c-bba2-4422-897b-2a382aa645b2","Type":"ContainerDied","Data":"4dafe5ae10c4f316d4825155fb1584702cad9b25685dba9344d77faec4f6007c"} Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.339609 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wx6n" event={"ID":"d1904a1c-bba2-4422-897b-2a382aa645b2","Type":"ContainerDied","Data":"02ee940a693406d9826e25b8821a6c9908b071b1980a2996f3530e617210a0c3"} Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.339803 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4wx6n" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.362169 4773 scope.go:117] "RemoveContainer" containerID="564d6f135c31ab1bd2000db89868e4f3e2458ecaa15cf3f686ed4a2f4e8f7222" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.379793 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rg52c\" (UniqueName: \"kubernetes.io/projected/d4a4b475-9fdb-464b-bf05-e52b9fef91d4-kube-api-access-rg52c\") pod \"d4a4b475-9fdb-464b-bf05-e52b9fef91d4\" (UID: \"d4a4b475-9fdb-464b-bf05-e52b9fef91d4\") " Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.380076 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4a4b475-9fdb-464b-bf05-e52b9fef91d4-utilities\") pod \"d4a4b475-9fdb-464b-bf05-e52b9fef91d4\" (UID: \"d4a4b475-9fdb-464b-bf05-e52b9fef91d4\") " Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.380106 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4a4b475-9fdb-464b-bf05-e52b9fef91d4-catalog-content\") pod \"d4a4b475-9fdb-464b-bf05-e52b9fef91d4\" (UID: \"d4a4b475-9fdb-464b-bf05-e52b9fef91d4\") " Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.380865 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5x2d\" (UniqueName: \"kubernetes.io/projected/d1904a1c-bba2-4422-897b-2a382aa645b2-kube-api-access-l5x2d\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.380885 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1904a1c-bba2-4422-897b-2a382aa645b2-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.380893 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1904a1c-bba2-4422-897b-2a382aa645b2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.383687 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4a4b475-9fdb-464b-bf05-e52b9fef91d4-utilities" (OuterVolumeSpecName: "utilities") pod "d4a4b475-9fdb-464b-bf05-e52b9fef91d4" (UID: "d4a4b475-9fdb-464b-bf05-e52b9fef91d4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.386308 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4a4b475-9fdb-464b-bf05-e52b9fef91d4-kube-api-access-rg52c" (OuterVolumeSpecName: "kube-api-access-rg52c") pod "d4a4b475-9fdb-464b-bf05-e52b9fef91d4" (UID: "d4a4b475-9fdb-464b-bf05-e52b9fef91d4"). InnerVolumeSpecName "kube-api-access-rg52c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.387144 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4wx6n"] Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.396890 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4wx6n"] Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.398427 4773 scope.go:117] "RemoveContainer" containerID="e97752ca2470a8ced2ce1e230a4b466e53ecf18617a5330c88a6e7163b917c9e" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.422646 4773 scope.go:117] "RemoveContainer" containerID="5dabff2c4d2a56d644d6cc5a4703bfe5b3b3302e0a5f1b4fb0a2c9f859e87db9" Jan 22 14:03:10 crc kubenswrapper[4773]: E0122 14:03:10.424388 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5dabff2c4d2a56d644d6cc5a4703bfe5b3b3302e0a5f1b4fb0a2c9f859e87db9\": container with ID starting with 5dabff2c4d2a56d644d6cc5a4703bfe5b3b3302e0a5f1b4fb0a2c9f859e87db9 not found: ID does not exist" containerID="5dabff2c4d2a56d644d6cc5a4703bfe5b3b3302e0a5f1b4fb0a2c9f859e87db9" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.424421 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5dabff2c4d2a56d644d6cc5a4703bfe5b3b3302e0a5f1b4fb0a2c9f859e87db9"} err="failed to get container status \"5dabff2c4d2a56d644d6cc5a4703bfe5b3b3302e0a5f1b4fb0a2c9f859e87db9\": rpc error: code = NotFound desc = could not find container \"5dabff2c4d2a56d644d6cc5a4703bfe5b3b3302e0a5f1b4fb0a2c9f859e87db9\": container with ID starting with 5dabff2c4d2a56d644d6cc5a4703bfe5b3b3302e0a5f1b4fb0a2c9f859e87db9 not found: ID does not exist" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.424446 4773 scope.go:117] "RemoveContainer" containerID="564d6f135c31ab1bd2000db89868e4f3e2458ecaa15cf3f686ed4a2f4e8f7222" Jan 22 14:03:10 crc kubenswrapper[4773]: E0122 14:03:10.424883 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"564d6f135c31ab1bd2000db89868e4f3e2458ecaa15cf3f686ed4a2f4e8f7222\": container with ID starting with 564d6f135c31ab1bd2000db89868e4f3e2458ecaa15cf3f686ed4a2f4e8f7222 not found: ID does not exist" containerID="564d6f135c31ab1bd2000db89868e4f3e2458ecaa15cf3f686ed4a2f4e8f7222" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.424911 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"564d6f135c31ab1bd2000db89868e4f3e2458ecaa15cf3f686ed4a2f4e8f7222"} err="failed to get container status \"564d6f135c31ab1bd2000db89868e4f3e2458ecaa15cf3f686ed4a2f4e8f7222\": rpc error: code = NotFound desc = could not find container \"564d6f135c31ab1bd2000db89868e4f3e2458ecaa15cf3f686ed4a2f4e8f7222\": container with ID starting with 564d6f135c31ab1bd2000db89868e4f3e2458ecaa15cf3f686ed4a2f4e8f7222 not found: ID does not exist" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.424927 4773 scope.go:117] "RemoveContainer" containerID="e97752ca2470a8ced2ce1e230a4b466e53ecf18617a5330c88a6e7163b917c9e" Jan 22 14:03:10 crc kubenswrapper[4773]: E0122 14:03:10.425191 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e97752ca2470a8ced2ce1e230a4b466e53ecf18617a5330c88a6e7163b917c9e\": container with ID starting with 
e97752ca2470a8ced2ce1e230a4b466e53ecf18617a5330c88a6e7163b917c9e not found: ID does not exist" containerID="e97752ca2470a8ced2ce1e230a4b466e53ecf18617a5330c88a6e7163b917c9e" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.425220 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e97752ca2470a8ced2ce1e230a4b466e53ecf18617a5330c88a6e7163b917c9e"} err="failed to get container status \"e97752ca2470a8ced2ce1e230a4b466e53ecf18617a5330c88a6e7163b917c9e\": rpc error: code = NotFound desc = could not find container \"e97752ca2470a8ced2ce1e230a4b466e53ecf18617a5330c88a6e7163b917c9e\": container with ID starting with e97752ca2470a8ced2ce1e230a4b466e53ecf18617a5330c88a6e7163b917c9e not found: ID does not exist" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.425233 4773 scope.go:117] "RemoveContainer" containerID="4dafe5ae10c4f316d4825155fb1584702cad9b25685dba9344d77faec4f6007c" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.440347 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4a4b475-9fdb-464b-bf05-e52b9fef91d4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d4a4b475-9fdb-464b-bf05-e52b9fef91d4" (UID: "d4a4b475-9fdb-464b-bf05-e52b9fef91d4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.483349 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rg52c\" (UniqueName: \"kubernetes.io/projected/d4a4b475-9fdb-464b-bf05-e52b9fef91d4-kube-api-access-rg52c\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.483395 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4a4b475-9fdb-464b-bf05-e52b9fef91d4-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.483410 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4a4b475-9fdb-464b-bf05-e52b9fef91d4-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.491326 4773 scope.go:117] "RemoveContainer" containerID="a8c7fc79f83aeade8338bb4057b0a0062089e275bfcf548108cf7997d6ef1998" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.524659 4773 scope.go:117] "RemoveContainer" containerID="bdc6a226050d09f6ad7be0e2b3049bda8f998915e51aa6fb07b6980459321f5a" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.566754 4773 scope.go:117] "RemoveContainer" containerID="4dafe5ae10c4f316d4825155fb1584702cad9b25685dba9344d77faec4f6007c" Jan 22 14:03:10 crc kubenswrapper[4773]: E0122 14:03:10.567219 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4dafe5ae10c4f316d4825155fb1584702cad9b25685dba9344d77faec4f6007c\": container with ID starting with 4dafe5ae10c4f316d4825155fb1584702cad9b25685dba9344d77faec4f6007c not found: ID does not exist" containerID="4dafe5ae10c4f316d4825155fb1584702cad9b25685dba9344d77faec4f6007c" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.567259 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4dafe5ae10c4f316d4825155fb1584702cad9b25685dba9344d77faec4f6007c"} err="failed to get container status \"4dafe5ae10c4f316d4825155fb1584702cad9b25685dba9344d77faec4f6007c\": rpc error: 
code = NotFound desc = could not find container \"4dafe5ae10c4f316d4825155fb1584702cad9b25685dba9344d77faec4f6007c\": container with ID starting with 4dafe5ae10c4f316d4825155fb1584702cad9b25685dba9344d77faec4f6007c not found: ID does not exist" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.567367 4773 scope.go:117] "RemoveContainer" containerID="a8c7fc79f83aeade8338bb4057b0a0062089e275bfcf548108cf7997d6ef1998" Jan 22 14:03:10 crc kubenswrapper[4773]: E0122 14:03:10.567711 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8c7fc79f83aeade8338bb4057b0a0062089e275bfcf548108cf7997d6ef1998\": container with ID starting with a8c7fc79f83aeade8338bb4057b0a0062089e275bfcf548108cf7997d6ef1998 not found: ID does not exist" containerID="a8c7fc79f83aeade8338bb4057b0a0062089e275bfcf548108cf7997d6ef1998" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.567743 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8c7fc79f83aeade8338bb4057b0a0062089e275bfcf548108cf7997d6ef1998"} err="failed to get container status \"a8c7fc79f83aeade8338bb4057b0a0062089e275bfcf548108cf7997d6ef1998\": rpc error: code = NotFound desc = could not find container \"a8c7fc79f83aeade8338bb4057b0a0062089e275bfcf548108cf7997d6ef1998\": container with ID starting with a8c7fc79f83aeade8338bb4057b0a0062089e275bfcf548108cf7997d6ef1998 not found: ID does not exist" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.567759 4773 scope.go:117] "RemoveContainer" containerID="bdc6a226050d09f6ad7be0e2b3049bda8f998915e51aa6fb07b6980459321f5a" Jan 22 14:03:10 crc kubenswrapper[4773]: E0122 14:03:10.567965 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bdc6a226050d09f6ad7be0e2b3049bda8f998915e51aa6fb07b6980459321f5a\": container with ID starting with bdc6a226050d09f6ad7be0e2b3049bda8f998915e51aa6fb07b6980459321f5a not found: ID does not exist" containerID="bdc6a226050d09f6ad7be0e2b3049bda8f998915e51aa6fb07b6980459321f5a" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.568004 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bdc6a226050d09f6ad7be0e2b3049bda8f998915e51aa6fb07b6980459321f5a"} err="failed to get container status \"bdc6a226050d09f6ad7be0e2b3049bda8f998915e51aa6fb07b6980459321f5a\": rpc error: code = NotFound desc = could not find container \"bdc6a226050d09f6ad7be0e2b3049bda8f998915e51aa6fb07b6980459321f5a\": container with ID starting with bdc6a226050d09f6ad7be0e2b3049bda8f998915e51aa6fb07b6980459321f5a not found: ID does not exist" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.674955 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1904a1c-bba2-4422-897b-2a382aa645b2" path="/var/lib/kubelet/pods/d1904a1c-bba2-4422-897b-2a382aa645b2/volumes" Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.675729 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8h59d"] Jan 22 14:03:10 crc kubenswrapper[4773]: I0122 14:03:10.697277 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8h59d"] Jan 22 14:03:12 crc kubenswrapper[4773]: I0122 14:03:12.671485 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4a4b475-9fdb-464b-bf05-e52b9fef91d4" 
path="/var/lib/kubelet/pods/d4a4b475-9fdb-464b-bf05-e52b9fef91d4/volumes" Jan 22 14:03:24 crc kubenswrapper[4773]: I0122 14:03:24.522305 4773 generic.go:334] "Generic (PLEG): container finished" podID="648e60a4-965f-456b-8b70-927b15c1a692" containerID="5d0673a923a4f98a06d7bef7b39ea696a5de3098890709bd5f2f378291cc622f" exitCode=0 Jan 22 14:03:24 crc kubenswrapper[4773]: I0122 14:03:24.522897 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-hbg4m" event={"ID":"648e60a4-965f-456b-8b70-927b15c1a692","Type":"ContainerDied","Data":"5d0673a923a4f98a06d7bef7b39ea696a5de3098890709bd5f2f378291cc622f"} Jan 22 14:03:25 crc kubenswrapper[4773]: I0122 14:03:25.979140 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-hbg4m" Jan 22 14:03:25 crc kubenswrapper[4773]: I0122 14:03:25.987855 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/648e60a4-965f-456b-8b70-927b15c1a692-ssh-key-openstack-cell1\") pod \"648e60a4-965f-456b-8b70-927b15c1a692\" (UID: \"648e60a4-965f-456b-8b70-927b15c1a692\") " Jan 22 14:03:25 crc kubenswrapper[4773]: I0122 14:03:25.988081 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krkgp\" (UniqueName: \"kubernetes.io/projected/648e60a4-965f-456b-8b70-927b15c1a692-kube-api-access-krkgp\") pod \"648e60a4-965f-456b-8b70-927b15c1a692\" (UID: \"648e60a4-965f-456b-8b70-927b15c1a692\") " Jan 22 14:03:25 crc kubenswrapper[4773]: I0122 14:03:25.988367 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/648e60a4-965f-456b-8b70-927b15c1a692-inventory\") pod \"648e60a4-965f-456b-8b70-927b15c1a692\" (UID: \"648e60a4-965f-456b-8b70-927b15c1a692\") " Jan 22 14:03:25 crc kubenswrapper[4773]: I0122 14:03:25.994275 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/648e60a4-965f-456b-8b70-927b15c1a692-kube-api-access-krkgp" (OuterVolumeSpecName: "kube-api-access-krkgp") pod "648e60a4-965f-456b-8b70-927b15c1a692" (UID: "648e60a4-965f-456b-8b70-927b15c1a692"). InnerVolumeSpecName "kube-api-access-krkgp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.022168 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/648e60a4-965f-456b-8b70-927b15c1a692-inventory" (OuterVolumeSpecName: "inventory") pod "648e60a4-965f-456b-8b70-927b15c1a692" (UID: "648e60a4-965f-456b-8b70-927b15c1a692"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.028038 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/648e60a4-965f-456b-8b70-927b15c1a692-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "648e60a4-965f-456b-8b70-927b15c1a692" (UID: "648e60a4-965f-456b-8b70-927b15c1a692"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.091016 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krkgp\" (UniqueName: \"kubernetes.io/projected/648e60a4-965f-456b-8b70-927b15c1a692-kube-api-access-krkgp\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.091381 4773 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/648e60a4-965f-456b-8b70-927b15c1a692-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.091398 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/648e60a4-965f-456b-8b70-927b15c1a692-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.552894 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-hbg4m" event={"ID":"648e60a4-965f-456b-8b70-927b15c1a692","Type":"ContainerDied","Data":"d966c8534c5c6a3753041c18db06c7ffe7169e8b93fe75142acf0babdaf24b10"} Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.552960 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d966c8534c5c6a3753041c18db06c7ffe7169e8b93fe75142acf0babdaf24b10" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.553035 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-hbg4m" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.641451 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-kpr4l"] Jan 22 14:03:26 crc kubenswrapper[4773]: E0122 14:03:26.641930 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4a4b475-9fdb-464b-bf05-e52b9fef91d4" containerName="extract-content" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.641950 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4a4b475-9fdb-464b-bf05-e52b9fef91d4" containerName="extract-content" Jan 22 14:03:26 crc kubenswrapper[4773]: E0122 14:03:26.641973 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1904a1c-bba2-4422-897b-2a382aa645b2" containerName="extract-content" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.641980 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1904a1c-bba2-4422-897b-2a382aa645b2" containerName="extract-content" Jan 22 14:03:26 crc kubenswrapper[4773]: E0122 14:03:26.641995 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="648e60a4-965f-456b-8b70-927b15c1a692" containerName="install-os-openstack-openstack-cell1" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.642003 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="648e60a4-965f-456b-8b70-927b15c1a692" containerName="install-os-openstack-openstack-cell1" Jan 22 14:03:26 crc kubenswrapper[4773]: E0122 14:03:26.642021 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4a4b475-9fdb-464b-bf05-e52b9fef91d4" containerName="extract-utilities" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.642029 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4a4b475-9fdb-464b-bf05-e52b9fef91d4" containerName="extract-utilities" Jan 22 14:03:26 crc kubenswrapper[4773]: E0122 14:03:26.642052 4773 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="d1904a1c-bba2-4422-897b-2a382aa645b2" containerName="extract-utilities" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.642060 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1904a1c-bba2-4422-897b-2a382aa645b2" containerName="extract-utilities" Jan 22 14:03:26 crc kubenswrapper[4773]: E0122 14:03:26.642084 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4a4b475-9fdb-464b-bf05-e52b9fef91d4" containerName="registry-server" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.642093 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4a4b475-9fdb-464b-bf05-e52b9fef91d4" containerName="registry-server" Jan 22 14:03:26 crc kubenswrapper[4773]: E0122 14:03:26.642113 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1904a1c-bba2-4422-897b-2a382aa645b2" containerName="registry-server" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.642120 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1904a1c-bba2-4422-897b-2a382aa645b2" containerName="registry-server" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.642386 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1904a1c-bba2-4422-897b-2a382aa645b2" containerName="registry-server" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.642405 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4a4b475-9fdb-464b-bf05-e52b9fef91d4" containerName="registry-server" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.642426 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="648e60a4-965f-456b-8b70-927b15c1a692" containerName="install-os-openstack-openstack-cell1" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.643417 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-kpr4l" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.655446 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-kpr4l"] Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.682865 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.683078 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.683139 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.683236 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-bzqnq" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.804554 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0c3b769b-d2e7-4491-b723-fdd503391eff-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-kpr4l\" (UID: \"0c3b769b-d2e7-4491-b723-fdd503391eff\") " pod="openstack/configure-os-openstack-openstack-cell1-kpr4l" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.804672 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0c3b769b-d2e7-4491-b723-fdd503391eff-inventory\") pod \"configure-os-openstack-openstack-cell1-kpr4l\" (UID: \"0c3b769b-d2e7-4491-b723-fdd503391eff\") " pod="openstack/configure-os-openstack-openstack-cell1-kpr4l" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.804706 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tv6zp\" (UniqueName: \"kubernetes.io/projected/0c3b769b-d2e7-4491-b723-fdd503391eff-kube-api-access-tv6zp\") pod \"configure-os-openstack-openstack-cell1-kpr4l\" (UID: \"0c3b769b-d2e7-4491-b723-fdd503391eff\") " pod="openstack/configure-os-openstack-openstack-cell1-kpr4l" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.907223 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0c3b769b-d2e7-4491-b723-fdd503391eff-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-kpr4l\" (UID: \"0c3b769b-d2e7-4491-b723-fdd503391eff\") " pod="openstack/configure-os-openstack-openstack-cell1-kpr4l" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.907980 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0c3b769b-d2e7-4491-b723-fdd503391eff-inventory\") pod \"configure-os-openstack-openstack-cell1-kpr4l\" (UID: \"0c3b769b-d2e7-4491-b723-fdd503391eff\") " pod="openstack/configure-os-openstack-openstack-cell1-kpr4l" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.908054 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tv6zp\" (UniqueName: \"kubernetes.io/projected/0c3b769b-d2e7-4491-b723-fdd503391eff-kube-api-access-tv6zp\") pod \"configure-os-openstack-openstack-cell1-kpr4l\" (UID: \"0c3b769b-d2e7-4491-b723-fdd503391eff\") " 
pod="openstack/configure-os-openstack-openstack-cell1-kpr4l" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.913909 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0c3b769b-d2e7-4491-b723-fdd503391eff-ssh-key-openstack-cell1\") pod \"configure-os-openstack-openstack-cell1-kpr4l\" (UID: \"0c3b769b-d2e7-4491-b723-fdd503391eff\") " pod="openstack/configure-os-openstack-openstack-cell1-kpr4l" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.924016 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0c3b769b-d2e7-4491-b723-fdd503391eff-inventory\") pod \"configure-os-openstack-openstack-cell1-kpr4l\" (UID: \"0c3b769b-d2e7-4491-b723-fdd503391eff\") " pod="openstack/configure-os-openstack-openstack-cell1-kpr4l" Jan 22 14:03:26 crc kubenswrapper[4773]: I0122 14:03:26.929809 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tv6zp\" (UniqueName: \"kubernetes.io/projected/0c3b769b-d2e7-4491-b723-fdd503391eff-kube-api-access-tv6zp\") pod \"configure-os-openstack-openstack-cell1-kpr4l\" (UID: \"0c3b769b-d2e7-4491-b723-fdd503391eff\") " pod="openstack/configure-os-openstack-openstack-cell1-kpr4l" Jan 22 14:03:27 crc kubenswrapper[4773]: I0122 14:03:27.000733 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-kpr4l" Jan 22 14:03:27 crc kubenswrapper[4773]: I0122 14:03:27.539552 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-kpr4l"] Jan 22 14:03:27 crc kubenswrapper[4773]: I0122 14:03:27.564050 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-kpr4l" event={"ID":"0c3b769b-d2e7-4491-b723-fdd503391eff","Type":"ContainerStarted","Data":"2e25edcd321bd709fc7ac49d466b153c2b0a1131c5d6ff23573a0d1c45358837"} Jan 22 14:03:28 crc kubenswrapper[4773]: I0122 14:03:28.621906 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-kpr4l" event={"ID":"0c3b769b-d2e7-4491-b723-fdd503391eff","Type":"ContainerStarted","Data":"558550d26fb5024bc360b5c1e51e7c394f6ec7f26f107a4f7f3cd4c359b2c148"} Jan 22 14:03:28 crc kubenswrapper[4773]: I0122 14:03:28.659458 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-openstack-openstack-cell1-kpr4l" podStartSLOduration=2.167195601 podStartE2EDuration="2.659435853s" podCreationTimestamp="2026-01-22 14:03:26 +0000 UTC" firstStartedPulling="2026-01-22 14:03:27.53880641 +0000 UTC m=+7715.116922235" lastFinishedPulling="2026-01-22 14:03:28.031046662 +0000 UTC m=+7715.609162487" observedRunningTime="2026-01-22 14:03:28.647892066 +0000 UTC m=+7716.226007891" watchObservedRunningTime="2026-01-22 14:03:28.659435853 +0000 UTC m=+7716.237551678" Jan 22 14:03:34 crc kubenswrapper[4773]: I0122 14:03:34.074029 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:03:34 crc kubenswrapper[4773]: I0122 14:03:34.074652 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" 
podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:04:04 crc kubenswrapper[4773]: I0122 14:04:04.073940 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:04:04 crc kubenswrapper[4773]: I0122 14:04:04.074439 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:04:04 crc kubenswrapper[4773]: I0122 14:04:04.074509 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 14:04:04 crc kubenswrapper[4773]: I0122 14:04:04.075450 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 14:04:04 crc kubenswrapper[4773]: I0122 14:04:04.075511 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" gracePeriod=600 Jan 22 14:04:04 crc kubenswrapper[4773]: E0122 14:04:04.449037 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:04:04 crc kubenswrapper[4773]: I0122 14:04:04.536586 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" exitCode=0 Jan 22 14:04:04 crc kubenswrapper[4773]: I0122 14:04:04.536660 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e"} Jan 22 14:04:04 crc kubenswrapper[4773]: I0122 14:04:04.536733 4773 scope.go:117] "RemoveContainer" containerID="3ed07aff4ff5fa2ed28234057c36f8999a1e1cd3208798180a3cb17cdd63263f" Jan 22 14:04:04 crc kubenswrapper[4773]: I0122 14:04:04.537789 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:04:04 crc kubenswrapper[4773]: E0122 14:04:04.538115 4773 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:04:16 crc kubenswrapper[4773]: I0122 14:04:16.658377 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:04:16 crc kubenswrapper[4773]: E0122 14:04:16.659362 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:04:19 crc kubenswrapper[4773]: I0122 14:04:19.019843 4773 generic.go:334] "Generic (PLEG): container finished" podID="0c3b769b-d2e7-4491-b723-fdd503391eff" containerID="558550d26fb5024bc360b5c1e51e7c394f6ec7f26f107a4f7f3cd4c359b2c148" exitCode=0 Jan 22 14:04:19 crc kubenswrapper[4773]: I0122 14:04:19.020457 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-kpr4l" event={"ID":"0c3b769b-d2e7-4491-b723-fdd503391eff","Type":"ContainerDied","Data":"558550d26fb5024bc360b5c1e51e7c394f6ec7f26f107a4f7f3cd4c359b2c148"} Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:20.680453 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-kpr4l" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:20.772857 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0c3b769b-d2e7-4491-b723-fdd503391eff-inventory\") pod \"0c3b769b-d2e7-4491-b723-fdd503391eff\" (UID: \"0c3b769b-d2e7-4491-b723-fdd503391eff\") " Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:20.772900 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0c3b769b-d2e7-4491-b723-fdd503391eff-ssh-key-openstack-cell1\") pod \"0c3b769b-d2e7-4491-b723-fdd503391eff\" (UID: \"0c3b769b-d2e7-4491-b723-fdd503391eff\") " Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:20.773025 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tv6zp\" (UniqueName: \"kubernetes.io/projected/0c3b769b-d2e7-4491-b723-fdd503391eff-kube-api-access-tv6zp\") pod \"0c3b769b-d2e7-4491-b723-fdd503391eff\" (UID: \"0c3b769b-d2e7-4491-b723-fdd503391eff\") " Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:20.780697 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c3b769b-d2e7-4491-b723-fdd503391eff-kube-api-access-tv6zp" (OuterVolumeSpecName: "kube-api-access-tv6zp") pod "0c3b769b-d2e7-4491-b723-fdd503391eff" (UID: "0c3b769b-d2e7-4491-b723-fdd503391eff"). InnerVolumeSpecName "kube-api-access-tv6zp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:20.813342 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c3b769b-d2e7-4491-b723-fdd503391eff-inventory" (OuterVolumeSpecName: "inventory") pod "0c3b769b-d2e7-4491-b723-fdd503391eff" (UID: "0c3b769b-d2e7-4491-b723-fdd503391eff"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:20.818839 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c3b769b-d2e7-4491-b723-fdd503391eff-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "0c3b769b-d2e7-4491-b723-fdd503391eff" (UID: "0c3b769b-d2e7-4491-b723-fdd503391eff"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:20.876763 4773 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0c3b769b-d2e7-4491-b723-fdd503391eff-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:20.876798 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0c3b769b-d2e7-4491-b723-fdd503391eff-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:20.876818 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tv6zp\" (UniqueName: \"kubernetes.io/projected/0c3b769b-d2e7-4491-b723-fdd503391eff-kube-api-access-tv6zp\") on node \"crc\" DevicePath \"\"" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.042807 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-kpr4l" event={"ID":"0c3b769b-d2e7-4491-b723-fdd503391eff","Type":"ContainerDied","Data":"2e25edcd321bd709fc7ac49d466b153c2b0a1131c5d6ff23573a0d1c45358837"} Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.042850 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-kpr4l" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.042874 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e25edcd321bd709fc7ac49d466b153c2b0a1131c5d6ff23573a0d1c45358837" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.147862 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-openstack-d25vw"] Jan 22 14:04:21 crc kubenswrapper[4773]: E0122 14:04:21.148722 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c3b769b-d2e7-4491-b723-fdd503391eff" containerName="configure-os-openstack-openstack-cell1" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.148737 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c3b769b-d2e7-4491-b723-fdd503391eff" containerName="configure-os-openstack-openstack-cell1" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.149008 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c3b769b-d2e7-4491-b723-fdd503391eff" containerName="configure-os-openstack-openstack-cell1" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.149957 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-d25vw" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.154063 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-bzqnq" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.154229 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.154334 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.154619 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.171273 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-d25vw"] Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.185950 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/10d12b02-ba12-4fa5-b5f0-8b898ba5141e-inventory-0\") pod \"ssh-known-hosts-openstack-d25vw\" (UID: \"10d12b02-ba12-4fa5-b5f0-8b898ba5141e\") " pod="openstack/ssh-known-hosts-openstack-d25vw" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.186094 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/10d12b02-ba12-4fa5-b5f0-8b898ba5141e-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-d25vw\" (UID: \"10d12b02-ba12-4fa5-b5f0-8b898ba5141e\") " pod="openstack/ssh-known-hosts-openstack-d25vw" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.186369 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q26fx\" (UniqueName: \"kubernetes.io/projected/10d12b02-ba12-4fa5-b5f0-8b898ba5141e-kube-api-access-q26fx\") pod \"ssh-known-hosts-openstack-d25vw\" (UID: \"10d12b02-ba12-4fa5-b5f0-8b898ba5141e\") " pod="openstack/ssh-known-hosts-openstack-d25vw" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.287570 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/10d12b02-ba12-4fa5-b5f0-8b898ba5141e-inventory-0\") pod \"ssh-known-hosts-openstack-d25vw\" (UID: \"10d12b02-ba12-4fa5-b5f0-8b898ba5141e\") " pod="openstack/ssh-known-hosts-openstack-d25vw" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.287691 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/10d12b02-ba12-4fa5-b5f0-8b898ba5141e-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-d25vw\" (UID: \"10d12b02-ba12-4fa5-b5f0-8b898ba5141e\") " pod="openstack/ssh-known-hosts-openstack-d25vw" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.287781 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q26fx\" (UniqueName: \"kubernetes.io/projected/10d12b02-ba12-4fa5-b5f0-8b898ba5141e-kube-api-access-q26fx\") pod \"ssh-known-hosts-openstack-d25vw\" (UID: \"10d12b02-ba12-4fa5-b5f0-8b898ba5141e\") " pod="openstack/ssh-known-hosts-openstack-d25vw" Jan 22 14:04:21 crc kubenswrapper[4773]: E0122 14:04:21.290004 4773 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" 
err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c3b769b_d2e7_4491_b723_fdd503391eff.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c3b769b_d2e7_4491_b723_fdd503391eff.slice/crio-2e25edcd321bd709fc7ac49d466b153c2b0a1131c5d6ff23573a0d1c45358837\": RecentStats: unable to find data in memory cache]" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.294441 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/10d12b02-ba12-4fa5-b5f0-8b898ba5141e-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-d25vw\" (UID: \"10d12b02-ba12-4fa5-b5f0-8b898ba5141e\") " pod="openstack/ssh-known-hosts-openstack-d25vw" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.310507 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/10d12b02-ba12-4fa5-b5f0-8b898ba5141e-inventory-0\") pod \"ssh-known-hosts-openstack-d25vw\" (UID: \"10d12b02-ba12-4fa5-b5f0-8b898ba5141e\") " pod="openstack/ssh-known-hosts-openstack-d25vw" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.315631 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q26fx\" (UniqueName: \"kubernetes.io/projected/10d12b02-ba12-4fa5-b5f0-8b898ba5141e-kube-api-access-q26fx\") pod \"ssh-known-hosts-openstack-d25vw\" (UID: \"10d12b02-ba12-4fa5-b5f0-8b898ba5141e\") " pod="openstack/ssh-known-hosts-openstack-d25vw" Jan 22 14:04:21 crc kubenswrapper[4773]: I0122 14:04:21.489978 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-d25vw" Jan 22 14:04:22 crc kubenswrapper[4773]: I0122 14:04:22.109248 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-d25vw"] Jan 22 14:04:23 crc kubenswrapper[4773]: I0122 14:04:23.262185 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-d25vw" event={"ID":"10d12b02-ba12-4fa5-b5f0-8b898ba5141e","Type":"ContainerStarted","Data":"3d71fd47b4e320e8fc79a3f9ebe813ed18b5043da38f2ee7dc3bb878e512bdad"} Jan 22 14:04:24 crc kubenswrapper[4773]: I0122 14:04:24.276770 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-d25vw" event={"ID":"10d12b02-ba12-4fa5-b5f0-8b898ba5141e","Type":"ContainerStarted","Data":"d600836e8fb1057d1128d606f93f956c2aecd5d37a5f34f98a3365d3b297e36a"} Jan 22 14:04:24 crc kubenswrapper[4773]: I0122 14:04:24.310128 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-openstack-d25vw" podStartSLOduration=2.190648588 podStartE2EDuration="3.310093596s" podCreationTimestamp="2026-01-22 14:04:21 +0000 UTC" firstStartedPulling="2026-01-22 14:04:22.117943841 +0000 UTC m=+7769.696059666" lastFinishedPulling="2026-01-22 14:04:23.237388849 +0000 UTC m=+7770.815504674" observedRunningTime="2026-01-22 14:04:24.304545889 +0000 UTC m=+7771.882661774" watchObservedRunningTime="2026-01-22 14:04:24.310093596 +0000 UTC m=+7771.888209421" Jan 22 14:04:31 crc kubenswrapper[4773]: I0122 14:04:31.658862 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:04:31 crc kubenswrapper[4773]: E0122 14:04:31.659631 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" 
for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:04:33 crc kubenswrapper[4773]: I0122 14:04:33.928921 4773 generic.go:334] "Generic (PLEG): container finished" podID="10d12b02-ba12-4fa5-b5f0-8b898ba5141e" containerID="d600836e8fb1057d1128d606f93f956c2aecd5d37a5f34f98a3365d3b297e36a" exitCode=0 Jan 22 14:04:33 crc kubenswrapper[4773]: I0122 14:04:33.929052 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-d25vw" event={"ID":"10d12b02-ba12-4fa5-b5f0-8b898ba5141e","Type":"ContainerDied","Data":"d600836e8fb1057d1128d606f93f956c2aecd5d37a5f34f98a3365d3b297e36a"} Jan 22 14:04:35 crc kubenswrapper[4773]: I0122 14:04:35.468803 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-d25vw" Jan 22 14:04:35 crc kubenswrapper[4773]: I0122 14:04:35.575207 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q26fx\" (UniqueName: \"kubernetes.io/projected/10d12b02-ba12-4fa5-b5f0-8b898ba5141e-kube-api-access-q26fx\") pod \"10d12b02-ba12-4fa5-b5f0-8b898ba5141e\" (UID: \"10d12b02-ba12-4fa5-b5f0-8b898ba5141e\") " Jan 22 14:04:35 crc kubenswrapper[4773]: I0122 14:04:35.575419 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/10d12b02-ba12-4fa5-b5f0-8b898ba5141e-ssh-key-openstack-cell1\") pod \"10d12b02-ba12-4fa5-b5f0-8b898ba5141e\" (UID: \"10d12b02-ba12-4fa5-b5f0-8b898ba5141e\") " Jan 22 14:04:35 crc kubenswrapper[4773]: I0122 14:04:35.575488 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/10d12b02-ba12-4fa5-b5f0-8b898ba5141e-inventory-0\") pod \"10d12b02-ba12-4fa5-b5f0-8b898ba5141e\" (UID: \"10d12b02-ba12-4fa5-b5f0-8b898ba5141e\") " Jan 22 14:04:35 crc kubenswrapper[4773]: I0122 14:04:35.582782 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10d12b02-ba12-4fa5-b5f0-8b898ba5141e-kube-api-access-q26fx" (OuterVolumeSpecName: "kube-api-access-q26fx") pod "10d12b02-ba12-4fa5-b5f0-8b898ba5141e" (UID: "10d12b02-ba12-4fa5-b5f0-8b898ba5141e"). InnerVolumeSpecName "kube-api-access-q26fx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:04:35 crc kubenswrapper[4773]: I0122 14:04:35.614695 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10d12b02-ba12-4fa5-b5f0-8b898ba5141e-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "10d12b02-ba12-4fa5-b5f0-8b898ba5141e" (UID: "10d12b02-ba12-4fa5-b5f0-8b898ba5141e"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:04:35 crc kubenswrapper[4773]: I0122 14:04:35.633973 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10d12b02-ba12-4fa5-b5f0-8b898ba5141e-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "10d12b02-ba12-4fa5-b5f0-8b898ba5141e" (UID: "10d12b02-ba12-4fa5-b5f0-8b898ba5141e"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:04:35 crc kubenswrapper[4773]: I0122 14:04:35.686868 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/10d12b02-ba12-4fa5-b5f0-8b898ba5141e-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 14:04:35 crc kubenswrapper[4773]: I0122 14:04:35.686908 4773 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/10d12b02-ba12-4fa5-b5f0-8b898ba5141e-inventory-0\") on node \"crc\" DevicePath \"\"" Jan 22 14:04:35 crc kubenswrapper[4773]: I0122 14:04:35.686926 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q26fx\" (UniqueName: \"kubernetes.io/projected/10d12b02-ba12-4fa5-b5f0-8b898ba5141e-kube-api-access-q26fx\") on node \"crc\" DevicePath \"\"" Jan 22 14:04:35 crc kubenswrapper[4773]: I0122 14:04:35.953358 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-d25vw" event={"ID":"10d12b02-ba12-4fa5-b5f0-8b898ba5141e","Type":"ContainerDied","Data":"3d71fd47b4e320e8fc79a3f9ebe813ed18b5043da38f2ee7dc3bb878e512bdad"} Jan 22 14:04:35 crc kubenswrapper[4773]: I0122 14:04:35.953596 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d71fd47b4e320e8fc79a3f9ebe813ed18b5043da38f2ee7dc3bb878e512bdad" Jan 22 14:04:35 crc kubenswrapper[4773]: I0122 14:04:35.953614 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-d25vw" Jan 22 14:04:36 crc kubenswrapper[4773]: I0122 14:04:36.047442 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-openstack-openstack-cell1-dxj54"] Jan 22 14:04:36 crc kubenswrapper[4773]: E0122 14:04:36.047955 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10d12b02-ba12-4fa5-b5f0-8b898ba5141e" containerName="ssh-known-hosts-openstack" Jan 22 14:04:36 crc kubenswrapper[4773]: I0122 14:04:36.047975 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="10d12b02-ba12-4fa5-b5f0-8b898ba5141e" containerName="ssh-known-hosts-openstack" Jan 22 14:04:36 crc kubenswrapper[4773]: I0122 14:04:36.048167 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="10d12b02-ba12-4fa5-b5f0-8b898ba5141e" containerName="ssh-known-hosts-openstack" Jan 22 14:04:36 crc kubenswrapper[4773]: I0122 14:04:36.049078 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-dxj54" Jan 22 14:04:36 crc kubenswrapper[4773]: I0122 14:04:36.051196 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 14:04:36 crc kubenswrapper[4773]: I0122 14:04:36.051263 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-bzqnq" Jan 22 14:04:36 crc kubenswrapper[4773]: I0122 14:04:36.051518 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 14:04:36 crc kubenswrapper[4773]: I0122 14:04:36.051857 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 14:04:36 crc kubenswrapper[4773]: I0122 14:04:36.060537 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-dxj54"] Jan 22 14:04:36 crc kubenswrapper[4773]: I0122 14:04:36.212136 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0e69d61d-845b-4cfe-97aa-2b2f5fde0040-inventory\") pod \"run-os-openstack-openstack-cell1-dxj54\" (UID: \"0e69d61d-845b-4cfe-97aa-2b2f5fde0040\") " pod="openstack/run-os-openstack-openstack-cell1-dxj54" Jan 22 14:04:36 crc kubenswrapper[4773]: I0122 14:04:36.212303 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5z9n\" (UniqueName: \"kubernetes.io/projected/0e69d61d-845b-4cfe-97aa-2b2f5fde0040-kube-api-access-l5z9n\") pod \"run-os-openstack-openstack-cell1-dxj54\" (UID: \"0e69d61d-845b-4cfe-97aa-2b2f5fde0040\") " pod="openstack/run-os-openstack-openstack-cell1-dxj54" Jan 22 14:04:36 crc kubenswrapper[4773]: I0122 14:04:36.212337 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0e69d61d-845b-4cfe-97aa-2b2f5fde0040-ssh-key-openstack-cell1\") pod \"run-os-openstack-openstack-cell1-dxj54\" (UID: \"0e69d61d-845b-4cfe-97aa-2b2f5fde0040\") " pod="openstack/run-os-openstack-openstack-cell1-dxj54" Jan 22 14:04:36 crc kubenswrapper[4773]: I0122 14:04:36.314755 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0e69d61d-845b-4cfe-97aa-2b2f5fde0040-inventory\") pod \"run-os-openstack-openstack-cell1-dxj54\" (UID: \"0e69d61d-845b-4cfe-97aa-2b2f5fde0040\") " pod="openstack/run-os-openstack-openstack-cell1-dxj54" Jan 22 14:04:36 crc kubenswrapper[4773]: I0122 14:04:36.314870 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5z9n\" (UniqueName: \"kubernetes.io/projected/0e69d61d-845b-4cfe-97aa-2b2f5fde0040-kube-api-access-l5z9n\") pod \"run-os-openstack-openstack-cell1-dxj54\" (UID: \"0e69d61d-845b-4cfe-97aa-2b2f5fde0040\") " pod="openstack/run-os-openstack-openstack-cell1-dxj54" Jan 22 14:04:36 crc kubenswrapper[4773]: I0122 14:04:36.314898 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0e69d61d-845b-4cfe-97aa-2b2f5fde0040-ssh-key-openstack-cell1\") pod \"run-os-openstack-openstack-cell1-dxj54\" (UID: \"0e69d61d-845b-4cfe-97aa-2b2f5fde0040\") " pod="openstack/run-os-openstack-openstack-cell1-dxj54" Jan 22 14:04:36 crc kubenswrapper[4773]: I0122 
14:04:36.318807 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0e69d61d-845b-4cfe-97aa-2b2f5fde0040-ssh-key-openstack-cell1\") pod \"run-os-openstack-openstack-cell1-dxj54\" (UID: \"0e69d61d-845b-4cfe-97aa-2b2f5fde0040\") " pod="openstack/run-os-openstack-openstack-cell1-dxj54" Jan 22 14:04:36 crc kubenswrapper[4773]: I0122 14:04:36.318852 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0e69d61d-845b-4cfe-97aa-2b2f5fde0040-inventory\") pod \"run-os-openstack-openstack-cell1-dxj54\" (UID: \"0e69d61d-845b-4cfe-97aa-2b2f5fde0040\") " pod="openstack/run-os-openstack-openstack-cell1-dxj54" Jan 22 14:04:36 crc kubenswrapper[4773]: I0122 14:04:36.346940 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5z9n\" (UniqueName: \"kubernetes.io/projected/0e69d61d-845b-4cfe-97aa-2b2f5fde0040-kube-api-access-l5z9n\") pod \"run-os-openstack-openstack-cell1-dxj54\" (UID: \"0e69d61d-845b-4cfe-97aa-2b2f5fde0040\") " pod="openstack/run-os-openstack-openstack-cell1-dxj54" Jan 22 14:04:36 crc kubenswrapper[4773]: I0122 14:04:36.376041 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-dxj54" Jan 22 14:04:37 crc kubenswrapper[4773]: I0122 14:04:37.135268 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-dxj54"] Jan 22 14:04:37 crc kubenswrapper[4773]: I0122 14:04:37.982144 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-dxj54" event={"ID":"0e69d61d-845b-4cfe-97aa-2b2f5fde0040","Type":"ContainerStarted","Data":"bf6e61acea75830e2b22e2537bd4cc87ffc701a524ab651cc943ba787c3334f0"} Jan 22 14:04:37 crc kubenswrapper[4773]: I0122 14:04:37.982491 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-dxj54" event={"ID":"0e69d61d-845b-4cfe-97aa-2b2f5fde0040","Type":"ContainerStarted","Data":"d85fa04750e4c96c5f4ed7856783181b64d1f6f7257976c855d82e89a568b30c"} Jan 22 14:04:38 crc kubenswrapper[4773]: I0122 14:04:38.013318 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-openstack-openstack-cell1-dxj54" podStartSLOduration=1.501509302 podStartE2EDuration="2.013248597s" podCreationTimestamp="2026-01-22 14:04:36 +0000 UTC" firstStartedPulling="2026-01-22 14:04:37.153727904 +0000 UTC m=+7784.731843729" lastFinishedPulling="2026-01-22 14:04:37.665467199 +0000 UTC m=+7785.243583024" observedRunningTime="2026-01-22 14:04:37.995117103 +0000 UTC m=+7785.573232938" watchObservedRunningTime="2026-01-22 14:04:38.013248597 +0000 UTC m=+7785.591364422" Jan 22 14:04:46 crc kubenswrapper[4773]: I0122 14:04:46.658868 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:04:46 crc kubenswrapper[4773]: E0122 14:04:46.661379 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:04:48 crc kubenswrapper[4773]: I0122 
Jan 22 14:04:46 crc kubenswrapper[4773]: I0122 14:04:46.658868 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e"
Jan 22 14:04:46 crc kubenswrapper[4773]: E0122 14:04:46.661379 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
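The E0122 entry above shows the kubelet declining to restart the machine-config-daemon container while its CrashLoopBackOff window is open; "back-off 5m0s" is the cap, and the same RemoveContainer/"Error syncing pod, skipping" pair recurs every ten to fifteen seconds through the rest of this log as sync attempts are skipped. The commonly described behavior is a per-container delay that starts small and doubles on each failed restart until it reaches the cap, resetting after a sufficiently long clean run. A sketch of that doubling-with-cap rule; the 10s initial delay is a widely cited kubelet default assumed here, since only the 5m0s cap is visible in this log:

package main

import (
    "fmt"
    "time"
)

// nextBackoff models a restart delay that doubles per failure, capped at limit,
// in the spirit of the kubelet's CrashLoopBackOff handling.
func nextBackoff(cur, initial, limit time.Duration) time.Duration {
    if cur == 0 {
        return initial
    }
    if next := 2 * cur; next < limit {
        return next
    }
    return limit
}

func main() {
    const (
        initial = 10 * time.Second
        limit   = 5 * time.Minute // the "back-off 5m0s" seen above
    )
    var d time.Duration
    for i := 1; i <= 7; i++ {
        d = nextBackoff(d, initial, limit)
        fmt.Printf("failed restart %d: wait %v\n", i, d)
    }
    // Prints 10s, 20s, 40s, 1m20s, 2m40s, 5m0s, 5m0s.
}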
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:04:49 crc kubenswrapper[4773]: I0122 14:04:49.952767 4773 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0e69d61d-845b-4cfe-97aa-2b2f5fde0040-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 14:04:49 crc kubenswrapper[4773]: I0122 14:04:49.952812 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/0e69d61d-845b-4cfe-97aa-2b2f5fde0040-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 14:04:49 crc kubenswrapper[4773]: I0122 14:04:49.952827 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5z9n\" (UniqueName: \"kubernetes.io/projected/0e69d61d-845b-4cfe-97aa-2b2f5fde0040-kube-api-access-l5z9n\") on node \"crc\" DevicePath \"\"" Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.296109 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-dxj54" event={"ID":"0e69d61d-845b-4cfe-97aa-2b2f5fde0040","Type":"ContainerDied","Data":"d85fa04750e4c96c5f4ed7856783181b64d1f6f7257976c855d82e89a568b30c"} Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.296387 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d85fa04750e4c96c5f4ed7856783181b64d1f6f7257976c855d82e89a568b30c" Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.296180 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-dxj54" Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.438550 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-lnlt2"] Jan 22 14:04:50 crc kubenswrapper[4773]: E0122 14:04:50.439051 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e69d61d-845b-4cfe-97aa-2b2f5fde0040" containerName="run-os-openstack-openstack-cell1" Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.439068 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e69d61d-845b-4cfe-97aa-2b2f5fde0040" containerName="run-os-openstack-openstack-cell1" Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.439278 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e69d61d-845b-4cfe-97aa-2b2f5fde0040" containerName="run-os-openstack-openstack-cell1" Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.440018 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-lnlt2" Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.442260 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.443273 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.447021 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-bzqnq" Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.454337 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-lnlt2"] Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.460504 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.467707 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d673e2cc-043b-49a8-9426-715a50fdc54c-inventory\") pod \"reboot-os-openstack-openstack-cell1-lnlt2\" (UID: \"d673e2cc-043b-49a8-9426-715a50fdc54c\") " pod="openstack/reboot-os-openstack-openstack-cell1-lnlt2" Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.467787 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d673e2cc-043b-49a8-9426-715a50fdc54c-ssh-key-openstack-cell1\") pod \"reboot-os-openstack-openstack-cell1-lnlt2\" (UID: \"d673e2cc-043b-49a8-9426-715a50fdc54c\") " pod="openstack/reboot-os-openstack-openstack-cell1-lnlt2" Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.467832 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqllp\" (UniqueName: \"kubernetes.io/projected/d673e2cc-043b-49a8-9426-715a50fdc54c-kube-api-access-dqllp\") pod \"reboot-os-openstack-openstack-cell1-lnlt2\" (UID: \"d673e2cc-043b-49a8-9426-715a50fdc54c\") " pod="openstack/reboot-os-openstack-openstack-cell1-lnlt2" Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.570184 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d673e2cc-043b-49a8-9426-715a50fdc54c-inventory\") pod \"reboot-os-openstack-openstack-cell1-lnlt2\" (UID: \"d673e2cc-043b-49a8-9426-715a50fdc54c\") " pod="openstack/reboot-os-openstack-openstack-cell1-lnlt2" Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.570677 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d673e2cc-043b-49a8-9426-715a50fdc54c-ssh-key-openstack-cell1\") pod \"reboot-os-openstack-openstack-cell1-lnlt2\" (UID: \"d673e2cc-043b-49a8-9426-715a50fdc54c\") " pod="openstack/reboot-os-openstack-openstack-cell1-lnlt2" Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.570829 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqllp\" (UniqueName: \"kubernetes.io/projected/d673e2cc-043b-49a8-9426-715a50fdc54c-kube-api-access-dqllp\") pod \"reboot-os-openstack-openstack-cell1-lnlt2\" (UID: \"d673e2cc-043b-49a8-9426-715a50fdc54c\") " pod="openstack/reboot-os-openstack-openstack-cell1-lnlt2" Jan 22 14:04:50 crc 
kubenswrapper[4773]: I0122 14:04:50.573886 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d673e2cc-043b-49a8-9426-715a50fdc54c-ssh-key-openstack-cell1\") pod \"reboot-os-openstack-openstack-cell1-lnlt2\" (UID: \"d673e2cc-043b-49a8-9426-715a50fdc54c\") " pod="openstack/reboot-os-openstack-openstack-cell1-lnlt2"
Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.574581 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d673e2cc-043b-49a8-9426-715a50fdc54c-inventory\") pod \"reboot-os-openstack-openstack-cell1-lnlt2\" (UID: \"d673e2cc-043b-49a8-9426-715a50fdc54c\") " pod="openstack/reboot-os-openstack-openstack-cell1-lnlt2"
Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.594409 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqllp\" (UniqueName: \"kubernetes.io/projected/d673e2cc-043b-49a8-9426-715a50fdc54c-kube-api-access-dqllp\") pod \"reboot-os-openstack-openstack-cell1-lnlt2\" (UID: \"d673e2cc-043b-49a8-9426-715a50fdc54c\") " pod="openstack/reboot-os-openstack-openstack-cell1-lnlt2"
Jan 22 14:04:50 crc kubenswrapper[4773]: I0122 14:04:50.761915 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-lnlt2"
Jan 22 14:04:51 crc kubenswrapper[4773]: I0122 14:04:51.338572 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-lnlt2"]
Jan 22 14:04:51 crc kubenswrapper[4773]: W0122 14:04:51.339121 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd673e2cc_043b_49a8_9426_715a50fdc54c.slice/crio-83d42897e98ddaa137bdd4916843c6cf2f45dccebf7b08a56fcebe206ff04c41 WatchSource:0}: Error finding container 83d42897e98ddaa137bdd4916843c6cf2f45dccebf7b08a56fcebe206ff04c41: Status 404 returned error can't find the container with id 83d42897e98ddaa137bdd4916843c6cf2f45dccebf7b08a56fcebe206ff04c41
Jan 22 14:04:52 crc kubenswrapper[4773]: I0122 14:04:52.315791 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-lnlt2" event={"ID":"d673e2cc-043b-49a8-9426-715a50fdc54c","Type":"ContainerStarted","Data":"b3279ba02523cd129ccc0c23c084cc01dd5378a50e1ac00730d352dff99e755f"}
Jan 22 14:04:52 crc kubenswrapper[4773]: I0122 14:04:52.316070 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-lnlt2" event={"ID":"d673e2cc-043b-49a8-9426-715a50fdc54c","Type":"ContainerStarted","Data":"83d42897e98ddaa137bdd4916843c6cf2f45dccebf7b08a56fcebe206ff04c41"}
Jan 22 14:04:52 crc kubenswrapper[4773]: I0122 14:04:52.333020 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-openstack-openstack-cell1-lnlt2" podStartSLOduration=1.926141174 podStartE2EDuration="2.332998067s" podCreationTimestamp="2026-01-22 14:04:50 +0000 UTC" firstStartedPulling="2026-01-22 14:04:51.343218101 +0000 UTC m=+7798.921333926" lastFinishedPulling="2026-01-22 14:04:51.750074994 +0000 UTC m=+7799.328190819" observedRunningTime="2026-01-22 14:04:52.332981566 +0000 UTC m=+7799.911097401" watchObservedRunningTime="2026-01-22 14:04:52.332998067 +0000 UTC m=+7799.911113892"
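The two ContainerStarted events just above carry different IDs for the same pod: b3279ba0... is the job's workload container, while 83d42897... is the pod sandbox, the same ID that appears in the crio-83d42897... cgroup path of the preceding watch-event warning. Each "SyncLoop (PLEG): event for pod" entry serializes an event holding the pod UID, a type, and the affected container or sandbox ID. A struct mirroring what the log prints; field and type names are inferred from the output, not lifted from kubelet source:

package main

import "fmt"

// PodLifecycleEventType covers the Type strings visible in this log.
type PodLifecycleEventType string

const (
    ContainerStarted PodLifecycleEventType = "ContainerStarted"
    ContainerDied    PodLifecycleEventType = "ContainerDied"
)

// PodLifecycleEvent mirrors the event={"ID":...,"Type":...,"Data":...}
// payload printed by the PLEG entries above.
type PodLifecycleEvent struct {
    ID   string                // pod UID
    Type PodLifecycleEventType // what happened
    Data string                // container or sandbox ID
}

func main() {
    ev := PodLifecycleEvent{
        ID:   "d673e2cc-043b-49a8-9426-715a50fdc54c",
        Type: ContainerStarted,
        Data: "b3279ba02523cd129ccc0c23c084cc01dd5378a50e1ac00730d352dff99e755f",
    }
    fmt.Printf("event=%+v\n", ev)
}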
containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:05:01 crc kubenswrapper[4773]: E0122 14:05:01.659933 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:05:08 crc kubenswrapper[4773]: I0122 14:05:08.679999 4773 generic.go:334] "Generic (PLEG): container finished" podID="d673e2cc-043b-49a8-9426-715a50fdc54c" containerID="b3279ba02523cd129ccc0c23c084cc01dd5378a50e1ac00730d352dff99e755f" exitCode=0 Jan 22 14:05:08 crc kubenswrapper[4773]: I0122 14:05:08.680101 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-lnlt2" event={"ID":"d673e2cc-043b-49a8-9426-715a50fdc54c","Type":"ContainerDied","Data":"b3279ba02523cd129ccc0c23c084cc01dd5378a50e1ac00730d352dff99e755f"} Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.139821 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-lnlt2" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.243891 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d673e2cc-043b-49a8-9426-715a50fdc54c-ssh-key-openstack-cell1\") pod \"d673e2cc-043b-49a8-9426-715a50fdc54c\" (UID: \"d673e2cc-043b-49a8-9426-715a50fdc54c\") " Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.243974 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d673e2cc-043b-49a8-9426-715a50fdc54c-inventory\") pod \"d673e2cc-043b-49a8-9426-715a50fdc54c\" (UID: \"d673e2cc-043b-49a8-9426-715a50fdc54c\") " Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.244021 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqllp\" (UniqueName: \"kubernetes.io/projected/d673e2cc-043b-49a8-9426-715a50fdc54c-kube-api-access-dqllp\") pod \"d673e2cc-043b-49a8-9426-715a50fdc54c\" (UID: \"d673e2cc-043b-49a8-9426-715a50fdc54c\") " Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.250376 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d673e2cc-043b-49a8-9426-715a50fdc54c-kube-api-access-dqllp" (OuterVolumeSpecName: "kube-api-access-dqllp") pod "d673e2cc-043b-49a8-9426-715a50fdc54c" (UID: "d673e2cc-043b-49a8-9426-715a50fdc54c"). InnerVolumeSpecName "kube-api-access-dqllp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.274767 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d673e2cc-043b-49a8-9426-715a50fdc54c-inventory" (OuterVolumeSpecName: "inventory") pod "d673e2cc-043b-49a8-9426-715a50fdc54c" (UID: "d673e2cc-043b-49a8-9426-715a50fdc54c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.286854 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d673e2cc-043b-49a8-9426-715a50fdc54c-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "d673e2cc-043b-49a8-9426-715a50fdc54c" (UID: "d673e2cc-043b-49a8-9426-715a50fdc54c"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.347402 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/d673e2cc-043b-49a8-9426-715a50fdc54c-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.347733 4773 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d673e2cc-043b-49a8-9426-715a50fdc54c-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.347748 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqllp\" (UniqueName: \"kubernetes.io/projected/d673e2cc-043b-49a8-9426-715a50fdc54c-kube-api-access-dqllp\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.702353 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-lnlt2" event={"ID":"d673e2cc-043b-49a8-9426-715a50fdc54c","Type":"ContainerDied","Data":"83d42897e98ddaa137bdd4916843c6cf2f45dccebf7b08a56fcebe206ff04c41"} Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.702421 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83d42897e98ddaa137bdd4916843c6cf2f45dccebf7b08a56fcebe206ff04c41" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.702439 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-lnlt2" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.810514 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-t788q"] Jan 22 14:05:10 crc kubenswrapper[4773]: E0122 14:05:10.811050 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d673e2cc-043b-49a8-9426-715a50fdc54c" containerName="reboot-os-openstack-openstack-cell1" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.811069 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d673e2cc-043b-49a8-9426-715a50fdc54c" containerName="reboot-os-openstack-openstack-cell1" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.811325 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="d673e2cc-043b-49a8-9426-715a50fdc54c" containerName="reboot-os-openstack-openstack-cell1" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.812144 4773 util.go:30] "No sandbox for pod can be found. 
Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.812144 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-t788q"
Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.815110 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-neutron-metadata-default-certs-0"
Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.815479 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-telemetry-default-certs-0"
Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.820139 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-t788q"]
Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.825161 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-bzqnq"
Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.827792 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.827790 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.827885 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-ovn-default-certs-0"
Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.827927 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.829002 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-libvirt-default-certs-0"
Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.960153 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-neutron-metadata-default-certs-0\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q"
Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.960238 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q"
Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.960269 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q"
Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.960317 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-ssh-key-openstack-cell1\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " 
pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.960387 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.960423 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-inventory\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.960472 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.960503 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.960602 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-telemetry-default-certs-0\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.960812 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.960906 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2hwl\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-kube-api-access-j2hwl\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.960980 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1-libvirt-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-libvirt-default-certs-0\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.961009 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-ovn-default-certs-0\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.961035 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:10 crc kubenswrapper[4773]: I0122 14:05:10.961130 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.062676 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-telemetry-default-certs-0\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.062775 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.062824 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2hwl\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-kube-api-access-j2hwl\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.062890 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-libvirt-default-certs-0\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc 
kubenswrapper[4773]: I0122 14:05:11.062912 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-ovn-default-certs-0\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.062929 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.062973 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.063005 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-neutron-metadata-default-certs-0\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.063029 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.063048 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.063075 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-ssh-key-openstack-cell1\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.063104 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " 
pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.063125 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-inventory\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.063159 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.063183 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.067951 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.068416 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.068807 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.069312 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.069673 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " 
pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.071913 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.073016 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-telemetry-default-certs-0\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.073186 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-libvirt-default-certs-0\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.073618 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-ssh-key-openstack-cell1\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.073669 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-inventory\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.073829 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-ovn-default-certs-0\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.074782 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.075252 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-neutron-metadata-default-certs-0\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: 
\"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.085259 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.088407 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2hwl\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-kube-api-access-j2hwl\") pod \"install-certs-openstack-openstack-cell1-t788q\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.144353 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.689345 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-t788q"] Jan 22 14:05:11 crc kubenswrapper[4773]: I0122 14:05:11.712389 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-t788q" event={"ID":"f7ede466-9619-44bb-af40-1dd8e773f7b3","Type":"ContainerStarted","Data":"f5d72f88ed8152650b21660682bdd0c5523ff9b44845ec0baf43e381d6f7554f"} Jan 22 14:05:12 crc kubenswrapper[4773]: I0122 14:05:12.731069 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-t788q" event={"ID":"f7ede466-9619-44bb-af40-1dd8e773f7b3","Type":"ContainerStarted","Data":"9e72c90e80bda2decbf8de4de7e62de1883f1d4b4f3666e1b65bb7da1ee3e13c"} Jan 22 14:05:12 crc kubenswrapper[4773]: I0122 14:05:12.753578 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-openstack-openstack-cell1-t788q" podStartSLOduration=2.347032147 podStartE2EDuration="2.75355861s" podCreationTimestamp="2026-01-22 14:05:10 +0000 UTC" firstStartedPulling="2026-01-22 14:05:11.692178517 +0000 UTC m=+7819.270294342" lastFinishedPulling="2026-01-22 14:05:12.09870499 +0000 UTC m=+7819.676820805" observedRunningTime="2026-01-22 14:05:12.749246058 +0000 UTC m=+7820.327361903" watchObservedRunningTime="2026-01-22 14:05:12.75355861 +0000 UTC m=+7820.331674435" Jan 22 14:05:13 crc kubenswrapper[4773]: I0122 14:05:13.658815 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:05:13 crc kubenswrapper[4773]: E0122 14:05:13.659142 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:05:28 crc kubenswrapper[4773]: I0122 14:05:28.658226 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:05:28 crc 
kubenswrapper[4773]: E0122 14:05:28.659093 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:05:39 crc kubenswrapper[4773]: I0122 14:05:39.658466 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:05:39 crc kubenswrapper[4773]: E0122 14:05:39.659485 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:05:49 crc kubenswrapper[4773]: I0122 14:05:49.126152 4773 generic.go:334] "Generic (PLEG): container finished" podID="f7ede466-9619-44bb-af40-1dd8e773f7b3" containerID="9e72c90e80bda2decbf8de4de7e62de1883f1d4b4f3666e1b65bb7da1ee3e13c" exitCode=0 Jan 22 14:05:49 crc kubenswrapper[4773]: I0122 14:05:49.127100 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-t788q" event={"ID":"f7ede466-9619-44bb-af40-1dd8e773f7b3","Type":"ContainerDied","Data":"9e72c90e80bda2decbf8de4de7e62de1883f1d4b4f3666e1b65bb7da1ee3e13c"} Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.564033 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.658413 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:05:50 crc kubenswrapper[4773]: E0122 14:05:50.658698 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.706690 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-ovn-combined-ca-bundle\") pod \"f7ede466-9619-44bb-af40-1dd8e773f7b3\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.706752 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-neutron-metadata-combined-ca-bundle\") pod \"f7ede466-9619-44bb-af40-1dd8e773f7b3\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.706821 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-ovn-default-certs-0\") pod \"f7ede466-9619-44bb-af40-1dd8e773f7b3\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.706922 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-inventory\") pod \"f7ede466-9619-44bb-af40-1dd8e773f7b3\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.707137 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-nova-combined-ca-bundle\") pod \"f7ede466-9619-44bb-af40-1dd8e773f7b3\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.707189 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-neutron-sriov-combined-ca-bundle\") pod \"f7ede466-9619-44bb-af40-1dd8e773f7b3\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.707214 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-libvirt-default-certs-0\") pod \"f7ede466-9619-44bb-af40-1dd8e773f7b3\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.707271 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"openstack-cell1-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-neutron-metadata-default-certs-0\") pod \"f7ede466-9619-44bb-af40-1dd8e773f7b3\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.707343 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2hwl\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-kube-api-access-j2hwl\") pod \"f7ede466-9619-44bb-af40-1dd8e773f7b3\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.707387 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-libvirt-combined-ca-bundle\") pod \"f7ede466-9619-44bb-af40-1dd8e773f7b3\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.707415 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-telemetry-default-certs-0\") pod \"f7ede466-9619-44bb-af40-1dd8e773f7b3\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.707469 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-telemetry-combined-ca-bundle\") pod \"f7ede466-9619-44bb-af40-1dd8e773f7b3\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.707572 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-neutron-dhcp-combined-ca-bundle\") pod \"f7ede466-9619-44bb-af40-1dd8e773f7b3\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.707625 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-bootstrap-combined-ca-bundle\") pod \"f7ede466-9619-44bb-af40-1dd8e773f7b3\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.707663 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-ssh-key-openstack-cell1\") pod \"f7ede466-9619-44bb-af40-1dd8e773f7b3\" (UID: \"f7ede466-9619-44bb-af40-1dd8e773f7b3\") " Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.714669 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "f7ede466-9619-44bb-af40-1dd8e773f7b3" (UID: "f7ede466-9619-44bb-af40-1dd8e773f7b3"). InnerVolumeSpecName "telemetry-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.714723 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "f7ede466-9619-44bb-af40-1dd8e773f7b3" (UID: "f7ede466-9619-44bb-af40-1dd8e773f7b3"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.715070 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "f7ede466-9619-44bb-af40-1dd8e773f7b3" (UID: "f7ede466-9619-44bb-af40-1dd8e773f7b3"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.715096 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "f7ede466-9619-44bb-af40-1dd8e773f7b3" (UID: "f7ede466-9619-44bb-af40-1dd8e773f7b3"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.716143 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-cell1-libvirt-default-certs-0") pod "f7ede466-9619-44bb-af40-1dd8e773f7b3" (UID: "f7ede466-9619-44bb-af40-1dd8e773f7b3"). InnerVolumeSpecName "openstack-cell1-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.715380 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "f7ede466-9619-44bb-af40-1dd8e773f7b3" (UID: "f7ede466-9619-44bb-af40-1dd8e773f7b3"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.715475 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-cell1-neutron-metadata-default-certs-0") pod "f7ede466-9619-44bb-af40-1dd8e773f7b3" (UID: "f7ede466-9619-44bb-af40-1dd8e773f7b3"). InnerVolumeSpecName "openstack-cell1-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.717049 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "f7ede466-9619-44bb-af40-1dd8e773f7b3" (UID: "f7ede466-9619-44bb-af40-1dd8e773f7b3"). InnerVolumeSpecName "libvirt-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.717080 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-cell1-telemetry-default-certs-0") pod "f7ede466-9619-44bb-af40-1dd8e773f7b3" (UID: "f7ede466-9619-44bb-af40-1dd8e773f7b3"). InnerVolumeSpecName "openstack-cell1-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.717408 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-cell1-ovn-default-certs-0") pod "f7ede466-9619-44bb-af40-1dd8e773f7b3" (UID: "f7ede466-9619-44bb-af40-1dd8e773f7b3"). InnerVolumeSpecName "openstack-cell1-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.717959 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "f7ede466-9619-44bb-af40-1dd8e773f7b3" (UID: "f7ede466-9619-44bb-af40-1dd8e773f7b3"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.718250 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-kube-api-access-j2hwl" (OuterVolumeSpecName: "kube-api-access-j2hwl") pod "f7ede466-9619-44bb-af40-1dd8e773f7b3" (UID: "f7ede466-9619-44bb-af40-1dd8e773f7b3"). InnerVolumeSpecName "kube-api-access-j2hwl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.732607 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "f7ede466-9619-44bb-af40-1dd8e773f7b3" (UID: "f7ede466-9619-44bb-af40-1dd8e773f7b3"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.739706 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "f7ede466-9619-44bb-af40-1dd8e773f7b3" (UID: "f7ede466-9619-44bb-af40-1dd8e773f7b3"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.751412 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-inventory" (OuterVolumeSpecName: "inventory") pod "f7ede466-9619-44bb-af40-1dd8e773f7b3" (UID: "f7ede466-9619-44bb-af40-1dd8e773f7b3"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.811945 4773 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.812308 4773 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.812340 4773 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.812358 4773 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.812376 4773 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.812392 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2hwl\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-kube-api-access-j2hwl\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.812406 4773 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.812420 4773 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.812437 4773 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.812451 4773 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.812465 4773 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.812478 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: 
\"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.812493 4773 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.812506 4773 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7ede466-9619-44bb-af40-1dd8e773f7b3-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:50 crc kubenswrapper[4773]: I0122 14:05:50.812521 4773 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f7ede466-9619-44bb-af40-1dd8e773f7b3-openstack-cell1-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.153157 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-t788q" event={"ID":"f7ede466-9619-44bb-af40-1dd8e773f7b3","Type":"ContainerDied","Data":"f5d72f88ed8152650b21660682bdd0c5523ff9b44845ec0baf43e381d6f7554f"} Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.153213 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5d72f88ed8152650b21660682bdd0c5523ff9b44845ec0baf43e381d6f7554f" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.153347 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-t788q" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.274082 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-openstack-openstack-cell1-rvtvw"] Jan 22 14:05:51 crc kubenswrapper[4773]: E0122 14:05:51.274614 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7ede466-9619-44bb-af40-1dd8e773f7b3" containerName="install-certs-openstack-openstack-cell1" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.274633 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7ede466-9619-44bb-af40-1dd8e773f7b3" containerName="install-certs-openstack-openstack-cell1" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.274882 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7ede466-9619-44bb-af40-1dd8e773f7b3" containerName="install-certs-openstack-openstack-cell1" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.275730 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-rvtvw" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.278335 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.279245 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.279325 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.279325 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.279612 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-bzqnq" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.284125 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-rvtvw"] Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.424881 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-ssh-key-openstack-cell1\") pod \"ovn-openstack-openstack-cell1-rvtvw\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " pod="openstack/ovn-openstack-openstack-cell1-rvtvw" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.425192 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-rvtvw\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " pod="openstack/ovn-openstack-openstack-cell1-rvtvw" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.425450 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wcc8\" (UniqueName: \"kubernetes.io/projected/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-kube-api-access-2wcc8\") pod \"ovn-openstack-openstack-cell1-rvtvw\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " pod="openstack/ovn-openstack-openstack-cell1-rvtvw" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.425516 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-rvtvw\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " pod="openstack/ovn-openstack-openstack-cell1-rvtvw" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.425818 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-inventory\") pod \"ovn-openstack-openstack-cell1-rvtvw\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " pod="openstack/ovn-openstack-openstack-cell1-rvtvw" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.528132 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-ovn-combined-ca-bundle\") pod 
\"ovn-openstack-openstack-cell1-rvtvw\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " pod="openstack/ovn-openstack-openstack-cell1-rvtvw" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.528240 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wcc8\" (UniqueName: \"kubernetes.io/projected/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-kube-api-access-2wcc8\") pod \"ovn-openstack-openstack-cell1-rvtvw\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " pod="openstack/ovn-openstack-openstack-cell1-rvtvw" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.528312 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-rvtvw\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " pod="openstack/ovn-openstack-openstack-cell1-rvtvw" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.528459 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-inventory\") pod \"ovn-openstack-openstack-cell1-rvtvw\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " pod="openstack/ovn-openstack-openstack-cell1-rvtvw" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.528528 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-ssh-key-openstack-cell1\") pod \"ovn-openstack-openstack-cell1-rvtvw\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " pod="openstack/ovn-openstack-openstack-cell1-rvtvw" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.529355 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-rvtvw\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " pod="openstack/ovn-openstack-openstack-cell1-rvtvw" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.532749 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-ssh-key-openstack-cell1\") pod \"ovn-openstack-openstack-cell1-rvtvw\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " pod="openstack/ovn-openstack-openstack-cell1-rvtvw" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.532888 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-rvtvw\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " pod="openstack/ovn-openstack-openstack-cell1-rvtvw" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.543748 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-inventory\") pod \"ovn-openstack-openstack-cell1-rvtvw\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " pod="openstack/ovn-openstack-openstack-cell1-rvtvw" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.557383 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-2wcc8\" (UniqueName: \"kubernetes.io/projected/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-kube-api-access-2wcc8\") pod \"ovn-openstack-openstack-cell1-rvtvw\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " pod="openstack/ovn-openstack-openstack-cell1-rvtvw" Jan 22 14:05:51 crc kubenswrapper[4773]: I0122 14:05:51.591797 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-rvtvw" Jan 22 14:05:52 crc kubenswrapper[4773]: I0122 14:05:52.011591 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-rvtvw"] Jan 22 14:05:52 crc kubenswrapper[4773]: I0122 14:05:52.165876 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-rvtvw" event={"ID":"5f66e0f1-ce22-40a2-b251-c010c5c57aa9","Type":"ContainerStarted","Data":"bbbb7da7cbde22c82f91e13fce1fbff50b2fd0c9df185cb6676fbef936ef3df1"} Jan 22 14:05:53 crc kubenswrapper[4773]: I0122 14:05:53.177972 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-rvtvw" event={"ID":"5f66e0f1-ce22-40a2-b251-c010c5c57aa9","Type":"ContainerStarted","Data":"73cea5c62c6c45aafe23be08dd5f0d3454c2481d26ff0991b5786b03f878fb0b"} Jan 22 14:05:53 crc kubenswrapper[4773]: I0122 14:05:53.207245 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-openstack-openstack-cell1-rvtvw" podStartSLOduration=1.745792327 podStartE2EDuration="2.207222386s" podCreationTimestamp="2026-01-22 14:05:51 +0000 UTC" firstStartedPulling="2026-01-22 14:05:52.035458603 +0000 UTC m=+7859.613574428" lastFinishedPulling="2026-01-22 14:05:52.496888662 +0000 UTC m=+7860.075004487" observedRunningTime="2026-01-22 14:05:53.199625711 +0000 UTC m=+7860.777741546" watchObservedRunningTime="2026-01-22 14:05:53.207222386 +0000 UTC m=+7860.785338211" Jan 22 14:06:05 crc kubenswrapper[4773]: I0122 14:06:05.659165 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:06:05 crc kubenswrapper[4773]: E0122 14:06:05.660188 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:06:18 crc kubenswrapper[4773]: I0122 14:06:18.658455 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:06:18 crc kubenswrapper[4773]: E0122 14:06:18.659191 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:06:29 crc kubenswrapper[4773]: I0122 14:06:29.658661 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:06:29 crc kubenswrapper[4773]: E0122 14:06:29.659690 4773 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:06:43 crc kubenswrapper[4773]: I0122 14:06:43.658746 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:06:43 crc kubenswrapper[4773]: E0122 14:06:43.659502 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:06:53 crc kubenswrapper[4773]: I0122 14:06:53.840656 4773 generic.go:334] "Generic (PLEG): container finished" podID="5f66e0f1-ce22-40a2-b251-c010c5c57aa9" containerID="73cea5c62c6c45aafe23be08dd5f0d3454c2481d26ff0991b5786b03f878fb0b" exitCode=0 Jan 22 14:06:53 crc kubenswrapper[4773]: I0122 14:06:53.840742 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-rvtvw" event={"ID":"5f66e0f1-ce22-40a2-b251-c010c5c57aa9","Type":"ContainerDied","Data":"73cea5c62c6c45aafe23be08dd5f0d3454c2481d26ff0991b5786b03f878fb0b"} Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.659117 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:06:55 crc kubenswrapper[4773]: E0122 14:06:55.660066 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.715171 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-rvtvw" Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.863902 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-rvtvw" event={"ID":"5f66e0f1-ce22-40a2-b251-c010c5c57aa9","Type":"ContainerDied","Data":"bbbb7da7cbde22c82f91e13fce1fbff50b2fd0c9df185cb6676fbef936ef3df1"} Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.863946 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bbbb7da7cbde22c82f91e13fce1fbff50b2fd0c9df185cb6676fbef936ef3df1" Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.863997 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-rvtvw" Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.904107 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-ovncontroller-config-0\") pod \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.904215 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-inventory\") pod \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.904885 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wcc8\" (UniqueName: \"kubernetes.io/projected/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-kube-api-access-2wcc8\") pod \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.904959 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-ovn-combined-ca-bundle\") pod \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.905086 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-ssh-key-openstack-cell1\") pod \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\" (UID: \"5f66e0f1-ce22-40a2-b251-c010c5c57aa9\") " Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.913215 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-kube-api-access-2wcc8" (OuterVolumeSpecName: "kube-api-access-2wcc8") pod "5f66e0f1-ce22-40a2-b251-c010c5c57aa9" (UID: "5f66e0f1-ce22-40a2-b251-c010c5c57aa9"). InnerVolumeSpecName "kube-api-access-2wcc8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.948972 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "5f66e0f1-ce22-40a2-b251-c010c5c57aa9" (UID: "5f66e0f1-ce22-40a2-b251-c010c5c57aa9"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.953020 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "5f66e0f1-ce22-40a2-b251-c010c5c57aa9" (UID: "5f66e0f1-ce22-40a2-b251-c010c5c57aa9"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.953582 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "5f66e0f1-ce22-40a2-b251-c010c5c57aa9" (UID: "5f66e0f1-ce22-40a2-b251-c010c5c57aa9"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.970665 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-inventory" (OuterVolumeSpecName: "inventory") pod "5f66e0f1-ce22-40a2-b251-c010c5c57aa9" (UID: "5f66e0f1-ce22-40a2-b251-c010c5c57aa9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.988926 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-vhsn4"] Jan 22 14:06:55 crc kubenswrapper[4773]: E0122 14:06:55.989426 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f66e0f1-ce22-40a2-b251-c010c5c57aa9" containerName="ovn-openstack-openstack-cell1" Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.989448 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f66e0f1-ce22-40a2-b251-c010c5c57aa9" containerName="ovn-openstack-openstack-cell1" Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.989718 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f66e0f1-ce22-40a2-b251-c010c5c57aa9" containerName="ovn-openstack-openstack-cell1" Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.990654 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.996225 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Jan 22 14:06:55 crc kubenswrapper[4773]: I0122 14:06:55.996992 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.004522 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-vhsn4"] Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.014957 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-vhsn4\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.015080 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-vhsn4\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.015164 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-ssh-key-openstack-cell1\") pod \"neutron-metadata-openstack-openstack-cell1-vhsn4\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.015466 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-vhsn4\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.015527 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9qvw\" (UniqueName: \"kubernetes.io/projected/1cacf4a2-5633-412a-8e19-0f0c81b673c0-kube-api-access-t9qvw\") pod \"neutron-metadata-openstack-openstack-cell1-vhsn4\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.015556 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-vhsn4\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.019435 4773 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-2wcc8\" (UniqueName: \"kubernetes.io/projected/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-kube-api-access-2wcc8\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.019549 4773 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.019567 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.019581 4773 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.019595 4773 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f66e0f1-ce22-40a2-b251-c010c5c57aa9-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.121815 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-vhsn4\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.122163 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-ssh-key-openstack-cell1\") pod \"neutron-metadata-openstack-openstack-cell1-vhsn4\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.122373 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-vhsn4\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.122521 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-vhsn4\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.122642 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9qvw\" (UniqueName: \"kubernetes.io/projected/1cacf4a2-5633-412a-8e19-0f0c81b673c0-kube-api-access-t9qvw\") pod \"neutron-metadata-openstack-openstack-cell1-vhsn4\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 
14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.122802 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-vhsn4\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.126107 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-vhsn4\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.126415 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-vhsn4\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.126472 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-vhsn4\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.127671 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-vhsn4\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.128028 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-ssh-key-openstack-cell1\") pod \"neutron-metadata-openstack-openstack-cell1-vhsn4\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.139305 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9qvw\" (UniqueName: \"kubernetes.io/projected/1cacf4a2-5633-412a-8e19-0f0c81b673c0-kube-api-access-t9qvw\") pod \"neutron-metadata-openstack-openstack-cell1-vhsn4\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.365116 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:06:56 crc kubenswrapper[4773]: I0122 14:06:56.995415 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-vhsn4"] Jan 22 14:06:57 crc kubenswrapper[4773]: I0122 14:06:57.885896 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" event={"ID":"1cacf4a2-5633-412a-8e19-0f0c81b673c0","Type":"ContainerStarted","Data":"dd4c7a32351d002832aee84e5cc5202034dbd1b08c83d5e66f017917299b75c8"} Jan 22 14:06:58 crc kubenswrapper[4773]: I0122 14:06:58.898154 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" event={"ID":"1cacf4a2-5633-412a-8e19-0f0c81b673c0","Type":"ContainerStarted","Data":"212a896b2fdbea739d5b65984546a7a2c35043b13d96e5f499bbe109135bbe95"} Jan 22 14:06:58 crc kubenswrapper[4773]: I0122 14:06:58.922974 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" podStartSLOduration=3.118697578 podStartE2EDuration="3.922953405s" podCreationTimestamp="2026-01-22 14:06:55 +0000 UTC" firstStartedPulling="2026-01-22 14:06:57.000924195 +0000 UTC m=+7924.579040020" lastFinishedPulling="2026-01-22 14:06:57.805180022 +0000 UTC m=+7925.383295847" observedRunningTime="2026-01-22 14:06:58.912140359 +0000 UTC m=+7926.490256184" watchObservedRunningTime="2026-01-22 14:06:58.922953405 +0000 UTC m=+7926.501069230" Jan 22 14:07:09 crc kubenswrapper[4773]: I0122 14:07:09.659012 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:07:09 crc kubenswrapper[4773]: E0122 14:07:09.660163 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:07:22 crc kubenswrapper[4773]: I0122 14:07:22.667248 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:07:22 crc kubenswrapper[4773]: E0122 14:07:22.668329 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:07:34 crc kubenswrapper[4773]: I0122 14:07:34.658799 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:07:34 crc kubenswrapper[4773]: E0122 14:07:34.659737 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:07:49 crc kubenswrapper[4773]: I0122 14:07:49.658187 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:07:49 crc kubenswrapper[4773]: E0122 14:07:49.659115 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:07:50 crc kubenswrapper[4773]: I0122 14:07:50.438361 4773 generic.go:334] "Generic (PLEG): container finished" podID="1cacf4a2-5633-412a-8e19-0f0c81b673c0" containerID="212a896b2fdbea739d5b65984546a7a2c35043b13d96e5f499bbe109135bbe95" exitCode=0 Jan 22 14:07:50 crc kubenswrapper[4773]: I0122 14:07:50.438429 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" event={"ID":"1cacf4a2-5633-412a-8e19-0f0c81b673c0","Type":"ContainerDied","Data":"212a896b2fdbea739d5b65984546a7a2c35043b13d96e5f499bbe109135bbe95"} Jan 22 14:07:51 crc kubenswrapper[4773]: I0122 14:07:51.920570 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.088443 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-inventory\") pod \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.088733 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-neutron-ovn-metadata-agent-neutron-config-0\") pod \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.088793 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-nova-metadata-neutron-config-0\") pod \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.088869 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-ssh-key-openstack-cell1\") pod \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.089051 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9qvw\" (UniqueName: \"kubernetes.io/projected/1cacf4a2-5633-412a-8e19-0f0c81b673c0-kube-api-access-t9qvw\") pod \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.089116 4773 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-neutron-metadata-combined-ca-bundle\") pod \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\" (UID: \"1cacf4a2-5633-412a-8e19-0f0c81b673c0\") " Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.094221 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "1cacf4a2-5633-412a-8e19-0f0c81b673c0" (UID: "1cacf4a2-5633-412a-8e19-0f0c81b673c0"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.094310 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cacf4a2-5633-412a-8e19-0f0c81b673c0-kube-api-access-t9qvw" (OuterVolumeSpecName: "kube-api-access-t9qvw") pod "1cacf4a2-5633-412a-8e19-0f0c81b673c0" (UID: "1cacf4a2-5633-412a-8e19-0f0c81b673c0"). InnerVolumeSpecName "kube-api-access-t9qvw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.118809 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-inventory" (OuterVolumeSpecName: "inventory") pod "1cacf4a2-5633-412a-8e19-0f0c81b673c0" (UID: "1cacf4a2-5633-412a-8e19-0f0c81b673c0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.122442 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "1cacf4a2-5633-412a-8e19-0f0c81b673c0" (UID: "1cacf4a2-5633-412a-8e19-0f0c81b673c0"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.128273 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "1cacf4a2-5633-412a-8e19-0f0c81b673c0" (UID: "1cacf4a2-5633-412a-8e19-0f0c81b673c0"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.129554 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "1cacf4a2-5633-412a-8e19-0f0c81b673c0" (UID: "1cacf4a2-5633-412a-8e19-0f0c81b673c0"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.192180 4773 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.192567 4773 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.192714 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.192821 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9qvw\" (UniqueName: \"kubernetes.io/projected/1cacf4a2-5633-412a-8e19-0f0c81b673c0-kube-api-access-t9qvw\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.192986 4773 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.193078 4773 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1cacf4a2-5633-412a-8e19-0f0c81b673c0-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.460637 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" event={"ID":"1cacf4a2-5633-412a-8e19-0f0c81b673c0","Type":"ContainerDied","Data":"dd4c7a32351d002832aee84e5cc5202034dbd1b08c83d5e66f017917299b75c8"} Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.461168 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd4c7a32351d002832aee84e5cc5202034dbd1b08c83d5e66f017917299b75c8" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.460748 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-vhsn4" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.596330 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-vmjs4"] Jan 22 14:07:52 crc kubenswrapper[4773]: E0122 14:07:52.597021 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cacf4a2-5633-412a-8e19-0f0c81b673c0" containerName="neutron-metadata-openstack-openstack-cell1" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.597041 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cacf4a2-5633-412a-8e19-0f0c81b673c0" containerName="neutron-metadata-openstack-openstack-cell1" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.597513 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cacf4a2-5633-412a-8e19-0f0c81b673c0" containerName="neutron-metadata-openstack-openstack-cell1" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.598572 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.605955 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.606151 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-bzqnq" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.606265 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.606421 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.606885 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.607830 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-vmjs4"] Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.714334 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-inventory\") pod \"libvirt-openstack-openstack-cell1-vmjs4\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.714474 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-ssh-key-openstack-cell1\") pod \"libvirt-openstack-openstack-cell1-vmjs4\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.714646 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-vmjs4\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.714754 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-vmjs4\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.715255 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5vcp\" (UniqueName: \"kubernetes.io/projected/cc3dfbd1-565d-4291-806a-65c53fd7b75f-kube-api-access-g5vcp\") pod \"libvirt-openstack-openstack-cell1-vmjs4\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.817126 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5vcp\" (UniqueName: 
\"kubernetes.io/projected/cc3dfbd1-565d-4291-806a-65c53fd7b75f-kube-api-access-g5vcp\") pod \"libvirt-openstack-openstack-cell1-vmjs4\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.817443 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-inventory\") pod \"libvirt-openstack-openstack-cell1-vmjs4\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.817534 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-ssh-key-openstack-cell1\") pod \"libvirt-openstack-openstack-cell1-vmjs4\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.817582 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-vmjs4\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.817640 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-vmjs4\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.822687 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-vmjs4\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.822828 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-inventory\") pod \"libvirt-openstack-openstack-cell1-vmjs4\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.823529 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-ssh-key-openstack-cell1\") pod \"libvirt-openstack-openstack-cell1-vmjs4\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.831347 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-vmjs4\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" Jan 22 
14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.841017 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5vcp\" (UniqueName: \"kubernetes.io/projected/cc3dfbd1-565d-4291-806a-65c53fd7b75f-kube-api-access-g5vcp\") pod \"libvirt-openstack-openstack-cell1-vmjs4\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" Jan 22 14:07:52 crc kubenswrapper[4773]: I0122 14:07:52.918772 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" Jan 22 14:07:53 crc kubenswrapper[4773]: I0122 14:07:53.555897 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-vmjs4"] Jan 22 14:07:53 crc kubenswrapper[4773]: I0122 14:07:53.566771 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 14:07:54 crc kubenswrapper[4773]: I0122 14:07:54.124674 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 14:07:54 crc kubenswrapper[4773]: I0122 14:07:54.497797 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" event={"ID":"cc3dfbd1-565d-4291-806a-65c53fd7b75f","Type":"ContainerStarted","Data":"04601b89158da66eff1519a1fefe074168ab26521ea7e1ffbab31f8be37b0f57"} Jan 22 14:07:55 crc kubenswrapper[4773]: I0122 14:07:55.517094 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" event={"ID":"cc3dfbd1-565d-4291-806a-65c53fd7b75f","Type":"ContainerStarted","Data":"e03ec29be933837263434520baf7a048780b3472ba211b4b155e3bc664120d7d"} Jan 22 14:07:55 crc kubenswrapper[4773]: I0122 14:07:55.555419 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" podStartSLOduration=3.000641811 podStartE2EDuration="3.555388435s" podCreationTimestamp="2026-01-22 14:07:52 +0000 UTC" firstStartedPulling="2026-01-22 14:07:53.566417348 +0000 UTC m=+7981.144533173" lastFinishedPulling="2026-01-22 14:07:54.121163982 +0000 UTC m=+7981.699279797" observedRunningTime="2026-01-22 14:07:55.546813162 +0000 UTC m=+7983.124929017" watchObservedRunningTime="2026-01-22 14:07:55.555388435 +0000 UTC m=+7983.133504270" Jan 22 14:08:04 crc kubenswrapper[4773]: I0122 14:08:04.658634 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:08:04 crc kubenswrapper[4773]: E0122 14:08:04.659763 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:08:17 crc kubenswrapper[4773]: I0122 14:08:17.657899 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:08:17 crc kubenswrapper[4773]: E0122 14:08:17.658826 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:08:32 crc kubenswrapper[4773]: I0122 14:08:32.665109 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:08:32 crc kubenswrapper[4773]: E0122 14:08:32.665831 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:08:46 crc kubenswrapper[4773]: I0122 14:08:46.658401 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:08:46 crc kubenswrapper[4773]: E0122 14:08:46.659100 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:08:58 crc kubenswrapper[4773]: I0122 14:08:58.657933 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:08:58 crc kubenswrapper[4773]: E0122 14:08:58.658846 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:09:11 crc kubenswrapper[4773]: I0122 14:09:11.658078 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:09:12 crc kubenswrapper[4773]: I0122 14:09:12.643738 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"9a9766962cd095cd50b29838c89ec652a579111c490c476653932e62ff669648"} Jan 22 14:11:34 crc kubenswrapper[4773]: I0122 14:11:34.074227 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:11:34 crc kubenswrapper[4773]: I0122 14:11:34.074805 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:12:04 crc kubenswrapper[4773]: I0122 
14:12:04.073953 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:12:04 crc kubenswrapper[4773]: I0122 14:12:04.074586 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:12:21 crc kubenswrapper[4773]: I0122 14:12:21.316533 4773 generic.go:334] "Generic (PLEG): container finished" podID="cc3dfbd1-565d-4291-806a-65c53fd7b75f" containerID="e03ec29be933837263434520baf7a048780b3472ba211b4b155e3bc664120d7d" exitCode=0 Jan 22 14:12:21 crc kubenswrapper[4773]: I0122 14:12:21.316674 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" event={"ID":"cc3dfbd1-565d-4291-806a-65c53fd7b75f","Type":"ContainerDied","Data":"e03ec29be933837263434520baf7a048780b3472ba211b4b155e3bc664120d7d"} Jan 22 14:12:22 crc kubenswrapper[4773]: I0122 14:12:22.858170 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.084993 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-inventory\") pod \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.085055 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-ssh-key-openstack-cell1\") pod \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.085142 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-libvirt-secret-0\") pod \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.085209 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5vcp\" (UniqueName: \"kubernetes.io/projected/cc3dfbd1-565d-4291-806a-65c53fd7b75f-kube-api-access-g5vcp\") pod \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.085295 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-libvirt-combined-ca-bundle\") pod \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\" (UID: \"cc3dfbd1-565d-4291-806a-65c53fd7b75f\") " Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.171658 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-libvirt-combined-ca-bundle" (OuterVolumeSpecName: 
"libvirt-combined-ca-bundle") pod "cc3dfbd1-565d-4291-806a-65c53fd7b75f" (UID: "cc3dfbd1-565d-4291-806a-65c53fd7b75f"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.206925 4773 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.227562 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc3dfbd1-565d-4291-806a-65c53fd7b75f-kube-api-access-g5vcp" (OuterVolumeSpecName: "kube-api-access-g5vcp") pod "cc3dfbd1-565d-4291-806a-65c53fd7b75f" (UID: "cc3dfbd1-565d-4291-806a-65c53fd7b75f"). InnerVolumeSpecName "kube-api-access-g5vcp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.275579 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "cc3dfbd1-565d-4291-806a-65c53fd7b75f" (UID: "cc3dfbd1-565d-4291-806a-65c53fd7b75f"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.291928 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-inventory" (OuterVolumeSpecName: "inventory") pod "cc3dfbd1-565d-4291-806a-65c53fd7b75f" (UID: "cc3dfbd1-565d-4291-806a-65c53fd7b75f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.292235 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "cc3dfbd1-565d-4291-806a-65c53fd7b75f" (UID: "cc3dfbd1-565d-4291-806a-65c53fd7b75f"). InnerVolumeSpecName "libvirt-secret-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.309020 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5vcp\" (UniqueName: \"kubernetes.io/projected/cc3dfbd1-565d-4291-806a-65c53fd7b75f-kube-api-access-g5vcp\") on node \"crc\" DevicePath \"\"" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.309201 4773 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.309315 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.309419 4773 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/cc3dfbd1-565d-4291-806a-65c53fd7b75f-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.342803 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" event={"ID":"cc3dfbd1-565d-4291-806a-65c53fd7b75f","Type":"ContainerDied","Data":"04601b89158da66eff1519a1fefe074168ab26521ea7e1ffbab31f8be37b0f57"} Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.342858 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="04601b89158da66eff1519a1fefe074168ab26521ea7e1ffbab31f8be37b0f57" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.342886 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-vmjs4" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.456008 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-flgnp"] Jan 22 14:12:23 crc kubenswrapper[4773]: E0122 14:12:23.456655 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc3dfbd1-565d-4291-806a-65c53fd7b75f" containerName="libvirt-openstack-openstack-cell1" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.456688 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc3dfbd1-565d-4291-806a-65c53fd7b75f" containerName="libvirt-openstack-openstack-cell1" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.456945 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc3dfbd1-565d-4291-806a-65c53fd7b75f" containerName="libvirt-openstack-openstack-cell1" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.457930 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.464866 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.464889 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.465049 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-bzqnq" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.465103 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.465185 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.465302 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.465310 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.475616 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-flgnp"] Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.617375 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.617436 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-inventory\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.617540 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.617637 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.617690 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: 
\"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.617769 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jq4h7\" (UniqueName: \"kubernetes.io/projected/48876acf-700b-43be-94ae-7773bf8e8cbf-kube-api-access-jq4h7\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.617861 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-ssh-key-openstack-cell1\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.617897 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.617919 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.720620 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-inventory\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.720786 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.722513 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.722675 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: 
\"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.722868 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.723862 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jq4h7\" (UniqueName: \"kubernetes.io/projected/48876acf-700b-43be-94ae-7773bf8e8cbf-kube-api-access-jq4h7\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.724207 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-ssh-key-openstack-cell1\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.724310 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.724402 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.724640 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.730145 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.730642 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: 
\"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-ssh-key-openstack-cell1\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.730665 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-inventory\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.731266 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.731264 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.731777 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.737376 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.743334 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jq4h7\" (UniqueName: \"kubernetes.io/projected/48876acf-700b-43be-94ae-7773bf8e8cbf-kube-api-access-jq4h7\") pod \"nova-cell1-openstack-openstack-cell1-flgnp\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:23 crc kubenswrapper[4773]: I0122 14:12:23.799790 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:12:24 crc kubenswrapper[4773]: I0122 14:12:24.194343 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-flgnp"] Jan 22 14:12:24 crc kubenswrapper[4773]: I0122 14:12:24.389618 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" event={"ID":"48876acf-700b-43be-94ae-7773bf8e8cbf","Type":"ContainerStarted","Data":"c4693a27bfa72065c13208c4a07c31e5b16fba536204ae41666c9451886e5b3d"} Jan 22 14:12:27 crc kubenswrapper[4773]: I0122 14:12:27.424636 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" event={"ID":"48876acf-700b-43be-94ae-7773bf8e8cbf","Type":"ContainerStarted","Data":"8001a3b339412536f0e56036ea08d969d21d2b26390af3cfb9dc75e8f9b55615"} Jan 22 14:12:27 crc kubenswrapper[4773]: I0122 14:12:27.448545 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" podStartSLOduration=2.174114395 podStartE2EDuration="4.448502627s" podCreationTimestamp="2026-01-22 14:12:23 +0000 UTC" firstStartedPulling="2026-01-22 14:12:24.199567947 +0000 UTC m=+8251.777683792" lastFinishedPulling="2026-01-22 14:12:26.473956199 +0000 UTC m=+8254.052072024" observedRunningTime="2026-01-22 14:12:27.444217145 +0000 UTC m=+8255.022332980" watchObservedRunningTime="2026-01-22 14:12:27.448502627 +0000 UTC m=+8255.026618452" Jan 22 14:12:34 crc kubenswrapper[4773]: I0122 14:12:34.198429 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:12:34 crc kubenswrapper[4773]: I0122 14:12:34.199041 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:12:34 crc kubenswrapper[4773]: I0122 14:12:34.199108 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 14:12:34 crc kubenswrapper[4773]: I0122 14:12:34.200301 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9a9766962cd095cd50b29838c89ec652a579111c490c476653932e62ff669648"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 14:12:34 crc kubenswrapper[4773]: I0122 14:12:34.200392 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://9a9766962cd095cd50b29838c89ec652a579111c490c476653932e62ff669648" gracePeriod=600 Jan 22 14:12:34 crc kubenswrapper[4773]: I0122 14:12:34.650179 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" 
containerID="9a9766962cd095cd50b29838c89ec652a579111c490c476653932e62ff669648" exitCode=0 Jan 22 14:12:34 crc kubenswrapper[4773]: I0122 14:12:34.650246 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"9a9766962cd095cd50b29838c89ec652a579111c490c476653932e62ff669648"} Jan 22 14:12:34 crc kubenswrapper[4773]: I0122 14:12:34.650770 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad"} Jan 22 14:12:34 crc kubenswrapper[4773]: I0122 14:12:34.650857 4773 scope.go:117] "RemoveContainer" containerID="699cb7adb318f5b61d0f7584a54015d5e3537f5c225629459cdda7c7080e411e" Jan 22 14:14:14 crc kubenswrapper[4773]: I0122 14:14:14.496776 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cptph"] Jan 22 14:14:14 crc kubenswrapper[4773]: I0122 14:14:14.501605 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cptph" Jan 22 14:14:14 crc kubenswrapper[4773]: I0122 14:14:14.508497 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cptph"] Jan 22 14:14:14 crc kubenswrapper[4773]: I0122 14:14:14.647681 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebb77cfc-306e-4edb-ad40-f65db99c5f44-catalog-content\") pod \"community-operators-cptph\" (UID: \"ebb77cfc-306e-4edb-ad40-f65db99c5f44\") " pod="openshift-marketplace/community-operators-cptph" Jan 22 14:14:14 crc kubenswrapper[4773]: I0122 14:14:14.647801 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkmrd\" (UniqueName: \"kubernetes.io/projected/ebb77cfc-306e-4edb-ad40-f65db99c5f44-kube-api-access-jkmrd\") pod \"community-operators-cptph\" (UID: \"ebb77cfc-306e-4edb-ad40-f65db99c5f44\") " pod="openshift-marketplace/community-operators-cptph" Jan 22 14:14:14 crc kubenswrapper[4773]: I0122 14:14:14.648313 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebb77cfc-306e-4edb-ad40-f65db99c5f44-utilities\") pod \"community-operators-cptph\" (UID: \"ebb77cfc-306e-4edb-ad40-f65db99c5f44\") " pod="openshift-marketplace/community-operators-cptph" Jan 22 14:14:14 crc kubenswrapper[4773]: I0122 14:14:14.750058 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebb77cfc-306e-4edb-ad40-f65db99c5f44-utilities\") pod \"community-operators-cptph\" (UID: \"ebb77cfc-306e-4edb-ad40-f65db99c5f44\") " pod="openshift-marketplace/community-operators-cptph" Jan 22 14:14:14 crc kubenswrapper[4773]: I0122 14:14:14.750537 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebb77cfc-306e-4edb-ad40-f65db99c5f44-catalog-content\") pod \"community-operators-cptph\" (UID: \"ebb77cfc-306e-4edb-ad40-f65db99c5f44\") " pod="openshift-marketplace/community-operators-cptph" Jan 22 14:14:14 crc kubenswrapper[4773]: I0122 
14:14:14.750701 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebb77cfc-306e-4edb-ad40-f65db99c5f44-utilities\") pod \"community-operators-cptph\" (UID: \"ebb77cfc-306e-4edb-ad40-f65db99c5f44\") " pod="openshift-marketplace/community-operators-cptph" Jan 22 14:14:14 crc kubenswrapper[4773]: I0122 14:14:14.750875 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkmrd\" (UniqueName: \"kubernetes.io/projected/ebb77cfc-306e-4edb-ad40-f65db99c5f44-kube-api-access-jkmrd\") pod \"community-operators-cptph\" (UID: \"ebb77cfc-306e-4edb-ad40-f65db99c5f44\") " pod="openshift-marketplace/community-operators-cptph" Jan 22 14:14:14 crc kubenswrapper[4773]: I0122 14:14:14.750977 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebb77cfc-306e-4edb-ad40-f65db99c5f44-catalog-content\") pod \"community-operators-cptph\" (UID: \"ebb77cfc-306e-4edb-ad40-f65db99c5f44\") " pod="openshift-marketplace/community-operators-cptph" Jan 22 14:14:14 crc kubenswrapper[4773]: I0122 14:14:14.773314 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkmrd\" (UniqueName: \"kubernetes.io/projected/ebb77cfc-306e-4edb-ad40-f65db99c5f44-kube-api-access-jkmrd\") pod \"community-operators-cptph\" (UID: \"ebb77cfc-306e-4edb-ad40-f65db99c5f44\") " pod="openshift-marketplace/community-operators-cptph" Jan 22 14:14:14 crc kubenswrapper[4773]: I0122 14:14:14.845615 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cptph" Jan 22 14:14:15 crc kubenswrapper[4773]: I0122 14:14:15.459175 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cptph"] Jan 22 14:14:16 crc kubenswrapper[4773]: I0122 14:14:16.221738 4773 generic.go:334] "Generic (PLEG): container finished" podID="ebb77cfc-306e-4edb-ad40-f65db99c5f44" containerID="4cc2e6bdfb43f8588349fa6fed0230fdac87c7ff7a5dd27ed47e8a884874b409" exitCode=0 Jan 22 14:14:16 crc kubenswrapper[4773]: I0122 14:14:16.221846 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cptph" event={"ID":"ebb77cfc-306e-4edb-ad40-f65db99c5f44","Type":"ContainerDied","Data":"4cc2e6bdfb43f8588349fa6fed0230fdac87c7ff7a5dd27ed47e8a884874b409"} Jan 22 14:14:16 crc kubenswrapper[4773]: I0122 14:14:16.222056 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cptph" event={"ID":"ebb77cfc-306e-4edb-ad40-f65db99c5f44","Type":"ContainerStarted","Data":"fafeab652b964353dae995bfabf9a2ff4e5d95e64a6e2ed4587b31354a8491b5"} Jan 22 14:14:16 crc kubenswrapper[4773]: I0122 14:14:16.224395 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 14:14:19 crc kubenswrapper[4773]: I0122 14:14:19.255224 4773 generic.go:334] "Generic (PLEG): container finished" podID="ebb77cfc-306e-4edb-ad40-f65db99c5f44" containerID="7f9973e6696776bbea912c2421bb375b2de045bbb2cdf78f5d339c42d3b2d0ea" exitCode=0 Jan 22 14:14:19 crc kubenswrapper[4773]: I0122 14:14:19.255267 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cptph" event={"ID":"ebb77cfc-306e-4edb-ad40-f65db99c5f44","Type":"ContainerDied","Data":"7f9973e6696776bbea912c2421bb375b2de045bbb2cdf78f5d339c42d3b2d0ea"} Jan 22 14:14:20 
crc kubenswrapper[4773]: I0122 14:14:20.270432 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cptph" event={"ID":"ebb77cfc-306e-4edb-ad40-f65db99c5f44","Type":"ContainerStarted","Data":"4986b6bbb20447e0d442b323cee8ab8a1fd08d61324b9722efd9cb4ceb7894d0"} Jan 22 14:14:20 crc kubenswrapper[4773]: I0122 14:14:20.304631 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cptph" podStartSLOduration=2.847730545 podStartE2EDuration="6.304600029s" podCreationTimestamp="2026-01-22 14:14:14 +0000 UTC" firstStartedPulling="2026-01-22 14:14:16.224042987 +0000 UTC m=+8363.802158822" lastFinishedPulling="2026-01-22 14:14:19.680912481 +0000 UTC m=+8367.259028306" observedRunningTime="2026-01-22 14:14:20.293097693 +0000 UTC m=+8367.871213528" watchObservedRunningTime="2026-01-22 14:14:20.304600029 +0000 UTC m=+8367.882715864" Jan 22 14:14:24 crc kubenswrapper[4773]: I0122 14:14:24.845882 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cptph" Jan 22 14:14:24 crc kubenswrapper[4773]: I0122 14:14:24.846408 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cptph" Jan 22 14:14:24 crc kubenswrapper[4773]: I0122 14:14:24.918211 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cptph" Jan 22 14:14:25 crc kubenswrapper[4773]: I0122 14:14:25.371136 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cptph" Jan 22 14:14:25 crc kubenswrapper[4773]: I0122 14:14:25.426558 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cptph"] Jan 22 14:14:27 crc kubenswrapper[4773]: I0122 14:14:27.340184 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cptph" podUID="ebb77cfc-306e-4edb-ad40-f65db99c5f44" containerName="registry-server" containerID="cri-o://4986b6bbb20447e0d442b323cee8ab8a1fd08d61324b9722efd9cb4ceb7894d0" gracePeriod=2 Jan 22 14:14:28 crc kubenswrapper[4773]: I0122 14:14:28.356810 4773 generic.go:334] "Generic (PLEG): container finished" podID="ebb77cfc-306e-4edb-ad40-f65db99c5f44" containerID="4986b6bbb20447e0d442b323cee8ab8a1fd08d61324b9722efd9cb4ceb7894d0" exitCode=0 Jan 22 14:14:28 crc kubenswrapper[4773]: I0122 14:14:28.356895 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cptph" event={"ID":"ebb77cfc-306e-4edb-ad40-f65db99c5f44","Type":"ContainerDied","Data":"4986b6bbb20447e0d442b323cee8ab8a1fd08d61324b9722efd9cb4ceb7894d0"} Jan 22 14:14:28 crc kubenswrapper[4773]: I0122 14:14:28.566250 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cptph" Jan 22 14:14:28 crc kubenswrapper[4773]: I0122 14:14:28.651598 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebb77cfc-306e-4edb-ad40-f65db99c5f44-utilities\") pod \"ebb77cfc-306e-4edb-ad40-f65db99c5f44\" (UID: \"ebb77cfc-306e-4edb-ad40-f65db99c5f44\") " Jan 22 14:14:28 crc kubenswrapper[4773]: I0122 14:14:28.651702 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkmrd\" (UniqueName: \"kubernetes.io/projected/ebb77cfc-306e-4edb-ad40-f65db99c5f44-kube-api-access-jkmrd\") pod \"ebb77cfc-306e-4edb-ad40-f65db99c5f44\" (UID: \"ebb77cfc-306e-4edb-ad40-f65db99c5f44\") " Jan 22 14:14:28 crc kubenswrapper[4773]: I0122 14:14:28.651849 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebb77cfc-306e-4edb-ad40-f65db99c5f44-catalog-content\") pod \"ebb77cfc-306e-4edb-ad40-f65db99c5f44\" (UID: \"ebb77cfc-306e-4edb-ad40-f65db99c5f44\") " Jan 22 14:14:28 crc kubenswrapper[4773]: I0122 14:14:28.653194 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebb77cfc-306e-4edb-ad40-f65db99c5f44-utilities" (OuterVolumeSpecName: "utilities") pod "ebb77cfc-306e-4edb-ad40-f65db99c5f44" (UID: "ebb77cfc-306e-4edb-ad40-f65db99c5f44"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:14:28 crc kubenswrapper[4773]: I0122 14:14:28.660008 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebb77cfc-306e-4edb-ad40-f65db99c5f44-kube-api-access-jkmrd" (OuterVolumeSpecName: "kube-api-access-jkmrd") pod "ebb77cfc-306e-4edb-ad40-f65db99c5f44" (UID: "ebb77cfc-306e-4edb-ad40-f65db99c5f44"). InnerVolumeSpecName "kube-api-access-jkmrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:14:28 crc kubenswrapper[4773]: I0122 14:14:28.703509 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebb77cfc-306e-4edb-ad40-f65db99c5f44-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ebb77cfc-306e-4edb-ad40-f65db99c5f44" (UID: "ebb77cfc-306e-4edb-ad40-f65db99c5f44"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:14:28 crc kubenswrapper[4773]: I0122 14:14:28.754680 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkmrd\" (UniqueName: \"kubernetes.io/projected/ebb77cfc-306e-4edb-ad40-f65db99c5f44-kube-api-access-jkmrd\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:28 crc kubenswrapper[4773]: I0122 14:14:28.754709 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebb77cfc-306e-4edb-ad40-f65db99c5f44-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:28 crc kubenswrapper[4773]: I0122 14:14:28.754720 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebb77cfc-306e-4edb-ad40-f65db99c5f44-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:14:29 crc kubenswrapper[4773]: I0122 14:14:29.378589 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cptph" event={"ID":"ebb77cfc-306e-4edb-ad40-f65db99c5f44","Type":"ContainerDied","Data":"fafeab652b964353dae995bfabf9a2ff4e5d95e64a6e2ed4587b31354a8491b5"} Jan 22 14:14:29 crc kubenswrapper[4773]: I0122 14:14:29.378680 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cptph" Jan 22 14:14:29 crc kubenswrapper[4773]: I0122 14:14:29.379091 4773 scope.go:117] "RemoveContainer" containerID="4986b6bbb20447e0d442b323cee8ab8a1fd08d61324b9722efd9cb4ceb7894d0" Jan 22 14:14:29 crc kubenswrapper[4773]: I0122 14:14:29.414851 4773 scope.go:117] "RemoveContainer" containerID="7f9973e6696776bbea912c2421bb375b2de045bbb2cdf78f5d339c42d3b2d0ea" Jan 22 14:14:29 crc kubenswrapper[4773]: I0122 14:14:29.430437 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cptph"] Jan 22 14:14:29 crc kubenswrapper[4773]: I0122 14:14:29.450103 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cptph"] Jan 22 14:14:29 crc kubenswrapper[4773]: I0122 14:14:29.460834 4773 scope.go:117] "RemoveContainer" containerID="4cc2e6bdfb43f8588349fa6fed0230fdac87c7ff7a5dd27ed47e8a884874b409" Jan 22 14:14:30 crc kubenswrapper[4773]: I0122 14:14:30.672269 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebb77cfc-306e-4edb-ad40-f65db99c5f44" path="/var/lib/kubelet/pods/ebb77cfc-306e-4edb-ad40-f65db99c5f44/volumes" Jan 22 14:14:34 crc kubenswrapper[4773]: I0122 14:14:34.074049 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:14:34 crc kubenswrapper[4773]: I0122 14:14:34.074829 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:15:00 crc kubenswrapper[4773]: I0122 14:15:00.167719 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh"] Jan 22 14:15:00 crc kubenswrapper[4773]: E0122 14:15:00.170107 4773 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="ebb77cfc-306e-4edb-ad40-f65db99c5f44" containerName="extract-utilities" Jan 22 14:15:00 crc kubenswrapper[4773]: I0122 14:15:00.170134 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebb77cfc-306e-4edb-ad40-f65db99c5f44" containerName="extract-utilities" Jan 22 14:15:00 crc kubenswrapper[4773]: E0122 14:15:00.170165 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebb77cfc-306e-4edb-ad40-f65db99c5f44" containerName="extract-content" Jan 22 14:15:00 crc kubenswrapper[4773]: I0122 14:15:00.170174 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebb77cfc-306e-4edb-ad40-f65db99c5f44" containerName="extract-content" Jan 22 14:15:00 crc kubenswrapper[4773]: E0122 14:15:00.170217 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebb77cfc-306e-4edb-ad40-f65db99c5f44" containerName="registry-server" Jan 22 14:15:00 crc kubenswrapper[4773]: I0122 14:15:00.170226 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebb77cfc-306e-4edb-ad40-f65db99c5f44" containerName="registry-server" Jan 22 14:15:00 crc kubenswrapper[4773]: I0122 14:15:00.171257 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebb77cfc-306e-4edb-ad40-f65db99c5f44" containerName="registry-server" Jan 22 14:15:00 crc kubenswrapper[4773]: I0122 14:15:00.172329 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh" Jan 22 14:15:00 crc kubenswrapper[4773]: I0122 14:15:00.175780 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 14:15:00 crc kubenswrapper[4773]: I0122 14:15:00.175807 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 14:15:00 crc kubenswrapper[4773]: I0122 14:15:00.190093 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh"] Jan 22 14:15:00 crc kubenswrapper[4773]: I0122 14:15:00.520909 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cdfaad31-846c-4442-8fdb-8abef2bed5df-config-volume\") pod \"collect-profiles-29484855-tcpsh\" (UID: \"cdfaad31-846c-4442-8fdb-8abef2bed5df\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh" Jan 22 14:15:00 crc kubenswrapper[4773]: I0122 14:15:00.521062 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cdfaad31-846c-4442-8fdb-8abef2bed5df-secret-volume\") pod \"collect-profiles-29484855-tcpsh\" (UID: \"cdfaad31-846c-4442-8fdb-8abef2bed5df\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh" Jan 22 14:15:00 crc kubenswrapper[4773]: I0122 14:15:00.521375 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vtls\" (UniqueName: \"kubernetes.io/projected/cdfaad31-846c-4442-8fdb-8abef2bed5df-kube-api-access-9vtls\") pod \"collect-profiles-29484855-tcpsh\" (UID: \"cdfaad31-846c-4442-8fdb-8abef2bed5df\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh" Jan 22 14:15:00 crc kubenswrapper[4773]: I0122 14:15:00.623987 4773 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cdfaad31-846c-4442-8fdb-8abef2bed5df-config-volume\") pod \"collect-profiles-29484855-tcpsh\" (UID: \"cdfaad31-846c-4442-8fdb-8abef2bed5df\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh" Jan 22 14:15:00 crc kubenswrapper[4773]: I0122 14:15:00.624099 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cdfaad31-846c-4442-8fdb-8abef2bed5df-secret-volume\") pod \"collect-profiles-29484855-tcpsh\" (UID: \"cdfaad31-846c-4442-8fdb-8abef2bed5df\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh" Jan 22 14:15:00 crc kubenswrapper[4773]: I0122 14:15:00.624179 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vtls\" (UniqueName: \"kubernetes.io/projected/cdfaad31-846c-4442-8fdb-8abef2bed5df-kube-api-access-9vtls\") pod \"collect-profiles-29484855-tcpsh\" (UID: \"cdfaad31-846c-4442-8fdb-8abef2bed5df\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh" Jan 22 14:15:00 crc kubenswrapper[4773]: I0122 14:15:00.625141 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cdfaad31-846c-4442-8fdb-8abef2bed5df-config-volume\") pod \"collect-profiles-29484855-tcpsh\" (UID: \"cdfaad31-846c-4442-8fdb-8abef2bed5df\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh" Jan 22 14:15:00 crc kubenswrapper[4773]: I0122 14:15:00.629951 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cdfaad31-846c-4442-8fdb-8abef2bed5df-secret-volume\") pod \"collect-profiles-29484855-tcpsh\" (UID: \"cdfaad31-846c-4442-8fdb-8abef2bed5df\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh" Jan 22 14:15:00 crc kubenswrapper[4773]: I0122 14:15:00.644104 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vtls\" (UniqueName: \"kubernetes.io/projected/cdfaad31-846c-4442-8fdb-8abef2bed5df-kube-api-access-9vtls\") pod \"collect-profiles-29484855-tcpsh\" (UID: \"cdfaad31-846c-4442-8fdb-8abef2bed5df\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh" Jan 22 14:15:00 crc kubenswrapper[4773]: I0122 14:15:00.796250 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh" Jan 22 14:15:01 crc kubenswrapper[4773]: I0122 14:15:01.271709 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh"] Jan 22 14:15:01 crc kubenswrapper[4773]: I0122 14:15:01.836084 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh" event={"ID":"cdfaad31-846c-4442-8fdb-8abef2bed5df","Type":"ContainerStarted","Data":"42799ccbd06f9c6b191863eabe94449398668c3f1816ee9998e95ded32776d60"} Jan 22 14:15:01 crc kubenswrapper[4773]: I0122 14:15:01.836499 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh" event={"ID":"cdfaad31-846c-4442-8fdb-8abef2bed5df","Type":"ContainerStarted","Data":"606d1f19a7b24bbb4a13282214090b1642b5e7bc8efa13906b88d94cced5153b"} Jan 22 14:15:02 crc kubenswrapper[4773]: I0122 14:15:02.846984 4773 generic.go:334] "Generic (PLEG): container finished" podID="cdfaad31-846c-4442-8fdb-8abef2bed5df" containerID="42799ccbd06f9c6b191863eabe94449398668c3f1816ee9998e95ded32776d60" exitCode=0 Jan 22 14:15:02 crc kubenswrapper[4773]: I0122 14:15:02.847116 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh" event={"ID":"cdfaad31-846c-4442-8fdb-8abef2bed5df","Type":"ContainerDied","Data":"42799ccbd06f9c6b191863eabe94449398668c3f1816ee9998e95ded32776d60"} Jan 22 14:15:04 crc kubenswrapper[4773]: I0122 14:15:04.074353 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:15:04 crc kubenswrapper[4773]: I0122 14:15:04.074710 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:15:04 crc kubenswrapper[4773]: I0122 14:15:04.170713 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh" Jan 22 14:15:04 crc kubenswrapper[4773]: I0122 14:15:04.369083 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vtls\" (UniqueName: \"kubernetes.io/projected/cdfaad31-846c-4442-8fdb-8abef2bed5df-kube-api-access-9vtls\") pod \"cdfaad31-846c-4442-8fdb-8abef2bed5df\" (UID: \"cdfaad31-846c-4442-8fdb-8abef2bed5df\") " Jan 22 14:15:04 crc kubenswrapper[4773]: I0122 14:15:04.369255 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cdfaad31-846c-4442-8fdb-8abef2bed5df-config-volume\") pod \"cdfaad31-846c-4442-8fdb-8abef2bed5df\" (UID: \"cdfaad31-846c-4442-8fdb-8abef2bed5df\") " Jan 22 14:15:04 crc kubenswrapper[4773]: I0122 14:15:04.369403 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cdfaad31-846c-4442-8fdb-8abef2bed5df-secret-volume\") pod \"cdfaad31-846c-4442-8fdb-8abef2bed5df\" (UID: \"cdfaad31-846c-4442-8fdb-8abef2bed5df\") " Jan 22 14:15:04 crc kubenswrapper[4773]: I0122 14:15:04.369983 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cdfaad31-846c-4442-8fdb-8abef2bed5df-config-volume" (OuterVolumeSpecName: "config-volume") pod "cdfaad31-846c-4442-8fdb-8abef2bed5df" (UID: "cdfaad31-846c-4442-8fdb-8abef2bed5df"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:15:04 crc kubenswrapper[4773]: I0122 14:15:04.371684 4773 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cdfaad31-846c-4442-8fdb-8abef2bed5df-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 14:15:04 crc kubenswrapper[4773]: I0122 14:15:04.375652 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdfaad31-846c-4442-8fdb-8abef2bed5df-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "cdfaad31-846c-4442-8fdb-8abef2bed5df" (UID: "cdfaad31-846c-4442-8fdb-8abef2bed5df"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:15:04 crc kubenswrapper[4773]: I0122 14:15:04.376589 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdfaad31-846c-4442-8fdb-8abef2bed5df-kube-api-access-9vtls" (OuterVolumeSpecName: "kube-api-access-9vtls") pod "cdfaad31-846c-4442-8fdb-8abef2bed5df" (UID: "cdfaad31-846c-4442-8fdb-8abef2bed5df"). InnerVolumeSpecName "kube-api-access-9vtls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:15:04 crc kubenswrapper[4773]: I0122 14:15:04.474421 4773 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cdfaad31-846c-4442-8fdb-8abef2bed5df-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 14:15:04 crc kubenswrapper[4773]: I0122 14:15:04.474462 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vtls\" (UniqueName: \"kubernetes.io/projected/cdfaad31-846c-4442-8fdb-8abef2bed5df-kube-api-access-9vtls\") on node \"crc\" DevicePath \"\"" Jan 22 14:15:04 crc kubenswrapper[4773]: I0122 14:15:04.535580 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq"] Jan 22 14:15:04 crc kubenswrapper[4773]: I0122 14:15:04.551202 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484810-gvgcq"] Jan 22 14:15:04 crc kubenswrapper[4773]: I0122 14:15:04.671126 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84cab1d5-65aa-4c69-854e-f1e1d7f87c08" path="/var/lib/kubelet/pods/84cab1d5-65aa-4c69-854e-f1e1d7f87c08/volumes" Jan 22 14:15:04 crc kubenswrapper[4773]: I0122 14:15:04.872100 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh" event={"ID":"cdfaad31-846c-4442-8fdb-8abef2bed5df","Type":"ContainerDied","Data":"606d1f19a7b24bbb4a13282214090b1642b5e7bc8efa13906b88d94cced5153b"} Jan 22 14:15:04 crc kubenswrapper[4773]: I0122 14:15:04.872144 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="606d1f19a7b24bbb4a13282214090b1642b5e7bc8efa13906b88d94cced5153b" Jan 22 14:15:04 crc kubenswrapper[4773]: I0122 14:15:04.872248 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh" Jan 22 14:15:09 crc kubenswrapper[4773]: I0122 14:15:09.921346 4773 generic.go:334] "Generic (PLEG): container finished" podID="48876acf-700b-43be-94ae-7773bf8e8cbf" containerID="8001a3b339412536f0e56036ea08d969d21d2b26390af3cfb9dc75e8f9b55615" exitCode=0 Jan 22 14:15:09 crc kubenswrapper[4773]: I0122 14:15:09.921456 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" event={"ID":"48876acf-700b-43be-94ae-7773bf8e8cbf","Type":"ContainerDied","Data":"8001a3b339412536f0e56036ea08d969d21d2b26390af3cfb9dc75e8f9b55615"} Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.402132 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.433775 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-inventory\") pod \"48876acf-700b-43be-94ae-7773bf8e8cbf\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.433898 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cell1-compute-config-0\") pod \"48876acf-700b-43be-94ae-7773bf8e8cbf\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.434006 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cells-global-config-0\") pod \"48876acf-700b-43be-94ae-7773bf8e8cbf\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.434131 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-migration-ssh-key-0\") pod \"48876acf-700b-43be-94ae-7773bf8e8cbf\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.434314 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-migration-ssh-key-1\") pod \"48876acf-700b-43be-94ae-7773bf8e8cbf\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.434398 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cell1-combined-ca-bundle\") pod \"48876acf-700b-43be-94ae-7773bf8e8cbf\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.434484 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jq4h7\" (UniqueName: \"kubernetes.io/projected/48876acf-700b-43be-94ae-7773bf8e8cbf-kube-api-access-jq4h7\") pod \"48876acf-700b-43be-94ae-7773bf8e8cbf\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.434585 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-ssh-key-openstack-cell1\") pod \"48876acf-700b-43be-94ae-7773bf8e8cbf\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.434639 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cell1-compute-config-1\") pod \"48876acf-700b-43be-94ae-7773bf8e8cbf\" (UID: \"48876acf-700b-43be-94ae-7773bf8e8cbf\") " Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.442689 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/projected/48876acf-700b-43be-94ae-7773bf8e8cbf-kube-api-access-jq4h7" (OuterVolumeSpecName: "kube-api-access-jq4h7") pod "48876acf-700b-43be-94ae-7773bf8e8cbf" (UID: "48876acf-700b-43be-94ae-7773bf8e8cbf"). InnerVolumeSpecName "kube-api-access-jq4h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.450150 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "48876acf-700b-43be-94ae-7773bf8e8cbf" (UID: "48876acf-700b-43be-94ae-7773bf8e8cbf"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.481079 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "48876acf-700b-43be-94ae-7773bf8e8cbf" (UID: "48876acf-700b-43be-94ae-7773bf8e8cbf"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.485574 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-inventory" (OuterVolumeSpecName: "inventory") pod "48876acf-700b-43be-94ae-7773bf8e8cbf" (UID: "48876acf-700b-43be-94ae-7773bf8e8cbf"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.491942 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "48876acf-700b-43be-94ae-7773bf8e8cbf" (UID: "48876acf-700b-43be-94ae-7773bf8e8cbf"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.492688 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "48876acf-700b-43be-94ae-7773bf8e8cbf" (UID: "48876acf-700b-43be-94ae-7773bf8e8cbf"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.495412 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "48876acf-700b-43be-94ae-7773bf8e8cbf" (UID: "48876acf-700b-43be-94ae-7773bf8e8cbf"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.505431 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "48876acf-700b-43be-94ae-7773bf8e8cbf" (UID: "48876acf-700b-43be-94ae-7773bf8e8cbf"). InnerVolumeSpecName "nova-migration-ssh-key-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.510962 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "48876acf-700b-43be-94ae-7773bf8e8cbf" (UID: "48876acf-700b-43be-94ae-7773bf8e8cbf"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.537162 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.537189 4773 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.537200 4773 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.537210 4773 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.537218 4773 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.537227 4773 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.537235 4773 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.537243 4773 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48876acf-700b-43be-94ae-7773bf8e8cbf-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.537251 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jq4h7\" (UniqueName: \"kubernetes.io/projected/48876acf-700b-43be-94ae-7773bf8e8cbf-kube-api-access-jq4h7\") on node \"crc\" DevicePath \"\"" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.956492 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" event={"ID":"48876acf-700b-43be-94ae-7773bf8e8cbf","Type":"ContainerDied","Data":"c4693a27bfa72065c13208c4a07c31e5b16fba536204ae41666c9451886e5b3d"} Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.956570 4773 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="c4693a27bfa72065c13208c4a07c31e5b16fba536204ae41666c9451886e5b3d" Jan 22 14:15:11 crc kubenswrapper[4773]: I0122 14:15:11.956614 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-flgnp" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.067727 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-b4pq2"] Jan 22 14:15:12 crc kubenswrapper[4773]: E0122 14:15:12.068317 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdfaad31-846c-4442-8fdb-8abef2bed5df" containerName="collect-profiles" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.068342 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdfaad31-846c-4442-8fdb-8abef2bed5df" containerName="collect-profiles" Jan 22 14:15:12 crc kubenswrapper[4773]: E0122 14:15:12.068361 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48876acf-700b-43be-94ae-7773bf8e8cbf" containerName="nova-cell1-openstack-openstack-cell1" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.068368 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="48876acf-700b-43be-94ae-7773bf8e8cbf" containerName="nova-cell1-openstack-openstack-cell1" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.068574 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="48876acf-700b-43be-94ae-7773bf8e8cbf" containerName="nova-cell1-openstack-openstack-cell1" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.068603 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdfaad31-846c-4442-8fdb-8abef2bed5df" containerName="collect-profiles" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.069490 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.071745 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.072057 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-bzqnq" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.072581 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.072941 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.073663 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.080597 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-b4pq2"] Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.215755 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.215810 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ssh-key-openstack-cell1\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.215879 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5zz2\" (UniqueName: \"kubernetes.io/projected/f6608269-1f5b-47d0-9cd4-fcf4410bb039-kube-api-access-f5zz2\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.216355 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.216411 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.216463 4773 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-inventory\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.216683 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.318421 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.318498 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.318539 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ssh-key-openstack-cell1\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.318582 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5zz2\" (UniqueName: \"kubernetes.io/projected/f6608269-1f5b-47d0-9cd4-fcf4410bb039-kube-api-access-f5zz2\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.318742 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.318770 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.318805 4773 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-inventory\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.323515 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-inventory\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.325962 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.326080 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.328039 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.329236 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.329544 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ssh-key-openstack-cell1\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.336593 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5zz2\" (UniqueName: \"kubernetes.io/projected/f6608269-1f5b-47d0-9cd4-fcf4410bb039-kube-api-access-f5zz2\") pod \"telemetry-openstack-openstack-cell1-b4pq2\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.386986 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.946985 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-b4pq2"] Jan 22 14:15:12 crc kubenswrapper[4773]: I0122 14:15:12.975429 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" event={"ID":"f6608269-1f5b-47d0-9cd4-fcf4410bb039","Type":"ContainerStarted","Data":"345a05e5ffde2f5e6e4a2f4eae896d244df0d855e747821784016bf77fe492fc"} Jan 22 14:15:13 crc kubenswrapper[4773]: I0122 14:15:13.988410 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" event={"ID":"f6608269-1f5b-47d0-9cd4-fcf4410bb039","Type":"ContainerStarted","Data":"97e98acf7a0f711ecb6a2179307af6c4cd8e5bda74d66693203b333937bf7755"} Jan 22 14:15:14 crc kubenswrapper[4773]: I0122 14:15:14.017881 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" podStartSLOduration=1.495971502 podStartE2EDuration="2.017857555s" podCreationTimestamp="2026-01-22 14:15:12 +0000 UTC" firstStartedPulling="2026-01-22 14:15:12.948701891 +0000 UTC m=+8420.526817716" lastFinishedPulling="2026-01-22 14:15:13.470587894 +0000 UTC m=+8421.048703769" observedRunningTime="2026-01-22 14:15:14.011691961 +0000 UTC m=+8421.589807796" watchObservedRunningTime="2026-01-22 14:15:14.017857555 +0000 UTC m=+8421.595973380" Jan 22 14:15:14 crc kubenswrapper[4773]: I0122 14:15:14.715048 4773 scope.go:117] "RemoveContainer" containerID="f932dd6b501dc64f80a735fa6c9653b9a45c3709693388758fda1100364049c2" Jan 22 14:15:34 crc kubenswrapper[4773]: I0122 14:15:34.075303 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:15:34 crc kubenswrapper[4773]: I0122 14:15:34.075874 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:15:34 crc kubenswrapper[4773]: I0122 14:15:34.075929 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 14:15:34 crc kubenswrapper[4773]: I0122 14:15:34.076947 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 14:15:34 crc kubenswrapper[4773]: I0122 14:15:34.077015 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" gracePeriod=600 Jan 22 14:15:34 crc 
kubenswrapper[4773]: E0122 14:15:34.202835 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:15:34 crc kubenswrapper[4773]: I0122 14:15:34.336414 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" exitCode=0 Jan 22 14:15:34 crc kubenswrapper[4773]: I0122 14:15:34.336490 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad"} Jan 22 14:15:34 crc kubenswrapper[4773]: I0122 14:15:34.336751 4773 scope.go:117] "RemoveContainer" containerID="9a9766962cd095cd50b29838c89ec652a579111c490c476653932e62ff669648" Jan 22 14:15:34 crc kubenswrapper[4773]: I0122 14:15:34.337570 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:15:34 crc kubenswrapper[4773]: E0122 14:15:34.337992 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:15:47 crc kubenswrapper[4773]: I0122 14:15:47.659842 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:15:47 crc kubenswrapper[4773]: E0122 14:15:47.660814 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:16:02 crc kubenswrapper[4773]: I0122 14:16:02.664834 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:16:02 crc kubenswrapper[4773]: E0122 14:16:02.665724 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:16:13 crc kubenswrapper[4773]: I0122 14:16:13.658077 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:16:13 crc kubenswrapper[4773]: E0122 14:16:13.659772 4773 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:16:28 crc kubenswrapper[4773]: I0122 14:16:28.658497 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:16:28 crc kubenswrapper[4773]: E0122 14:16:28.659192 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:16:42 crc kubenswrapper[4773]: I0122 14:16:42.668878 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:16:42 crc kubenswrapper[4773]: E0122 14:16:42.669593 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:16:56 crc kubenswrapper[4773]: I0122 14:16:56.658978 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:16:56 crc kubenswrapper[4773]: E0122 14:16:56.659796 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:17:10 crc kubenswrapper[4773]: I0122 14:17:10.658606 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:17:10 crc kubenswrapper[4773]: E0122 14:17:10.659501 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:17:22 crc kubenswrapper[4773]: I0122 14:17:22.685923 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:17:22 crc kubenswrapper[4773]: E0122 14:17:22.687075 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:17:33 crc kubenswrapper[4773]: I0122 14:17:33.659545 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:17:33 crc kubenswrapper[4773]: E0122 14:17:33.660416 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:17:47 crc kubenswrapper[4773]: I0122 14:17:47.658778 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:17:47 crc kubenswrapper[4773]: E0122 14:17:47.659510 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:18:01 crc kubenswrapper[4773]: I0122 14:18:01.658503 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:18:01 crc kubenswrapper[4773]: E0122 14:18:01.659400 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:18:16 crc kubenswrapper[4773]: I0122 14:18:16.658334 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:18:16 crc kubenswrapper[4773]: E0122 14:18:16.659092 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:18:27 crc kubenswrapper[4773]: I0122 14:18:27.657899 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:18:27 crc kubenswrapper[4773]: E0122 14:18:27.658738 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" 
podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:18:38 crc kubenswrapper[4773]: I0122 14:18:38.659169 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:18:38 crc kubenswrapper[4773]: E0122 14:18:38.659870 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:18:45 crc kubenswrapper[4773]: I0122 14:18:45.464127 4773 generic.go:334] "Generic (PLEG): container finished" podID="f6608269-1f5b-47d0-9cd4-fcf4410bb039" containerID="97e98acf7a0f711ecb6a2179307af6c4cd8e5bda74d66693203b333937bf7755" exitCode=0 Jan 22 14:18:45 crc kubenswrapper[4773]: I0122 14:18:45.464729 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" event={"ID":"f6608269-1f5b-47d0-9cd4-fcf4410bb039","Type":"ContainerDied","Data":"97e98acf7a0f711ecb6a2179307af6c4cd8e5bda74d66693203b333937bf7755"} Jan 22 14:18:46 crc kubenswrapper[4773]: I0122 14:18:46.969591 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.092606 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5zz2\" (UniqueName: \"kubernetes.io/projected/f6608269-1f5b-47d0-9cd4-fcf4410bb039-kube-api-access-f5zz2\") pod \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.092719 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ceilometer-compute-config-data-2\") pod \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.092847 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ceilometer-compute-config-data-1\") pod \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.092887 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-inventory\") pod \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.092937 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ssh-key-openstack-cell1\") pod \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.093000 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: 
\"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ceilometer-compute-config-data-0\") pod \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.093071 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-telemetry-combined-ca-bundle\") pod \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\" (UID: \"f6608269-1f5b-47d0-9cd4-fcf4410bb039\") " Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.099062 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "f6608269-1f5b-47d0-9cd4-fcf4410bb039" (UID: "f6608269-1f5b-47d0-9cd4-fcf4410bb039"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.099640 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6608269-1f5b-47d0-9cd4-fcf4410bb039-kube-api-access-f5zz2" (OuterVolumeSpecName: "kube-api-access-f5zz2") pod "f6608269-1f5b-47d0-9cd4-fcf4410bb039" (UID: "f6608269-1f5b-47d0-9cd4-fcf4410bb039"). InnerVolumeSpecName "kube-api-access-f5zz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.128816 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "f6608269-1f5b-47d0-9cd4-fcf4410bb039" (UID: "f6608269-1f5b-47d0-9cd4-fcf4410bb039"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.130016 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "f6608269-1f5b-47d0-9cd4-fcf4410bb039" (UID: "f6608269-1f5b-47d0-9cd4-fcf4410bb039"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.132278 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "f6608269-1f5b-47d0-9cd4-fcf4410bb039" (UID: "f6608269-1f5b-47d0-9cd4-fcf4410bb039"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.133807 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-inventory" (OuterVolumeSpecName: "inventory") pod "f6608269-1f5b-47d0-9cd4-fcf4410bb039" (UID: "f6608269-1f5b-47d0-9cd4-fcf4410bb039"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.143607 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "f6608269-1f5b-47d0-9cd4-fcf4410bb039" (UID: "f6608269-1f5b-47d0-9cd4-fcf4410bb039"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.196335 4773 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.196380 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.196393 4773 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.196404 4773 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.196414 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5zz2\" (UniqueName: \"kubernetes.io/projected/f6608269-1f5b-47d0-9cd4-fcf4410bb039-kube-api-access-f5zz2\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.196424 4773 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.196434 4773 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/f6608269-1f5b-47d0-9cd4-fcf4410bb039-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.491644 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" event={"ID":"f6608269-1f5b-47d0-9cd4-fcf4410bb039","Type":"ContainerDied","Data":"345a05e5ffde2f5e6e4a2f4eae896d244df0d855e747821784016bf77fe492fc"} Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.491707 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="345a05e5ffde2f5e6e4a2f4eae896d244df0d855e747821784016bf77fe492fc" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.491768 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-b4pq2" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.610586 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-7mndd"] Jan 22 14:18:47 crc kubenswrapper[4773]: E0122 14:18:47.611193 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6608269-1f5b-47d0-9cd4-fcf4410bb039" containerName="telemetry-openstack-openstack-cell1" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.611220 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6608269-1f5b-47d0-9cd4-fcf4410bb039" containerName="telemetry-openstack-openstack-cell1" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.611657 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6608269-1f5b-47d0-9cd4-fcf4410bb039" containerName="telemetry-openstack-openstack-cell1" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.612665 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.615144 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-sriov-agent-neutron-config" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.615421 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.616965 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-bzqnq" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.617130 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.617330 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.625451 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-7mndd"] Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.808411 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpwk2\" (UniqueName: \"kubernetes.io/projected/5eb9f34d-70a6-4862-a2af-2d9811f5610a-kube-api-access-wpwk2\") pod \"neutron-sriov-openstack-openstack-cell1-7mndd\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.809253 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-7mndd\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.809427 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-7mndd\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" Jan 22 14:18:47 
crc kubenswrapper[4773]: I0122 14:18:47.809750 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-ssh-key-openstack-cell1\") pod \"neutron-sriov-openstack-openstack-cell1-7mndd\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.809808 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-7mndd\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.912456 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-7mndd\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.912769 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-7mndd\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.912876 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-ssh-key-openstack-cell1\") pod \"neutron-sriov-openstack-openstack-cell1-7mndd\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.912928 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-7mndd\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.913017 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpwk2\" (UniqueName: \"kubernetes.io/projected/5eb9f34d-70a6-4862-a2af-2d9811f5610a-kube-api-access-wpwk2\") pod \"neutron-sriov-openstack-openstack-cell1-7mndd\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.918521 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-7mndd\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " 
pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.919115 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-7mndd\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.920535 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-ssh-key-openstack-cell1\") pod \"neutron-sriov-openstack-openstack-cell1-7mndd\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.924510 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-7mndd\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.932499 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpwk2\" (UniqueName: \"kubernetes.io/projected/5eb9f34d-70a6-4862-a2af-2d9811f5610a-kube-api-access-wpwk2\") pod \"neutron-sriov-openstack-openstack-cell1-7mndd\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" Jan 22 14:18:47 crc kubenswrapper[4773]: I0122 14:18:47.939473 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" Jan 22 14:18:48 crc kubenswrapper[4773]: I0122 14:18:48.489610 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-7mndd"] Jan 22 14:18:48 crc kubenswrapper[4773]: I0122 14:18:48.509042 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" event={"ID":"5eb9f34d-70a6-4862-a2af-2d9811f5610a","Type":"ContainerStarted","Data":"9d7fe98522aae1aa8c8d63f44e3af4fe661a6a7da4082b50ceb2ed75fc4a47da"} Jan 22 14:18:49 crc kubenswrapper[4773]: I0122 14:18:49.519171 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" event={"ID":"5eb9f34d-70a6-4862-a2af-2d9811f5610a","Type":"ContainerStarted","Data":"b9c59349a5a4d9c5cf160c6ac571648b61c9548ab2c5ed0c68805c43f484f94c"} Jan 22 14:18:49 crc kubenswrapper[4773]: I0122 14:18:49.550881 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" podStartSLOduration=2.1419590729999998 podStartE2EDuration="2.550861794s" podCreationTimestamp="2026-01-22 14:18:47 +0000 UTC" firstStartedPulling="2026-01-22 14:18:48.498758742 +0000 UTC m=+8636.076874567" lastFinishedPulling="2026-01-22 14:18:48.907661463 +0000 UTC m=+8636.485777288" observedRunningTime="2026-01-22 14:18:49.536155077 +0000 UTC m=+8637.114270902" watchObservedRunningTime="2026-01-22 14:18:49.550861794 +0000 UTC m=+8637.128977619" Jan 22 14:18:49 crc kubenswrapper[4773]: I0122 14:18:49.657985 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:18:49 crc kubenswrapper[4773]: E0122 14:18:49.658269 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:19:00 crc kubenswrapper[4773]: I0122 14:19:00.658111 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:19:00 crc kubenswrapper[4773]: E0122 14:19:00.659025 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:19:11 crc kubenswrapper[4773]: I0122 14:19:11.658115 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:19:11 crc kubenswrapper[4773]: E0122 14:19:11.660427 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" 
podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:19:23 crc kubenswrapper[4773]: I0122 14:19:23.919916 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dxspf"] Jan 22 14:19:23 crc kubenswrapper[4773]: I0122 14:19:23.922936 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dxspf" Jan 22 14:19:23 crc kubenswrapper[4773]: I0122 14:19:23.930810 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dxspf"] Jan 22 14:19:24 crc kubenswrapper[4773]: I0122 14:19:24.079573 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0-utilities\") pod \"redhat-marketplace-dxspf\" (UID: \"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0\") " pod="openshift-marketplace/redhat-marketplace-dxspf" Jan 22 14:19:24 crc kubenswrapper[4773]: I0122 14:19:24.079628 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0-catalog-content\") pod \"redhat-marketplace-dxspf\" (UID: \"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0\") " pod="openshift-marketplace/redhat-marketplace-dxspf" Jan 22 14:19:24 crc kubenswrapper[4773]: I0122 14:19:24.080502 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8sbqb\" (UniqueName: \"kubernetes.io/projected/de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0-kube-api-access-8sbqb\") pod \"redhat-marketplace-dxspf\" (UID: \"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0\") " pod="openshift-marketplace/redhat-marketplace-dxspf" Jan 22 14:19:24 crc kubenswrapper[4773]: I0122 14:19:24.182460 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0-utilities\") pod \"redhat-marketplace-dxspf\" (UID: \"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0\") " pod="openshift-marketplace/redhat-marketplace-dxspf" Jan 22 14:19:24 crc kubenswrapper[4773]: I0122 14:19:24.182516 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0-catalog-content\") pod \"redhat-marketplace-dxspf\" (UID: \"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0\") " pod="openshift-marketplace/redhat-marketplace-dxspf" Jan 22 14:19:24 crc kubenswrapper[4773]: I0122 14:19:24.182666 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8sbqb\" (UniqueName: \"kubernetes.io/projected/de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0-kube-api-access-8sbqb\") pod \"redhat-marketplace-dxspf\" (UID: \"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0\") " pod="openshift-marketplace/redhat-marketplace-dxspf" Jan 22 14:19:24 crc kubenswrapper[4773]: I0122 14:19:24.183301 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0-utilities\") pod \"redhat-marketplace-dxspf\" (UID: \"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0\") " pod="openshift-marketplace/redhat-marketplace-dxspf" Jan 22 14:19:24 crc kubenswrapper[4773]: I0122 14:19:24.183491 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0-catalog-content\") pod \"redhat-marketplace-dxspf\" (UID: \"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0\") " pod="openshift-marketplace/redhat-marketplace-dxspf" Jan 22 14:19:24 crc kubenswrapper[4773]: I0122 14:19:24.207952 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8sbqb\" (UniqueName: \"kubernetes.io/projected/de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0-kube-api-access-8sbqb\") pod \"redhat-marketplace-dxspf\" (UID: \"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0\") " pod="openshift-marketplace/redhat-marketplace-dxspf" Jan 22 14:19:24 crc kubenswrapper[4773]: I0122 14:19:24.242728 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dxspf" Jan 22 14:19:24 crc kubenswrapper[4773]: I0122 14:19:24.563021 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dxspf"] Jan 22 14:19:24 crc kubenswrapper[4773]: I0122 14:19:24.658200 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:19:24 crc kubenswrapper[4773]: E0122 14:19:24.658520 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:19:24 crc kubenswrapper[4773]: I0122 14:19:24.905381 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dxspf" event={"ID":"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0","Type":"ContainerStarted","Data":"082fccab2064d9efbf1e20f5c12ceb755cae1af93ec1ce9dcc8a2da59e93c674"} Jan 22 14:19:25 crc kubenswrapper[4773]: I0122 14:19:25.917453 4773 generic.go:334] "Generic (PLEG): container finished" podID="de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0" containerID="3d16087227280869f2ebc2617770283d6e85d0683f0afd5300651edc9b49a6d8" exitCode=0 Jan 22 14:19:25 crc kubenswrapper[4773]: I0122 14:19:25.917540 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dxspf" event={"ID":"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0","Type":"ContainerDied","Data":"3d16087227280869f2ebc2617770283d6e85d0683f0afd5300651edc9b49a6d8"} Jan 22 14:19:25 crc kubenswrapper[4773]: I0122 14:19:25.930215 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 14:19:27 crc kubenswrapper[4773]: I0122 14:19:27.946839 4773 generic.go:334] "Generic (PLEG): container finished" podID="de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0" containerID="d464990a3aef4ce94a8b307f792b723f2e3e4947ae2cad750231cd4d16590b16" exitCode=0 Jan 22 14:19:27 crc kubenswrapper[4773]: I0122 14:19:27.946938 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dxspf" event={"ID":"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0","Type":"ContainerDied","Data":"d464990a3aef4ce94a8b307f792b723f2e3e4947ae2cad750231cd4d16590b16"} Jan 22 14:19:28 crc kubenswrapper[4773]: I0122 14:19:28.963024 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dxspf" 
event={"ID":"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0","Type":"ContainerStarted","Data":"82bb21fe73d675141460ac3e0635eb8e8f49f7779f9ad91b945c75d5034ff10b"} Jan 22 14:19:28 crc kubenswrapper[4773]: I0122 14:19:28.995492 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dxspf" podStartSLOduration=3.281565854 podStartE2EDuration="5.995443218s" podCreationTimestamp="2026-01-22 14:19:23 +0000 UTC" firstStartedPulling="2026-01-22 14:19:25.929890275 +0000 UTC m=+8673.508006100" lastFinishedPulling="2026-01-22 14:19:28.643767639 +0000 UTC m=+8676.221883464" observedRunningTime="2026-01-22 14:19:28.983010655 +0000 UTC m=+8676.561126480" watchObservedRunningTime="2026-01-22 14:19:28.995443218 +0000 UTC m=+8676.573559043" Jan 22 14:19:34 crc kubenswrapper[4773]: I0122 14:19:34.243312 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dxspf" Jan 22 14:19:34 crc kubenswrapper[4773]: I0122 14:19:34.243845 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dxspf" Jan 22 14:19:34 crc kubenswrapper[4773]: I0122 14:19:34.301265 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dxspf" Jan 22 14:19:35 crc kubenswrapper[4773]: I0122 14:19:35.076016 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dxspf" Jan 22 14:19:35 crc kubenswrapper[4773]: I0122 14:19:35.138232 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dxspf"] Jan 22 14:19:37 crc kubenswrapper[4773]: I0122 14:19:37.036051 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dxspf" podUID="de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0" containerName="registry-server" containerID="cri-o://82bb21fe73d675141460ac3e0635eb8e8f49f7779f9ad91b945c75d5034ff10b" gracePeriod=2 Jan 22 14:19:37 crc kubenswrapper[4773]: I0122 14:19:37.534789 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dxspf" Jan 22 14:19:37 crc kubenswrapper[4773]: I0122 14:19:37.635951 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0-utilities\") pod \"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0\" (UID: \"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0\") " Jan 22 14:19:37 crc kubenswrapper[4773]: I0122 14:19:37.636003 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0-catalog-content\") pod \"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0\" (UID: \"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0\") " Jan 22 14:19:37 crc kubenswrapper[4773]: I0122 14:19:37.636028 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8sbqb\" (UniqueName: \"kubernetes.io/projected/de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0-kube-api-access-8sbqb\") pod \"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0\" (UID: \"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0\") " Jan 22 14:19:37 crc kubenswrapper[4773]: I0122 14:19:37.637023 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0-utilities" (OuterVolumeSpecName: "utilities") pod "de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0" (UID: "de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:19:37 crc kubenswrapper[4773]: I0122 14:19:37.641461 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0-kube-api-access-8sbqb" (OuterVolumeSpecName: "kube-api-access-8sbqb") pod "de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0" (UID: "de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0"). InnerVolumeSpecName "kube-api-access-8sbqb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:37 crc kubenswrapper[4773]: I0122 14:19:37.658095 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0" (UID: "de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:19:37 crc kubenswrapper[4773]: I0122 14:19:37.659044 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:19:37 crc kubenswrapper[4773]: E0122 14:19:37.659522 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:19:37 crc kubenswrapper[4773]: I0122 14:19:37.739774 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:37 crc kubenswrapper[4773]: I0122 14:19:37.739822 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:37 crc kubenswrapper[4773]: I0122 14:19:37.739840 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8sbqb\" (UniqueName: \"kubernetes.io/projected/de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0-kube-api-access-8sbqb\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:38 crc kubenswrapper[4773]: I0122 14:19:38.089053 4773 generic.go:334] "Generic (PLEG): container finished" podID="de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0" containerID="82bb21fe73d675141460ac3e0635eb8e8f49f7779f9ad91b945c75d5034ff10b" exitCode=0 Jan 22 14:19:38 crc kubenswrapper[4773]: I0122 14:19:38.089173 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dxspf" event={"ID":"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0","Type":"ContainerDied","Data":"82bb21fe73d675141460ac3e0635eb8e8f49f7779f9ad91b945c75d5034ff10b"} Jan 22 14:19:38 crc kubenswrapper[4773]: I0122 14:19:38.089934 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dxspf" event={"ID":"de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0","Type":"ContainerDied","Data":"082fccab2064d9efbf1e20f5c12ceb755cae1af93ec1ce9dcc8a2da59e93c674"} Jan 22 14:19:38 crc kubenswrapper[4773]: I0122 14:19:38.089260 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dxspf" Jan 22 14:19:38 crc kubenswrapper[4773]: I0122 14:19:38.089983 4773 scope.go:117] "RemoveContainer" containerID="82bb21fe73d675141460ac3e0635eb8e8f49f7779f9ad91b945c75d5034ff10b" Jan 22 14:19:38 crc kubenswrapper[4773]: I0122 14:19:38.113589 4773 scope.go:117] "RemoveContainer" containerID="d464990a3aef4ce94a8b307f792b723f2e3e4947ae2cad750231cd4d16590b16" Jan 22 14:19:38 crc kubenswrapper[4773]: I0122 14:19:38.133986 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dxspf"] Jan 22 14:19:38 crc kubenswrapper[4773]: I0122 14:19:38.145558 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dxspf"] Jan 22 14:19:38 crc kubenswrapper[4773]: I0122 14:19:38.151499 4773 scope.go:117] "RemoveContainer" containerID="3d16087227280869f2ebc2617770283d6e85d0683f0afd5300651edc9b49a6d8" Jan 22 14:19:38 crc kubenswrapper[4773]: I0122 14:19:38.206025 4773 scope.go:117] "RemoveContainer" containerID="82bb21fe73d675141460ac3e0635eb8e8f49f7779f9ad91b945c75d5034ff10b" Jan 22 14:19:38 crc kubenswrapper[4773]: E0122 14:19:38.206809 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82bb21fe73d675141460ac3e0635eb8e8f49f7779f9ad91b945c75d5034ff10b\": container with ID starting with 82bb21fe73d675141460ac3e0635eb8e8f49f7779f9ad91b945c75d5034ff10b not found: ID does not exist" containerID="82bb21fe73d675141460ac3e0635eb8e8f49f7779f9ad91b945c75d5034ff10b" Jan 22 14:19:38 crc kubenswrapper[4773]: I0122 14:19:38.206870 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82bb21fe73d675141460ac3e0635eb8e8f49f7779f9ad91b945c75d5034ff10b"} err="failed to get container status \"82bb21fe73d675141460ac3e0635eb8e8f49f7779f9ad91b945c75d5034ff10b\": rpc error: code = NotFound desc = could not find container \"82bb21fe73d675141460ac3e0635eb8e8f49f7779f9ad91b945c75d5034ff10b\": container with ID starting with 82bb21fe73d675141460ac3e0635eb8e8f49f7779f9ad91b945c75d5034ff10b not found: ID does not exist" Jan 22 14:19:38 crc kubenswrapper[4773]: I0122 14:19:38.206909 4773 scope.go:117] "RemoveContainer" containerID="d464990a3aef4ce94a8b307f792b723f2e3e4947ae2cad750231cd4d16590b16" Jan 22 14:19:38 crc kubenswrapper[4773]: E0122 14:19:38.207412 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d464990a3aef4ce94a8b307f792b723f2e3e4947ae2cad750231cd4d16590b16\": container with ID starting with d464990a3aef4ce94a8b307f792b723f2e3e4947ae2cad750231cd4d16590b16 not found: ID does not exist" containerID="d464990a3aef4ce94a8b307f792b723f2e3e4947ae2cad750231cd4d16590b16" Jan 22 14:19:38 crc kubenswrapper[4773]: I0122 14:19:38.207464 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d464990a3aef4ce94a8b307f792b723f2e3e4947ae2cad750231cd4d16590b16"} err="failed to get container status \"d464990a3aef4ce94a8b307f792b723f2e3e4947ae2cad750231cd4d16590b16\": rpc error: code = NotFound desc = could not find container \"d464990a3aef4ce94a8b307f792b723f2e3e4947ae2cad750231cd4d16590b16\": container with ID starting with d464990a3aef4ce94a8b307f792b723f2e3e4947ae2cad750231cd4d16590b16 not found: ID does not exist" Jan 22 14:19:38 crc kubenswrapper[4773]: I0122 14:19:38.207502 4773 scope.go:117] "RemoveContainer" 
containerID="3d16087227280869f2ebc2617770283d6e85d0683f0afd5300651edc9b49a6d8" Jan 22 14:19:38 crc kubenswrapper[4773]: E0122 14:19:38.208105 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d16087227280869f2ebc2617770283d6e85d0683f0afd5300651edc9b49a6d8\": container with ID starting with 3d16087227280869f2ebc2617770283d6e85d0683f0afd5300651edc9b49a6d8 not found: ID does not exist" containerID="3d16087227280869f2ebc2617770283d6e85d0683f0afd5300651edc9b49a6d8" Jan 22 14:19:38 crc kubenswrapper[4773]: I0122 14:19:38.208183 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d16087227280869f2ebc2617770283d6e85d0683f0afd5300651edc9b49a6d8"} err="failed to get container status \"3d16087227280869f2ebc2617770283d6e85d0683f0afd5300651edc9b49a6d8\": rpc error: code = NotFound desc = could not find container \"3d16087227280869f2ebc2617770283d6e85d0683f0afd5300651edc9b49a6d8\": container with ID starting with 3d16087227280869f2ebc2617770283d6e85d0683f0afd5300651edc9b49a6d8 not found: ID does not exist" Jan 22 14:19:38 crc kubenswrapper[4773]: I0122 14:19:38.673788 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0" path="/var/lib/kubelet/pods/de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0/volumes" Jan 22 14:19:48 crc kubenswrapper[4773]: I0122 14:19:48.658966 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:19:48 crc kubenswrapper[4773]: E0122 14:19:48.659889 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:19:52 crc kubenswrapper[4773]: I0122 14:19:52.242622 4773 generic.go:334] "Generic (PLEG): container finished" podID="5eb9f34d-70a6-4862-a2af-2d9811f5610a" containerID="b9c59349a5a4d9c5cf160c6ac571648b61c9548ab2c5ed0c68805c43f484f94c" exitCode=0 Jan 22 14:19:52 crc kubenswrapper[4773]: I0122 14:19:52.242755 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" event={"ID":"5eb9f34d-70a6-4862-a2af-2d9811f5610a","Type":"ContainerDied","Data":"b9c59349a5a4d9c5cf160c6ac571648b61c9548ab2c5ed0c68805c43f484f94c"} Jan 22 14:19:53 crc kubenswrapper[4773]: I0122 14:19:53.724883 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" Jan 22 14:19:53 crc kubenswrapper[4773]: I0122 14:19:53.887749 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-neutron-sriov-agent-neutron-config-0\") pod \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " Jan 22 14:19:53 crc kubenswrapper[4773]: I0122 14:19:53.887935 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-ssh-key-openstack-cell1\") pod \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " Jan 22 14:19:53 crc kubenswrapper[4773]: I0122 14:19:53.887967 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-neutron-sriov-combined-ca-bundle\") pod \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " Jan 22 14:19:53 crc kubenswrapper[4773]: I0122 14:19:53.888805 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpwk2\" (UniqueName: \"kubernetes.io/projected/5eb9f34d-70a6-4862-a2af-2d9811f5610a-kube-api-access-wpwk2\") pod \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " Jan 22 14:19:53 crc kubenswrapper[4773]: I0122 14:19:53.888904 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-inventory\") pod \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\" (UID: \"5eb9f34d-70a6-4862-a2af-2d9811f5610a\") " Jan 22 14:19:53 crc kubenswrapper[4773]: I0122 14:19:53.895921 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5eb9f34d-70a6-4862-a2af-2d9811f5610a-kube-api-access-wpwk2" (OuterVolumeSpecName: "kube-api-access-wpwk2") pod "5eb9f34d-70a6-4862-a2af-2d9811f5610a" (UID: "5eb9f34d-70a6-4862-a2af-2d9811f5610a"). InnerVolumeSpecName "kube-api-access-wpwk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:53 crc kubenswrapper[4773]: I0122 14:19:53.896103 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "5eb9f34d-70a6-4862-a2af-2d9811f5610a" (UID: "5eb9f34d-70a6-4862-a2af-2d9811f5610a"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:53 crc kubenswrapper[4773]: I0122 14:19:53.936462 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "5eb9f34d-70a6-4862-a2af-2d9811f5610a" (UID: "5eb9f34d-70a6-4862-a2af-2d9811f5610a"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:53 crc kubenswrapper[4773]: I0122 14:19:53.937778 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-neutron-sriov-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-sriov-agent-neutron-config-0") pod "5eb9f34d-70a6-4862-a2af-2d9811f5610a" (UID: "5eb9f34d-70a6-4862-a2af-2d9811f5610a"). InnerVolumeSpecName "neutron-sriov-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:53 crc kubenswrapper[4773]: I0122 14:19:53.938364 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-inventory" (OuterVolumeSpecName: "inventory") pod "5eb9f34d-70a6-4862-a2af-2d9811f5610a" (UID: "5eb9f34d-70a6-4862-a2af-2d9811f5610a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:53 crc kubenswrapper[4773]: I0122 14:19:53.992956 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpwk2\" (UniqueName: \"kubernetes.io/projected/5eb9f34d-70a6-4862-a2af-2d9811f5610a-kube-api-access-wpwk2\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:53 crc kubenswrapper[4773]: I0122 14:19:53.993011 4773 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:53 crc kubenswrapper[4773]: I0122 14:19:53.993023 4773 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-neutron-sriov-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:53 crc kubenswrapper[4773]: I0122 14:19:53.993035 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:53 crc kubenswrapper[4773]: I0122 14:19:53.993053 4773 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5eb9f34d-70a6-4862-a2af-2d9811f5610a-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.262697 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" event={"ID":"5eb9f34d-70a6-4862-a2af-2d9811f5610a","Type":"ContainerDied","Data":"9d7fe98522aae1aa8c8d63f44e3af4fe661a6a7da4082b50ceb2ed75fc4a47da"} Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.262740 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9d7fe98522aae1aa8c8d63f44e3af4fe661a6a7da4082b50ceb2ed75fc4a47da" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.262756 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-7mndd" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.376856 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc"] Jan 22 14:19:54 crc kubenswrapper[4773]: E0122 14:19:54.377420 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0" containerName="extract-utilities" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.377445 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0" containerName="extract-utilities" Jan 22 14:19:54 crc kubenswrapper[4773]: E0122 14:19:54.377479 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0" containerName="extract-content" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.377487 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0" containerName="extract-content" Jan 22 14:19:54 crc kubenswrapper[4773]: E0122 14:19:54.377516 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0" containerName="registry-server" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.377525 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0" containerName="registry-server" Jan 22 14:19:54 crc kubenswrapper[4773]: E0122 14:19:54.377550 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eb9f34d-70a6-4862-a2af-2d9811f5610a" containerName="neutron-sriov-openstack-openstack-cell1" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.377559 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eb9f34d-70a6-4862-a2af-2d9811f5610a" containerName="neutron-sriov-openstack-openstack-cell1" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.377844 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="de3adb26-9d1f-4f1d-8e18-13efbbdcc3f0" containerName="registry-server" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.377871 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eb9f34d-70a6-4862-a2af-2d9811f5610a" containerName="neutron-sriov-openstack-openstack-cell1" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.378826 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.381382 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.381446 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.382650 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-dhcp-agent-neutron-config" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.386497 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.386550 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-bzqnq" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.394211 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc"] Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.511335 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-j7dqc\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.511505 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-j7dqc\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.511729 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jf9gg\" (UniqueName: \"kubernetes.io/projected/42c256b3-a909-4067-892d-520fc7972418-kube-api-access-jf9gg\") pod \"neutron-dhcp-openstack-openstack-cell1-j7dqc\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.512086 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-ssh-key-openstack-cell1\") pod \"neutron-dhcp-openstack-openstack-cell1-j7dqc\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.512138 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-j7dqc\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.614803 4773 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-jf9gg\" (UniqueName: \"kubernetes.io/projected/42c256b3-a909-4067-892d-520fc7972418-kube-api-access-jf9gg\") pod \"neutron-dhcp-openstack-openstack-cell1-j7dqc\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.614980 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-ssh-key-openstack-cell1\") pod \"neutron-dhcp-openstack-openstack-cell1-j7dqc\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.615015 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-j7dqc\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.615225 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-j7dqc\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.615306 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-j7dqc\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.619930 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-j7dqc\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.620079 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-j7dqc\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.620512 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-ssh-key-openstack-cell1\") pod \"neutron-dhcp-openstack-openstack-cell1-j7dqc\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.623026 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-j7dqc\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.633449 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jf9gg\" (UniqueName: \"kubernetes.io/projected/42c256b3-a909-4067-892d-520fc7972418-kube-api-access-jf9gg\") pod \"neutron-dhcp-openstack-openstack-cell1-j7dqc\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" Jan 22 14:19:54 crc kubenswrapper[4773]: I0122 14:19:54.702175 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" Jan 22 14:19:55 crc kubenswrapper[4773]: I0122 14:19:55.316027 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc"] Jan 22 14:19:56 crc kubenswrapper[4773]: I0122 14:19:56.328663 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" event={"ID":"42c256b3-a909-4067-892d-520fc7972418","Type":"ContainerStarted","Data":"ee4ed400ae0ad133ef6cd3a31b18568051571d45964bd342aaaafd789a034ebf"} Jan 22 14:19:57 crc kubenswrapper[4773]: I0122 14:19:57.340084 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" event={"ID":"42c256b3-a909-4067-892d-520fc7972418","Type":"ContainerStarted","Data":"4a452937c8ec46e8b0dd5d4889978ef98397e3b908ab3f7edfd1bf4d56745d66"} Jan 22 14:19:58 crc kubenswrapper[4773]: I0122 14:19:58.377585 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" podStartSLOduration=3.225513939 podStartE2EDuration="4.377559553s" podCreationTimestamp="2026-01-22 14:19:54 +0000 UTC" firstStartedPulling="2026-01-22 14:19:55.318039372 +0000 UTC m=+8702.896155197" lastFinishedPulling="2026-01-22 14:19:56.470084986 +0000 UTC m=+8704.048200811" observedRunningTime="2026-01-22 14:19:58.368955059 +0000 UTC m=+8705.947070884" watchObservedRunningTime="2026-01-22 14:19:58.377559553 +0000 UTC m=+8705.955675378" Jan 22 14:19:59 crc kubenswrapper[4773]: I0122 14:19:59.658736 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:19:59 crc kubenswrapper[4773]: E0122 14:19:59.659670 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:20:13 crc kubenswrapper[4773]: I0122 14:20:13.658487 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:20:13 crc kubenswrapper[4773]: E0122 14:20:13.659378 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:20:27 crc kubenswrapper[4773]: I0122 14:20:27.658326 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:20:27 crc kubenswrapper[4773]: E0122 14:20:27.659101 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:20:28 crc kubenswrapper[4773]: I0122 14:20:28.110259 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-86ldw"] Jan 22 14:20:28 crc kubenswrapper[4773]: I0122 14:20:28.112756 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-86ldw" Jan 22 14:20:28 crc kubenswrapper[4773]: I0122 14:20:28.127812 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-86ldw"] Jan 22 14:20:28 crc kubenswrapper[4773]: I0122 14:20:28.212733 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmz7n\" (UniqueName: \"kubernetes.io/projected/da32627d-ed09-44b5-a5ed-b8fe46fc4828-kube-api-access-qmz7n\") pod \"redhat-operators-86ldw\" (UID: \"da32627d-ed09-44b5-a5ed-b8fe46fc4828\") " pod="openshift-marketplace/redhat-operators-86ldw" Jan 22 14:20:28 crc kubenswrapper[4773]: I0122 14:20:28.213332 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da32627d-ed09-44b5-a5ed-b8fe46fc4828-utilities\") pod \"redhat-operators-86ldw\" (UID: \"da32627d-ed09-44b5-a5ed-b8fe46fc4828\") " pod="openshift-marketplace/redhat-operators-86ldw" Jan 22 14:20:28 crc kubenswrapper[4773]: I0122 14:20:28.213524 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da32627d-ed09-44b5-a5ed-b8fe46fc4828-catalog-content\") pod \"redhat-operators-86ldw\" (UID: \"da32627d-ed09-44b5-a5ed-b8fe46fc4828\") " pod="openshift-marketplace/redhat-operators-86ldw" Jan 22 14:20:28 crc kubenswrapper[4773]: I0122 14:20:28.315729 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da32627d-ed09-44b5-a5ed-b8fe46fc4828-utilities\") pod \"redhat-operators-86ldw\" (UID: \"da32627d-ed09-44b5-a5ed-b8fe46fc4828\") " pod="openshift-marketplace/redhat-operators-86ldw" Jan 22 14:20:28 crc kubenswrapper[4773]: I0122 14:20:28.315858 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da32627d-ed09-44b5-a5ed-b8fe46fc4828-catalog-content\") pod \"redhat-operators-86ldw\" (UID: \"da32627d-ed09-44b5-a5ed-b8fe46fc4828\") " pod="openshift-marketplace/redhat-operators-86ldw" Jan 22 14:20:28 crc kubenswrapper[4773]: I0122 14:20:28.315974 4773 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-qmz7n\" (UniqueName: \"kubernetes.io/projected/da32627d-ed09-44b5-a5ed-b8fe46fc4828-kube-api-access-qmz7n\") pod \"redhat-operators-86ldw\" (UID: \"da32627d-ed09-44b5-a5ed-b8fe46fc4828\") " pod="openshift-marketplace/redhat-operators-86ldw" Jan 22 14:20:28 crc kubenswrapper[4773]: I0122 14:20:28.316459 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da32627d-ed09-44b5-a5ed-b8fe46fc4828-utilities\") pod \"redhat-operators-86ldw\" (UID: \"da32627d-ed09-44b5-a5ed-b8fe46fc4828\") " pod="openshift-marketplace/redhat-operators-86ldw" Jan 22 14:20:28 crc kubenswrapper[4773]: I0122 14:20:28.316474 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da32627d-ed09-44b5-a5ed-b8fe46fc4828-catalog-content\") pod \"redhat-operators-86ldw\" (UID: \"da32627d-ed09-44b5-a5ed-b8fe46fc4828\") " pod="openshift-marketplace/redhat-operators-86ldw" Jan 22 14:20:28 crc kubenswrapper[4773]: I0122 14:20:28.354335 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmz7n\" (UniqueName: \"kubernetes.io/projected/da32627d-ed09-44b5-a5ed-b8fe46fc4828-kube-api-access-qmz7n\") pod \"redhat-operators-86ldw\" (UID: \"da32627d-ed09-44b5-a5ed-b8fe46fc4828\") " pod="openshift-marketplace/redhat-operators-86ldw" Jan 22 14:20:28 crc kubenswrapper[4773]: I0122 14:20:28.434486 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-86ldw" Jan 22 14:20:29 crc kubenswrapper[4773]: I0122 14:20:29.040732 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-86ldw"] Jan 22 14:20:29 crc kubenswrapper[4773]: I0122 14:20:29.694468 4773 generic.go:334] "Generic (PLEG): container finished" podID="da32627d-ed09-44b5-a5ed-b8fe46fc4828" containerID="550cd7fcd9438086a3afd6ee587cc37700be5fed8f5d14652ab87e842d4ce4d7" exitCode=0 Jan 22 14:20:29 crc kubenswrapper[4773]: I0122 14:20:29.694569 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86ldw" event={"ID":"da32627d-ed09-44b5-a5ed-b8fe46fc4828","Type":"ContainerDied","Data":"550cd7fcd9438086a3afd6ee587cc37700be5fed8f5d14652ab87e842d4ce4d7"} Jan 22 14:20:29 crc kubenswrapper[4773]: I0122 14:20:29.695065 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86ldw" event={"ID":"da32627d-ed09-44b5-a5ed-b8fe46fc4828","Type":"ContainerStarted","Data":"b3e981a9f701249e5c7ddd59577f81a707508a147886c95de7b06e5be0f9a08b"} Jan 22 14:20:30 crc kubenswrapper[4773]: I0122 14:20:30.306342 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rvhzk"] Jan 22 14:20:30 crc kubenswrapper[4773]: I0122 14:20:30.309224 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rvhzk" Jan 22 14:20:30 crc kubenswrapper[4773]: I0122 14:20:30.323084 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rvhzk"] Jan 22 14:20:30 crc kubenswrapper[4773]: I0122 14:20:30.467551 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/527ccab1-972a-4053-8dfd-2959d2634200-utilities\") pod \"certified-operators-rvhzk\" (UID: \"527ccab1-972a-4053-8dfd-2959d2634200\") " pod="openshift-marketplace/certified-operators-rvhzk" Jan 22 14:20:30 crc kubenswrapper[4773]: I0122 14:20:30.467641 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/527ccab1-972a-4053-8dfd-2959d2634200-catalog-content\") pod \"certified-operators-rvhzk\" (UID: \"527ccab1-972a-4053-8dfd-2959d2634200\") " pod="openshift-marketplace/certified-operators-rvhzk" Jan 22 14:20:30 crc kubenswrapper[4773]: I0122 14:20:30.468334 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vnz2\" (UniqueName: \"kubernetes.io/projected/527ccab1-972a-4053-8dfd-2959d2634200-kube-api-access-9vnz2\") pod \"certified-operators-rvhzk\" (UID: \"527ccab1-972a-4053-8dfd-2959d2634200\") " pod="openshift-marketplace/certified-operators-rvhzk" Jan 22 14:20:30 crc kubenswrapper[4773]: I0122 14:20:30.572302 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vnz2\" (UniqueName: \"kubernetes.io/projected/527ccab1-972a-4053-8dfd-2959d2634200-kube-api-access-9vnz2\") pod \"certified-operators-rvhzk\" (UID: \"527ccab1-972a-4053-8dfd-2959d2634200\") " pod="openshift-marketplace/certified-operators-rvhzk" Jan 22 14:20:30 crc kubenswrapper[4773]: I0122 14:20:30.572389 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/527ccab1-972a-4053-8dfd-2959d2634200-utilities\") pod \"certified-operators-rvhzk\" (UID: \"527ccab1-972a-4053-8dfd-2959d2634200\") " pod="openshift-marketplace/certified-operators-rvhzk" Jan 22 14:20:30 crc kubenswrapper[4773]: I0122 14:20:30.572443 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/527ccab1-972a-4053-8dfd-2959d2634200-catalog-content\") pod \"certified-operators-rvhzk\" (UID: \"527ccab1-972a-4053-8dfd-2959d2634200\") " pod="openshift-marketplace/certified-operators-rvhzk" Jan 22 14:20:30 crc kubenswrapper[4773]: I0122 14:20:30.573062 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/527ccab1-972a-4053-8dfd-2959d2634200-utilities\") pod \"certified-operators-rvhzk\" (UID: \"527ccab1-972a-4053-8dfd-2959d2634200\") " pod="openshift-marketplace/certified-operators-rvhzk" Jan 22 14:20:30 crc kubenswrapper[4773]: I0122 14:20:30.573169 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/527ccab1-972a-4053-8dfd-2959d2634200-catalog-content\") pod \"certified-operators-rvhzk\" (UID: \"527ccab1-972a-4053-8dfd-2959d2634200\") " pod="openshift-marketplace/certified-operators-rvhzk" Jan 22 14:20:30 crc kubenswrapper[4773]: I0122 14:20:30.595712 4773 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9vnz2\" (UniqueName: \"kubernetes.io/projected/527ccab1-972a-4053-8dfd-2959d2634200-kube-api-access-9vnz2\") pod \"certified-operators-rvhzk\" (UID: \"527ccab1-972a-4053-8dfd-2959d2634200\") " pod="openshift-marketplace/certified-operators-rvhzk" Jan 22 14:20:30 crc kubenswrapper[4773]: I0122 14:20:30.640088 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rvhzk" Jan 22 14:20:31 crc kubenswrapper[4773]: I0122 14:20:31.230945 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rvhzk"] Jan 22 14:20:31 crc kubenswrapper[4773]: W0122 14:20:31.231628 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod527ccab1_972a_4053_8dfd_2959d2634200.slice/crio-f0feab76b454db7b444997a3370dc9507419aca4623786898d6c7a1dd1d18426 WatchSource:0}: Error finding container f0feab76b454db7b444997a3370dc9507419aca4623786898d6c7a1dd1d18426: Status 404 returned error can't find the container with id f0feab76b454db7b444997a3370dc9507419aca4623786898d6c7a1dd1d18426 Jan 22 14:20:31 crc kubenswrapper[4773]: I0122 14:20:31.730589 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86ldw" event={"ID":"da32627d-ed09-44b5-a5ed-b8fe46fc4828","Type":"ContainerStarted","Data":"9907fc381d9c2200bd8257097d53653b06160675eba246ebe9e29baaf7e9f2f1"} Jan 22 14:20:31 crc kubenswrapper[4773]: I0122 14:20:31.733559 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rvhzk" event={"ID":"527ccab1-972a-4053-8dfd-2959d2634200","Type":"ContainerStarted","Data":"f0feab76b454db7b444997a3370dc9507419aca4623786898d6c7a1dd1d18426"} Jan 22 14:20:32 crc kubenswrapper[4773]: I0122 14:20:32.772938 4773 generic.go:334] "Generic (PLEG): container finished" podID="527ccab1-972a-4053-8dfd-2959d2634200" containerID="19b7248c2b8ef930f848d6de7968a6a7db3e594486184b1728b0b50a7005dbf6" exitCode=0 Jan 22 14:20:32 crc kubenswrapper[4773]: I0122 14:20:32.774592 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rvhzk" event={"ID":"527ccab1-972a-4053-8dfd-2959d2634200","Type":"ContainerDied","Data":"19b7248c2b8ef930f848d6de7968a6a7db3e594486184b1728b0b50a7005dbf6"} Jan 22 14:20:34 crc kubenswrapper[4773]: I0122 14:20:34.797576 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rvhzk" event={"ID":"527ccab1-972a-4053-8dfd-2959d2634200","Type":"ContainerStarted","Data":"2c2ea4ff77a7da78f20977eca9578756c09fbf34a86972437039a1d4c04ef078"} Jan 22 14:20:36 crc kubenswrapper[4773]: I0122 14:20:36.831803 4773 generic.go:334] "Generic (PLEG): container finished" podID="da32627d-ed09-44b5-a5ed-b8fe46fc4828" containerID="9907fc381d9c2200bd8257097d53653b06160675eba246ebe9e29baaf7e9f2f1" exitCode=0 Jan 22 14:20:36 crc kubenswrapper[4773]: I0122 14:20:36.831891 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86ldw" event={"ID":"da32627d-ed09-44b5-a5ed-b8fe46fc4828","Type":"ContainerDied","Data":"9907fc381d9c2200bd8257097d53653b06160675eba246ebe9e29baaf7e9f2f1"} Jan 22 14:20:37 crc kubenswrapper[4773]: I0122 14:20:37.843105 4773 generic.go:334] "Generic (PLEG): container finished" podID="527ccab1-972a-4053-8dfd-2959d2634200" 
containerID="2c2ea4ff77a7da78f20977eca9578756c09fbf34a86972437039a1d4c04ef078" exitCode=0 Jan 22 14:20:37 crc kubenswrapper[4773]: I0122 14:20:37.843157 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rvhzk" event={"ID":"527ccab1-972a-4053-8dfd-2959d2634200","Type":"ContainerDied","Data":"2c2ea4ff77a7da78f20977eca9578756c09fbf34a86972437039a1d4c04ef078"} Jan 22 14:20:38 crc kubenswrapper[4773]: I0122 14:20:38.854924 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86ldw" event={"ID":"da32627d-ed09-44b5-a5ed-b8fe46fc4828","Type":"ContainerStarted","Data":"366af107e49a7ebbfffde7902d81657fd0ffcfe4177b78871c79cd9f9bd258b6"} Jan 22 14:20:38 crc kubenswrapper[4773]: I0122 14:20:38.867356 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rvhzk" event={"ID":"527ccab1-972a-4053-8dfd-2959d2634200","Type":"ContainerStarted","Data":"5d45c6dba23f5171e09fa12dfc5178258cadda370a16c1ee82440afeb29e484b"} Jan 22 14:20:38 crc kubenswrapper[4773]: I0122 14:20:38.897635 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-86ldw" podStartSLOduration=2.201410713 podStartE2EDuration="10.897600418s" podCreationTimestamp="2026-01-22 14:20:28 +0000 UTC" firstStartedPulling="2026-01-22 14:20:29.696689269 +0000 UTC m=+8737.274805094" lastFinishedPulling="2026-01-22 14:20:38.392878974 +0000 UTC m=+8745.970994799" observedRunningTime="2026-01-22 14:20:38.874993267 +0000 UTC m=+8746.453109102" watchObservedRunningTime="2026-01-22 14:20:38.897600418 +0000 UTC m=+8746.475716243" Jan 22 14:20:38 crc kubenswrapper[4773]: I0122 14:20:38.915244 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rvhzk" podStartSLOduration=3.23439359 podStartE2EDuration="8.915221688s" podCreationTimestamp="2026-01-22 14:20:30 +0000 UTC" firstStartedPulling="2026-01-22 14:20:32.776976536 +0000 UTC m=+8740.355092371" lastFinishedPulling="2026-01-22 14:20:38.457804634 +0000 UTC m=+8746.035920469" observedRunningTime="2026-01-22 14:20:38.905910944 +0000 UTC m=+8746.484026769" watchObservedRunningTime="2026-01-22 14:20:38.915221688 +0000 UTC m=+8746.493337503" Jan 22 14:20:39 crc kubenswrapper[4773]: I0122 14:20:39.657583 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:20:40 crc kubenswrapper[4773]: I0122 14:20:40.640955 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rvhzk" Jan 22 14:20:40 crc kubenswrapper[4773]: I0122 14:20:40.641271 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rvhzk" Jan 22 14:20:40 crc kubenswrapper[4773]: I0122 14:20:40.967944 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"f24ce25274805a35369c26a155360435b5830f3b7c26eca6af51ea6c67fa9bc6"} Jan 22 14:20:41 crc kubenswrapper[4773]: I0122 14:20:41.697842 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-rvhzk" podUID="527ccab1-972a-4053-8dfd-2959d2634200" containerName="registry-server" probeResult="failure" output=< Jan 22 14:20:41 crc kubenswrapper[4773]: timeout: 
failed to connect service ":50051" within 1s Jan 22 14:20:41 crc kubenswrapper[4773]: > Jan 22 14:20:48 crc kubenswrapper[4773]: I0122 14:20:48.435676 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-86ldw" Jan 22 14:20:48 crc kubenswrapper[4773]: I0122 14:20:48.436331 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-86ldw" Jan 22 14:20:48 crc kubenswrapper[4773]: I0122 14:20:48.487277 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-86ldw" Jan 22 14:20:49 crc kubenswrapper[4773]: I0122 14:20:49.118598 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-86ldw" Jan 22 14:20:49 crc kubenswrapper[4773]: I0122 14:20:49.174134 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-86ldw"] Jan 22 14:20:50 crc kubenswrapper[4773]: I0122 14:20:50.695804 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rvhzk" Jan 22 14:20:50 crc kubenswrapper[4773]: I0122 14:20:50.762399 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rvhzk" Jan 22 14:20:51 crc kubenswrapper[4773]: I0122 14:20:51.079439 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-86ldw" podUID="da32627d-ed09-44b5-a5ed-b8fe46fc4828" containerName="registry-server" containerID="cri-o://366af107e49a7ebbfffde7902d81657fd0ffcfe4177b78871c79cd9f9bd258b6" gracePeriod=2 Jan 22 14:20:51 crc kubenswrapper[4773]: I0122 14:20:51.132043 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rvhzk"] Jan 22 14:20:51 crc kubenswrapper[4773]: I0122 14:20:51.646606 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-86ldw" Jan 22 14:20:51 crc kubenswrapper[4773]: I0122 14:20:51.699066 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da32627d-ed09-44b5-a5ed-b8fe46fc4828-utilities\") pod \"da32627d-ed09-44b5-a5ed-b8fe46fc4828\" (UID: \"da32627d-ed09-44b5-a5ed-b8fe46fc4828\") " Jan 22 14:20:51 crc kubenswrapper[4773]: I0122 14:20:51.699579 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmz7n\" (UniqueName: \"kubernetes.io/projected/da32627d-ed09-44b5-a5ed-b8fe46fc4828-kube-api-access-qmz7n\") pod \"da32627d-ed09-44b5-a5ed-b8fe46fc4828\" (UID: \"da32627d-ed09-44b5-a5ed-b8fe46fc4828\") " Jan 22 14:20:51 crc kubenswrapper[4773]: I0122 14:20:51.699676 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da32627d-ed09-44b5-a5ed-b8fe46fc4828-catalog-content\") pod \"da32627d-ed09-44b5-a5ed-b8fe46fc4828\" (UID: \"da32627d-ed09-44b5-a5ed-b8fe46fc4828\") " Jan 22 14:20:51 crc kubenswrapper[4773]: I0122 14:20:51.699832 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da32627d-ed09-44b5-a5ed-b8fe46fc4828-utilities" (OuterVolumeSpecName: "utilities") pod "da32627d-ed09-44b5-a5ed-b8fe46fc4828" (UID: "da32627d-ed09-44b5-a5ed-b8fe46fc4828"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:20:51 crc kubenswrapper[4773]: I0122 14:20:51.700600 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da32627d-ed09-44b5-a5ed-b8fe46fc4828-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:20:51 crc kubenswrapper[4773]: I0122 14:20:51.708642 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da32627d-ed09-44b5-a5ed-b8fe46fc4828-kube-api-access-qmz7n" (OuterVolumeSpecName: "kube-api-access-qmz7n") pod "da32627d-ed09-44b5-a5ed-b8fe46fc4828" (UID: "da32627d-ed09-44b5-a5ed-b8fe46fc4828"). InnerVolumeSpecName "kube-api-access-qmz7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:20:51 crc kubenswrapper[4773]: I0122 14:20:51.810301 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmz7n\" (UniqueName: \"kubernetes.io/projected/da32627d-ed09-44b5-a5ed-b8fe46fc4828-kube-api-access-qmz7n\") on node \"crc\" DevicePath \"\"" Jan 22 14:20:51 crc kubenswrapper[4773]: I0122 14:20:51.826259 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da32627d-ed09-44b5-a5ed-b8fe46fc4828-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "da32627d-ed09-44b5-a5ed-b8fe46fc4828" (UID: "da32627d-ed09-44b5-a5ed-b8fe46fc4828"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:20:51 crc kubenswrapper[4773]: I0122 14:20:51.913398 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da32627d-ed09-44b5-a5ed-b8fe46fc4828-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.097314 4773 generic.go:334] "Generic (PLEG): container finished" podID="da32627d-ed09-44b5-a5ed-b8fe46fc4828" containerID="366af107e49a7ebbfffde7902d81657fd0ffcfe4177b78871c79cd9f9bd258b6" exitCode=0 Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.097460 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-86ldw" Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.097516 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86ldw" event={"ID":"da32627d-ed09-44b5-a5ed-b8fe46fc4828","Type":"ContainerDied","Data":"366af107e49a7ebbfffde7902d81657fd0ffcfe4177b78871c79cd9f9bd258b6"} Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.097576 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-86ldw" event={"ID":"da32627d-ed09-44b5-a5ed-b8fe46fc4828","Type":"ContainerDied","Data":"b3e981a9f701249e5c7ddd59577f81a707508a147886c95de7b06e5be0f9a08b"} Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.097656 4773 scope.go:117] "RemoveContainer" containerID="366af107e49a7ebbfffde7902d81657fd0ffcfe4177b78871c79cd9f9bd258b6" Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.097677 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rvhzk" podUID="527ccab1-972a-4053-8dfd-2959d2634200" containerName="registry-server" containerID="cri-o://5d45c6dba23f5171e09fa12dfc5178258cadda370a16c1ee82440afeb29e484b" gracePeriod=2 Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.125433 4773 scope.go:117] "RemoveContainer" containerID="9907fc381d9c2200bd8257097d53653b06160675eba246ebe9e29baaf7e9f2f1" Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.154407 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-86ldw"] Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.166710 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-86ldw"] Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.170954 4773 scope.go:117] "RemoveContainer" containerID="550cd7fcd9438086a3afd6ee587cc37700be5fed8f5d14652ab87e842d4ce4d7" Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.286320 4773 scope.go:117] "RemoveContainer" containerID="366af107e49a7ebbfffde7902d81657fd0ffcfe4177b78871c79cd9f9bd258b6" Jan 22 14:20:52 crc kubenswrapper[4773]: E0122 14:20:52.286868 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"366af107e49a7ebbfffde7902d81657fd0ffcfe4177b78871c79cd9f9bd258b6\": container with ID starting with 366af107e49a7ebbfffde7902d81657fd0ffcfe4177b78871c79cd9f9bd258b6 not found: ID does not exist" containerID="366af107e49a7ebbfffde7902d81657fd0ffcfe4177b78871c79cd9f9bd258b6" Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.286917 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"366af107e49a7ebbfffde7902d81657fd0ffcfe4177b78871c79cd9f9bd258b6"} err="failed to get container status \"366af107e49a7ebbfffde7902d81657fd0ffcfe4177b78871c79cd9f9bd258b6\": rpc error: code = NotFound desc = could not find container \"366af107e49a7ebbfffde7902d81657fd0ffcfe4177b78871c79cd9f9bd258b6\": container with ID starting with 366af107e49a7ebbfffde7902d81657fd0ffcfe4177b78871c79cd9f9bd258b6 not found: ID does not exist" Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.286949 4773 scope.go:117] "RemoveContainer" containerID="9907fc381d9c2200bd8257097d53653b06160675eba246ebe9e29baaf7e9f2f1" Jan 22 14:20:52 crc kubenswrapper[4773]: E0122 14:20:52.287616 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"9907fc381d9c2200bd8257097d53653b06160675eba246ebe9e29baaf7e9f2f1\": container with ID starting with 9907fc381d9c2200bd8257097d53653b06160675eba246ebe9e29baaf7e9f2f1 not found: ID does not exist" containerID="9907fc381d9c2200bd8257097d53653b06160675eba246ebe9e29baaf7e9f2f1" Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.287706 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9907fc381d9c2200bd8257097d53653b06160675eba246ebe9e29baaf7e9f2f1"} err="failed to get container status \"9907fc381d9c2200bd8257097d53653b06160675eba246ebe9e29baaf7e9f2f1\": rpc error: code = NotFound desc = could not find container \"9907fc381d9c2200bd8257097d53653b06160675eba246ebe9e29baaf7e9f2f1\": container with ID starting with 9907fc381d9c2200bd8257097d53653b06160675eba246ebe9e29baaf7e9f2f1 not found: ID does not exist" Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.287760 4773 scope.go:117] "RemoveContainer" containerID="550cd7fcd9438086a3afd6ee587cc37700be5fed8f5d14652ab87e842d4ce4d7" Jan 22 14:20:52 crc kubenswrapper[4773]: E0122 14:20:52.288158 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"550cd7fcd9438086a3afd6ee587cc37700be5fed8f5d14652ab87e842d4ce4d7\": container with ID starting with 550cd7fcd9438086a3afd6ee587cc37700be5fed8f5d14652ab87e842d4ce4d7 not found: ID does not exist" containerID="550cd7fcd9438086a3afd6ee587cc37700be5fed8f5d14652ab87e842d4ce4d7" Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.288202 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"550cd7fcd9438086a3afd6ee587cc37700be5fed8f5d14652ab87e842d4ce4d7"} err="failed to get container status \"550cd7fcd9438086a3afd6ee587cc37700be5fed8f5d14652ab87e842d4ce4d7\": rpc error: code = NotFound desc = could not find container \"550cd7fcd9438086a3afd6ee587cc37700be5fed8f5d14652ab87e842d4ce4d7\": container with ID starting with 550cd7fcd9438086a3afd6ee587cc37700be5fed8f5d14652ab87e842d4ce4d7 not found: ID does not exist" Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.590035 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rvhzk" Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.674375 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da32627d-ed09-44b5-a5ed-b8fe46fc4828" path="/var/lib/kubelet/pods/da32627d-ed09-44b5-a5ed-b8fe46fc4828/volumes" Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.737488 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/527ccab1-972a-4053-8dfd-2959d2634200-catalog-content\") pod \"527ccab1-972a-4053-8dfd-2959d2634200\" (UID: \"527ccab1-972a-4053-8dfd-2959d2634200\") " Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.739600 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vnz2\" (UniqueName: \"kubernetes.io/projected/527ccab1-972a-4053-8dfd-2959d2634200-kube-api-access-9vnz2\") pod \"527ccab1-972a-4053-8dfd-2959d2634200\" (UID: \"527ccab1-972a-4053-8dfd-2959d2634200\") " Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.739924 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/527ccab1-972a-4053-8dfd-2959d2634200-utilities\") pod \"527ccab1-972a-4053-8dfd-2959d2634200\" (UID: \"527ccab1-972a-4053-8dfd-2959d2634200\") " Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.740789 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/527ccab1-972a-4053-8dfd-2959d2634200-utilities" (OuterVolumeSpecName: "utilities") pod "527ccab1-972a-4053-8dfd-2959d2634200" (UID: "527ccab1-972a-4053-8dfd-2959d2634200"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.742491 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/527ccab1-972a-4053-8dfd-2959d2634200-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.745661 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/527ccab1-972a-4053-8dfd-2959d2634200-kube-api-access-9vnz2" (OuterVolumeSpecName: "kube-api-access-9vnz2") pod "527ccab1-972a-4053-8dfd-2959d2634200" (UID: "527ccab1-972a-4053-8dfd-2959d2634200"). InnerVolumeSpecName "kube-api-access-9vnz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.786051 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/527ccab1-972a-4053-8dfd-2959d2634200-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "527ccab1-972a-4053-8dfd-2959d2634200" (UID: "527ccab1-972a-4053-8dfd-2959d2634200"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.844564 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/527ccab1-972a-4053-8dfd-2959d2634200-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:20:52 crc kubenswrapper[4773]: I0122 14:20:52.844622 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vnz2\" (UniqueName: \"kubernetes.io/projected/527ccab1-972a-4053-8dfd-2959d2634200-kube-api-access-9vnz2\") on node \"crc\" DevicePath \"\"" Jan 22 14:20:53 crc kubenswrapper[4773]: I0122 14:20:53.111004 4773 generic.go:334] "Generic (PLEG): container finished" podID="527ccab1-972a-4053-8dfd-2959d2634200" containerID="5d45c6dba23f5171e09fa12dfc5178258cadda370a16c1ee82440afeb29e484b" exitCode=0 Jan 22 14:20:53 crc kubenswrapper[4773]: I0122 14:20:53.111050 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rvhzk" event={"ID":"527ccab1-972a-4053-8dfd-2959d2634200","Type":"ContainerDied","Data":"5d45c6dba23f5171e09fa12dfc5178258cadda370a16c1ee82440afeb29e484b"} Jan 22 14:20:53 crc kubenswrapper[4773]: I0122 14:20:53.111079 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rvhzk" event={"ID":"527ccab1-972a-4053-8dfd-2959d2634200","Type":"ContainerDied","Data":"f0feab76b454db7b444997a3370dc9507419aca4623786898d6c7a1dd1d18426"} Jan 22 14:20:53 crc kubenswrapper[4773]: I0122 14:20:53.111094 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rvhzk" Jan 22 14:20:53 crc kubenswrapper[4773]: I0122 14:20:53.111102 4773 scope.go:117] "RemoveContainer" containerID="5d45c6dba23f5171e09fa12dfc5178258cadda370a16c1ee82440afeb29e484b" Jan 22 14:20:53 crc kubenswrapper[4773]: I0122 14:20:53.134339 4773 scope.go:117] "RemoveContainer" containerID="2c2ea4ff77a7da78f20977eca9578756c09fbf34a86972437039a1d4c04ef078" Jan 22 14:20:53 crc kubenswrapper[4773]: I0122 14:20:53.156084 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rvhzk"] Jan 22 14:20:53 crc kubenswrapper[4773]: I0122 14:20:53.175644 4773 scope.go:117] "RemoveContainer" containerID="19b7248c2b8ef930f848d6de7968a6a7db3e594486184b1728b0b50a7005dbf6" Jan 22 14:20:53 crc kubenswrapper[4773]: I0122 14:20:53.193071 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rvhzk"] Jan 22 14:20:53 crc kubenswrapper[4773]: I0122 14:20:53.200230 4773 scope.go:117] "RemoveContainer" containerID="5d45c6dba23f5171e09fa12dfc5178258cadda370a16c1ee82440afeb29e484b" Jan 22 14:20:53 crc kubenswrapper[4773]: E0122 14:20:53.200860 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d45c6dba23f5171e09fa12dfc5178258cadda370a16c1ee82440afeb29e484b\": container with ID starting with 5d45c6dba23f5171e09fa12dfc5178258cadda370a16c1ee82440afeb29e484b not found: ID does not exist" containerID="5d45c6dba23f5171e09fa12dfc5178258cadda370a16c1ee82440afeb29e484b" Jan 22 14:20:53 crc kubenswrapper[4773]: I0122 14:20:53.200965 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d45c6dba23f5171e09fa12dfc5178258cadda370a16c1ee82440afeb29e484b"} err="failed to get container status 
\"5d45c6dba23f5171e09fa12dfc5178258cadda370a16c1ee82440afeb29e484b\": rpc error: code = NotFound desc = could not find container \"5d45c6dba23f5171e09fa12dfc5178258cadda370a16c1ee82440afeb29e484b\": container with ID starting with 5d45c6dba23f5171e09fa12dfc5178258cadda370a16c1ee82440afeb29e484b not found: ID does not exist" Jan 22 14:20:53 crc kubenswrapper[4773]: I0122 14:20:53.201002 4773 scope.go:117] "RemoveContainer" containerID="2c2ea4ff77a7da78f20977eca9578756c09fbf34a86972437039a1d4c04ef078" Jan 22 14:20:53 crc kubenswrapper[4773]: E0122 14:20:53.201330 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c2ea4ff77a7da78f20977eca9578756c09fbf34a86972437039a1d4c04ef078\": container with ID starting with 2c2ea4ff77a7da78f20977eca9578756c09fbf34a86972437039a1d4c04ef078 not found: ID does not exist" containerID="2c2ea4ff77a7da78f20977eca9578756c09fbf34a86972437039a1d4c04ef078" Jan 22 14:20:53 crc kubenswrapper[4773]: I0122 14:20:53.201368 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c2ea4ff77a7da78f20977eca9578756c09fbf34a86972437039a1d4c04ef078"} err="failed to get container status \"2c2ea4ff77a7da78f20977eca9578756c09fbf34a86972437039a1d4c04ef078\": rpc error: code = NotFound desc = could not find container \"2c2ea4ff77a7da78f20977eca9578756c09fbf34a86972437039a1d4c04ef078\": container with ID starting with 2c2ea4ff77a7da78f20977eca9578756c09fbf34a86972437039a1d4c04ef078 not found: ID does not exist" Jan 22 14:20:53 crc kubenswrapper[4773]: I0122 14:20:53.201399 4773 scope.go:117] "RemoveContainer" containerID="19b7248c2b8ef930f848d6de7968a6a7db3e594486184b1728b0b50a7005dbf6" Jan 22 14:20:53 crc kubenswrapper[4773]: E0122 14:20:53.201717 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19b7248c2b8ef930f848d6de7968a6a7db3e594486184b1728b0b50a7005dbf6\": container with ID starting with 19b7248c2b8ef930f848d6de7968a6a7db3e594486184b1728b0b50a7005dbf6 not found: ID does not exist" containerID="19b7248c2b8ef930f848d6de7968a6a7db3e594486184b1728b0b50a7005dbf6" Jan 22 14:20:53 crc kubenswrapper[4773]: I0122 14:20:53.201750 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19b7248c2b8ef930f848d6de7968a6a7db3e594486184b1728b0b50a7005dbf6"} err="failed to get container status \"19b7248c2b8ef930f848d6de7968a6a7db3e594486184b1728b0b50a7005dbf6\": rpc error: code = NotFound desc = could not find container \"19b7248c2b8ef930f848d6de7968a6a7db3e594486184b1728b0b50a7005dbf6\": container with ID starting with 19b7248c2b8ef930f848d6de7968a6a7db3e594486184b1728b0b50a7005dbf6 not found: ID does not exist" Jan 22 14:20:54 crc kubenswrapper[4773]: I0122 14:20:54.670409 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="527ccab1-972a-4053-8dfd-2959d2634200" path="/var/lib/kubelet/pods/527ccab1-972a-4053-8dfd-2959d2634200/volumes" Jan 22 14:21:14 crc kubenswrapper[4773]: I0122 14:21:14.354006 4773 generic.go:334] "Generic (PLEG): container finished" podID="42c256b3-a909-4067-892d-520fc7972418" containerID="4a452937c8ec46e8b0dd5d4889978ef98397e3b908ab3f7edfd1bf4d56745d66" exitCode=0 Jan 22 14:21:14 crc kubenswrapper[4773]: I0122 14:21:14.354097 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" 
event={"ID":"42c256b3-a909-4067-892d-520fc7972418","Type":"ContainerDied","Data":"4a452937c8ec46e8b0dd5d4889978ef98397e3b908ab3f7edfd1bf4d56745d66"} Jan 22 14:21:15 crc kubenswrapper[4773]: I0122 14:21:15.873310 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" Jan 22 14:21:16 crc kubenswrapper[4773]: I0122 14:21:16.025513 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-neutron-dhcp-agent-neutron-config-0\") pod \"42c256b3-a909-4067-892d-520fc7972418\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " Jan 22 14:21:16 crc kubenswrapper[4773]: I0122 14:21:16.025655 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jf9gg\" (UniqueName: \"kubernetes.io/projected/42c256b3-a909-4067-892d-520fc7972418-kube-api-access-jf9gg\") pod \"42c256b3-a909-4067-892d-520fc7972418\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " Jan 22 14:21:16 crc kubenswrapper[4773]: I0122 14:21:16.025764 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-inventory\") pod \"42c256b3-a909-4067-892d-520fc7972418\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " Jan 22 14:21:16 crc kubenswrapper[4773]: I0122 14:21:16.025830 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-neutron-dhcp-combined-ca-bundle\") pod \"42c256b3-a909-4067-892d-520fc7972418\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " Jan 22 14:21:16 crc kubenswrapper[4773]: I0122 14:21:16.025949 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-ssh-key-openstack-cell1\") pod \"42c256b3-a909-4067-892d-520fc7972418\" (UID: \"42c256b3-a909-4067-892d-520fc7972418\") " Jan 22 14:21:16 crc kubenswrapper[4773]: I0122 14:21:16.030977 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42c256b3-a909-4067-892d-520fc7972418-kube-api-access-jf9gg" (OuterVolumeSpecName: "kube-api-access-jf9gg") pod "42c256b3-a909-4067-892d-520fc7972418" (UID: "42c256b3-a909-4067-892d-520fc7972418"). InnerVolumeSpecName "kube-api-access-jf9gg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:21:16 crc kubenswrapper[4773]: I0122 14:21:16.031967 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "42c256b3-a909-4067-892d-520fc7972418" (UID: "42c256b3-a909-4067-892d-520fc7972418"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:21:16 crc kubenswrapper[4773]: I0122 14:21:16.055734 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "42c256b3-a909-4067-892d-520fc7972418" (UID: "42c256b3-a909-4067-892d-520fc7972418"). 
InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:21:16 crc kubenswrapper[4773]: I0122 14:21:16.057272 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-inventory" (OuterVolumeSpecName: "inventory") pod "42c256b3-a909-4067-892d-520fc7972418" (UID: "42c256b3-a909-4067-892d-520fc7972418"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:21:16 crc kubenswrapper[4773]: I0122 14:21:16.060665 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-neutron-dhcp-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-dhcp-agent-neutron-config-0") pod "42c256b3-a909-4067-892d-520fc7972418" (UID: "42c256b3-a909-4067-892d-520fc7972418"). InnerVolumeSpecName "neutron-dhcp-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:21:16 crc kubenswrapper[4773]: I0122 14:21:16.129635 4773 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:16 crc kubenswrapper[4773]: I0122 14:21:16.129887 4773 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:16 crc kubenswrapper[4773]: I0122 14:21:16.129898 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:16 crc kubenswrapper[4773]: I0122 14:21:16.129907 4773 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/42c256b3-a909-4067-892d-520fc7972418-neutron-dhcp-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:16 crc kubenswrapper[4773]: I0122 14:21:16.129918 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jf9gg\" (UniqueName: \"kubernetes.io/projected/42c256b3-a909-4067-892d-520fc7972418-kube-api-access-jf9gg\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:16 crc kubenswrapper[4773]: I0122 14:21:16.383958 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" event={"ID":"42c256b3-a909-4067-892d-520fc7972418","Type":"ContainerDied","Data":"ee4ed400ae0ad133ef6cd3a31b18568051571d45964bd342aaaafd789a034ebf"} Jan 22 14:21:16 crc kubenswrapper[4773]: I0122 14:21:16.384002 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee4ed400ae0ad133ef6cd3a31b18568051571d45964bd342aaaafd789a034ebf" Jan 22 14:21:16 crc kubenswrapper[4773]: I0122 14:21:16.384073 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-j7dqc" Jan 22 14:21:22 crc kubenswrapper[4773]: I0122 14:21:22.458305 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 14:21:22 crc kubenswrapper[4773]: I0122 14:21:22.459220 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="92a09152-089d-4802-ae3a-5e9d84b996c4" containerName="nova-cell0-conductor-conductor" containerID="cri-o://ccd263a6bc7321ba12f623f00acda87138817526b3d968e708cb10e20070ce89" gracePeriod=30 Jan 22 14:21:22 crc kubenswrapper[4773]: I0122 14:21:22.542178 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 14:21:22 crc kubenswrapper[4773]: I0122 14:21:22.542423 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="6b7deba1-1cc3-4625-a248-3fb1aa70bf69" containerName="nova-cell1-conductor-conductor" containerID="cri-o://f1a13802a0f97a1cda76d9f4d131baf0d652ef6c24cefdecc22b1383c659a1bb" gracePeriod=30 Jan 22 14:21:23 crc kubenswrapper[4773]: I0122 14:21:23.344159 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 14:21:23 crc kubenswrapper[4773]: I0122 14:21:23.345811 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="5ac70a4f-4177-4f6b-a676-3e8c635c766a" containerName="nova-scheduler-scheduler" containerID="cri-o://364fd2ebe4297cca51026cfe8622c72c708d9df3285c62b117b332acf30a1fdd" gracePeriod=30 Jan 22 14:21:23 crc kubenswrapper[4773]: I0122 14:21:23.379307 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 14:21:23 crc kubenswrapper[4773]: I0122 14:21:23.379558 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9c19119e-6281-4d1b-ac5e-599d1fb52c95" containerName="nova-api-log" containerID="cri-o://249f13e2ffe294237aadae46ca90d1274b32f82a5df5bd44c2bce85e7524f918" gracePeriod=30 Jan 22 14:21:23 crc kubenswrapper[4773]: I0122 14:21:23.380058 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9c19119e-6281-4d1b-ac5e-599d1fb52c95" containerName="nova-api-api" containerID="cri-o://09b19ac9cd4cb5d11572c1d77637629883fee5390c4613ceff7dc60aeb60034b" gracePeriod=30 Jan 22 14:21:23 crc kubenswrapper[4773]: I0122 14:21:23.402126 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 14:21:23 crc kubenswrapper[4773]: I0122 14:21:23.404583 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" containerName="nova-metadata-log" containerID="cri-o://ded56854eb7751cde77319ca7bb63fae013a51846a11d10fff705d8011ac1636" gracePeriod=30 Jan 22 14:21:23 crc kubenswrapper[4773]: I0122 14:21:23.405212 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" containerName="nova-metadata-metadata" containerID="cri-o://7e6cffadfd31ecd8a29c5841627e778405c758aad9efd58bc7ddb226c3160778" gracePeriod=30 Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.283910 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.316726 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b7deba1-1cc3-4625-a248-3fb1aa70bf69-combined-ca-bundle\") pod \"6b7deba1-1cc3-4625-a248-3fb1aa70bf69\" (UID: \"6b7deba1-1cc3-4625-a248-3fb1aa70bf69\") " Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.316785 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b7deba1-1cc3-4625-a248-3fb1aa70bf69-config-data\") pod \"6b7deba1-1cc3-4625-a248-3fb1aa70bf69\" (UID: \"6b7deba1-1cc3-4625-a248-3fb1aa70bf69\") " Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.316826 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqwm9\" (UniqueName: \"kubernetes.io/projected/6b7deba1-1cc3-4625-a248-3fb1aa70bf69-kube-api-access-kqwm9\") pod \"6b7deba1-1cc3-4625-a248-3fb1aa70bf69\" (UID: \"6b7deba1-1cc3-4625-a248-3fb1aa70bf69\") " Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.323614 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b7deba1-1cc3-4625-a248-3fb1aa70bf69-kube-api-access-kqwm9" (OuterVolumeSpecName: "kube-api-access-kqwm9") pod "6b7deba1-1cc3-4625-a248-3fb1aa70bf69" (UID: "6b7deba1-1cc3-4625-a248-3fb1aa70bf69"). InnerVolumeSpecName "kube-api-access-kqwm9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.357216 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b7deba1-1cc3-4625-a248-3fb1aa70bf69-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b7deba1-1cc3-4625-a248-3fb1aa70bf69" (UID: "6b7deba1-1cc3-4625-a248-3fb1aa70bf69"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.363487 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b7deba1-1cc3-4625-a248-3fb1aa70bf69-config-data" (OuterVolumeSpecName: "config-data") pod "6b7deba1-1cc3-4625-a248-3fb1aa70bf69" (UID: "6b7deba1-1cc3-4625-a248-3fb1aa70bf69"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.422107 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b7deba1-1cc3-4625-a248-3fb1aa70bf69-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.422464 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b7deba1-1cc3-4625-a248-3fb1aa70bf69-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.422479 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqwm9\" (UniqueName: \"kubernetes.io/projected/6b7deba1-1cc3-4625-a248-3fb1aa70bf69-kube-api-access-kqwm9\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.467992 4773 generic.go:334] "Generic (PLEG): container finished" podID="92a09152-089d-4802-ae3a-5e9d84b996c4" containerID="ccd263a6bc7321ba12f623f00acda87138817526b3d968e708cb10e20070ce89" exitCode=0 Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.468074 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"92a09152-089d-4802-ae3a-5e9d84b996c4","Type":"ContainerDied","Data":"ccd263a6bc7321ba12f623f00acda87138817526b3d968e708cb10e20070ce89"} Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.472166 4773 generic.go:334] "Generic (PLEG): container finished" podID="29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" containerID="ded56854eb7751cde77319ca7bb63fae013a51846a11d10fff705d8011ac1636" exitCode=143 Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.472272 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4","Type":"ContainerDied","Data":"ded56854eb7751cde77319ca7bb63fae013a51846a11d10fff705d8011ac1636"} Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.475166 4773 generic.go:334] "Generic (PLEG): container finished" podID="6b7deba1-1cc3-4625-a248-3fb1aa70bf69" containerID="f1a13802a0f97a1cda76d9f4d131baf0d652ef6c24cefdecc22b1383c659a1bb" exitCode=0 Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.475233 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.475260 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6b7deba1-1cc3-4625-a248-3fb1aa70bf69","Type":"ContainerDied","Data":"f1a13802a0f97a1cda76d9f4d131baf0d652ef6c24cefdecc22b1383c659a1bb"} Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.475327 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6b7deba1-1cc3-4625-a248-3fb1aa70bf69","Type":"ContainerDied","Data":"9cdedce876d2977b440f4966cc43f733bcb8b4cd1604609a9d1cc5b84478d725"} Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.475355 4773 scope.go:117] "RemoveContainer" containerID="f1a13802a0f97a1cda76d9f4d131baf0d652ef6c24cefdecc22b1383c659a1bb" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.482266 4773 generic.go:334] "Generic (PLEG): container finished" podID="9c19119e-6281-4d1b-ac5e-599d1fb52c95" containerID="249f13e2ffe294237aadae46ca90d1274b32f82a5df5bd44c2bce85e7524f918" exitCode=143 Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.482378 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9c19119e-6281-4d1b-ac5e-599d1fb52c95","Type":"ContainerDied","Data":"249f13e2ffe294237aadae46ca90d1274b32f82a5df5bd44c2bce85e7524f918"} Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.520241 4773 scope.go:117] "RemoveContainer" containerID="f1a13802a0f97a1cda76d9f4d131baf0d652ef6c24cefdecc22b1383c659a1bb" Jan 22 14:21:24 crc kubenswrapper[4773]: E0122 14:21:24.521062 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1a13802a0f97a1cda76d9f4d131baf0d652ef6c24cefdecc22b1383c659a1bb\": container with ID starting with f1a13802a0f97a1cda76d9f4d131baf0d652ef6c24cefdecc22b1383c659a1bb not found: ID does not exist" containerID="f1a13802a0f97a1cda76d9f4d131baf0d652ef6c24cefdecc22b1383c659a1bb" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.523522 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1a13802a0f97a1cda76d9f4d131baf0d652ef6c24cefdecc22b1383c659a1bb"} err="failed to get container status \"f1a13802a0f97a1cda76d9f4d131baf0d652ef6c24cefdecc22b1383c659a1bb\": rpc error: code = NotFound desc = could not find container \"f1a13802a0f97a1cda76d9f4d131baf0d652ef6c24cefdecc22b1383c659a1bb\": container with ID starting with f1a13802a0f97a1cda76d9f4d131baf0d652ef6c24cefdecc22b1383c659a1bb not found: ID does not exist" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.592976 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.616041 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.638931 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 14:21:24 crc kubenswrapper[4773]: E0122 14:21:24.639425 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b7deba1-1cc3-4625-a248-3fb1aa70bf69" containerName="nova-cell1-conductor-conductor" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.639447 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b7deba1-1cc3-4625-a248-3fb1aa70bf69" containerName="nova-cell1-conductor-conductor" Jan 22 
14:21:24 crc kubenswrapper[4773]: E0122 14:21:24.639461 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da32627d-ed09-44b5-a5ed-b8fe46fc4828" containerName="extract-content" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.639467 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="da32627d-ed09-44b5-a5ed-b8fe46fc4828" containerName="extract-content" Jan 22 14:21:24 crc kubenswrapper[4773]: E0122 14:21:24.639481 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="527ccab1-972a-4053-8dfd-2959d2634200" containerName="extract-content" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.639486 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="527ccab1-972a-4053-8dfd-2959d2634200" containerName="extract-content" Jan 22 14:21:24 crc kubenswrapper[4773]: E0122 14:21:24.639505 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42c256b3-a909-4067-892d-520fc7972418" containerName="neutron-dhcp-openstack-openstack-cell1" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.639513 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="42c256b3-a909-4067-892d-520fc7972418" containerName="neutron-dhcp-openstack-openstack-cell1" Jan 22 14:21:24 crc kubenswrapper[4773]: E0122 14:21:24.639525 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da32627d-ed09-44b5-a5ed-b8fe46fc4828" containerName="registry-server" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.639533 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="da32627d-ed09-44b5-a5ed-b8fe46fc4828" containerName="registry-server" Jan 22 14:21:24 crc kubenswrapper[4773]: E0122 14:21:24.639560 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da32627d-ed09-44b5-a5ed-b8fe46fc4828" containerName="extract-utilities" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.639567 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="da32627d-ed09-44b5-a5ed-b8fe46fc4828" containerName="extract-utilities" Jan 22 14:21:24 crc kubenswrapper[4773]: E0122 14:21:24.639586 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="527ccab1-972a-4053-8dfd-2959d2634200" containerName="extract-utilities" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.639594 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="527ccab1-972a-4053-8dfd-2959d2634200" containerName="extract-utilities" Jan 22 14:21:24 crc kubenswrapper[4773]: E0122 14:21:24.639609 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="527ccab1-972a-4053-8dfd-2959d2634200" containerName="registry-server" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.639617 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="527ccab1-972a-4053-8dfd-2959d2634200" containerName="registry-server" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.639850 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b7deba1-1cc3-4625-a248-3fb1aa70bf69" containerName="nova-cell1-conductor-conductor" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.639869 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="527ccab1-972a-4053-8dfd-2959d2634200" containerName="registry-server" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.639876 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="42c256b3-a909-4067-892d-520fc7972418" containerName="neutron-dhcp-openstack-openstack-cell1" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.639891 4773 
memory_manager.go:354] "RemoveStaleState removing state" podUID="da32627d-ed09-44b5-a5ed-b8fe46fc4828" containerName="registry-server" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.640627 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.643535 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.658375 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.694576 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b7deba1-1cc3-4625-a248-3fb1aa70bf69" path="/var/lib/kubelet/pods/6b7deba1-1cc3-4625-a248-3fb1aa70bf69/volumes" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.727767 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkk2z\" (UniqueName: \"kubernetes.io/projected/d4d650f9-c96b-40d4-8ef9-e7c437571f32-kube-api-access-fkk2z\") pod \"nova-cell1-conductor-0\" (UID: \"d4d650f9-c96b-40d4-8ef9-e7c437571f32\") " pod="openstack/nova-cell1-conductor-0" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.727854 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4d650f9-c96b-40d4-8ef9-e7c437571f32-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d4d650f9-c96b-40d4-8ef9-e7c437571f32\") " pod="openstack/nova-cell1-conductor-0" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.727935 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4d650f9-c96b-40d4-8ef9-e7c437571f32-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d4d650f9-c96b-40d4-8ef9-e7c437571f32\") " pod="openstack/nova-cell1-conductor-0" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.830211 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkk2z\" (UniqueName: \"kubernetes.io/projected/d4d650f9-c96b-40d4-8ef9-e7c437571f32-kube-api-access-fkk2z\") pod \"nova-cell1-conductor-0\" (UID: \"d4d650f9-c96b-40d4-8ef9-e7c437571f32\") " pod="openstack/nova-cell1-conductor-0" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.830330 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4d650f9-c96b-40d4-8ef9-e7c437571f32-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d4d650f9-c96b-40d4-8ef9-e7c437571f32\") " pod="openstack/nova-cell1-conductor-0" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.830405 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4d650f9-c96b-40d4-8ef9-e7c437571f32-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d4d650f9-c96b-40d4-8ef9-e7c437571f32\") " pod="openstack/nova-cell1-conductor-0" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.837209 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4d650f9-c96b-40d4-8ef9-e7c437571f32-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: 
\"d4d650f9-c96b-40d4-8ef9-e7c437571f32\") " pod="openstack/nova-cell1-conductor-0" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.848110 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4d650f9-c96b-40d4-8ef9-e7c437571f32-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d4d650f9-c96b-40d4-8ef9-e7c437571f32\") " pod="openstack/nova-cell1-conductor-0" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.850791 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkk2z\" (UniqueName: \"kubernetes.io/projected/d4d650f9-c96b-40d4-8ef9-e7c437571f32-kube-api-access-fkk2z\") pod \"nova-cell1-conductor-0\" (UID: \"d4d650f9-c96b-40d4-8ef9-e7c437571f32\") " pod="openstack/nova-cell1-conductor-0" Jan 22 14:21:24 crc kubenswrapper[4773]: E0122 14:21:24.992899 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ccd263a6bc7321ba12f623f00acda87138817526b3d968e708cb10e20070ce89 is running failed: container process not found" containerID="ccd263a6bc7321ba12f623f00acda87138817526b3d968e708cb10e20070ce89" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 22 14:21:24 crc kubenswrapper[4773]: E0122 14:21:24.993608 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ccd263a6bc7321ba12f623f00acda87138817526b3d968e708cb10e20070ce89 is running failed: container process not found" containerID="ccd263a6bc7321ba12f623f00acda87138817526b3d968e708cb10e20070ce89" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 22 14:21:24 crc kubenswrapper[4773]: E0122 14:21:24.994241 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ccd263a6bc7321ba12f623f00acda87138817526b3d968e708cb10e20070ce89 is running failed: container process not found" containerID="ccd263a6bc7321ba12f623f00acda87138817526b3d968e708cb10e20070ce89" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 22 14:21:24 crc kubenswrapper[4773]: E0122 14:21:24.994367 4773 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ccd263a6bc7321ba12f623f00acda87138817526b3d968e708cb10e20070ce89 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="92a09152-089d-4802-ae3a-5e9d84b996c4" containerName="nova-cell0-conductor-conductor" Jan 22 14:21:24 crc kubenswrapper[4773]: I0122 14:21:24.997908 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.268671 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.342394 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjz78\" (UniqueName: \"kubernetes.io/projected/92a09152-089d-4802-ae3a-5e9d84b996c4-kube-api-access-fjz78\") pod \"92a09152-089d-4802-ae3a-5e9d84b996c4\" (UID: \"92a09152-089d-4802-ae3a-5e9d84b996c4\") " Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.342489 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92a09152-089d-4802-ae3a-5e9d84b996c4-config-data\") pod \"92a09152-089d-4802-ae3a-5e9d84b996c4\" (UID: \"92a09152-089d-4802-ae3a-5e9d84b996c4\") " Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.342595 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92a09152-089d-4802-ae3a-5e9d84b996c4-combined-ca-bundle\") pod \"92a09152-089d-4802-ae3a-5e9d84b996c4\" (UID: \"92a09152-089d-4802-ae3a-5e9d84b996c4\") " Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.348583 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92a09152-089d-4802-ae3a-5e9d84b996c4-kube-api-access-fjz78" (OuterVolumeSpecName: "kube-api-access-fjz78") pod "92a09152-089d-4802-ae3a-5e9d84b996c4" (UID: "92a09152-089d-4802-ae3a-5e9d84b996c4"). InnerVolumeSpecName "kube-api-access-fjz78". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.378869 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92a09152-089d-4802-ae3a-5e9d84b996c4-config-data" (OuterVolumeSpecName: "config-data") pod "92a09152-089d-4802-ae3a-5e9d84b996c4" (UID: "92a09152-089d-4802-ae3a-5e9d84b996c4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.386953 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92a09152-089d-4802-ae3a-5e9d84b996c4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "92a09152-089d-4802-ae3a-5e9d84b996c4" (UID: "92a09152-089d-4802-ae3a-5e9d84b996c4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.444587 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjz78\" (UniqueName: \"kubernetes.io/projected/92a09152-089d-4802-ae3a-5e9d84b996c4-kube-api-access-fjz78\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.444635 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92a09152-089d-4802-ae3a-5e9d84b996c4-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.444653 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92a09152-089d-4802-ae3a-5e9d84b996c4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.494482 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"92a09152-089d-4802-ae3a-5e9d84b996c4","Type":"ContainerDied","Data":"88798cd4f0144de088fd9a432cd7e01e085edac4c5e0920509d19cfda0e3fee4"} Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.494554 4773 scope.go:117] "RemoveContainer" containerID="ccd263a6bc7321ba12f623f00acda87138817526b3d968e708cb10e20070ce89" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.494579 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.519423 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.689305 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.705911 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.722606 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 14:21:25 crc kubenswrapper[4773]: E0122 14:21:25.723476 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92a09152-089d-4802-ae3a-5e9d84b996c4" containerName="nova-cell0-conductor-conductor" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.723499 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="92a09152-089d-4802-ae3a-5e9d84b996c4" containerName="nova-cell0-conductor-conductor" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.723774 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="92a09152-089d-4802-ae3a-5e9d84b996c4" containerName="nova-cell0-conductor-conductor" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.725374 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.727684 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.739851 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.757841 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b10fb344-5f69-482e-aa3b-4e5a0bbf1408-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b10fb344-5f69-482e-aa3b-4e5a0bbf1408\") " pod="openstack/nova-cell0-conductor-0" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.757977 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m99rv\" (UniqueName: \"kubernetes.io/projected/b10fb344-5f69-482e-aa3b-4e5a0bbf1408-kube-api-access-m99rv\") pod \"nova-cell0-conductor-0\" (UID: \"b10fb344-5f69-482e-aa3b-4e5a0bbf1408\") " pod="openstack/nova-cell0-conductor-0" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.758117 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b10fb344-5f69-482e-aa3b-4e5a0bbf1408-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b10fb344-5f69-482e-aa3b-4e5a0bbf1408\") " pod="openstack/nova-cell0-conductor-0" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.859859 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m99rv\" (UniqueName: \"kubernetes.io/projected/b10fb344-5f69-482e-aa3b-4e5a0bbf1408-kube-api-access-m99rv\") pod \"nova-cell0-conductor-0\" (UID: \"b10fb344-5f69-482e-aa3b-4e5a0bbf1408\") " pod="openstack/nova-cell0-conductor-0" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.860003 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b10fb344-5f69-482e-aa3b-4e5a0bbf1408-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b10fb344-5f69-482e-aa3b-4e5a0bbf1408\") " pod="openstack/nova-cell0-conductor-0" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.860075 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b10fb344-5f69-482e-aa3b-4e5a0bbf1408-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b10fb344-5f69-482e-aa3b-4e5a0bbf1408\") " pod="openstack/nova-cell0-conductor-0" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.864679 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b10fb344-5f69-482e-aa3b-4e5a0bbf1408-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b10fb344-5f69-482e-aa3b-4e5a0bbf1408\") " pod="openstack/nova-cell0-conductor-0" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.866016 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b10fb344-5f69-482e-aa3b-4e5a0bbf1408-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b10fb344-5f69-482e-aa3b-4e5a0bbf1408\") " pod="openstack/nova-cell0-conductor-0" Jan 22 14:21:25 crc kubenswrapper[4773]: I0122 14:21:25.878224 4773 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m99rv\" (UniqueName: \"kubernetes.io/projected/b10fb344-5f69-482e-aa3b-4e5a0bbf1408-kube-api-access-m99rv\") pod \"nova-cell0-conductor-0\" (UID: \"b10fb344-5f69-482e-aa3b-4e5a0bbf1408\") " pod="openstack/nova-cell0-conductor-0" Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.054145 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.517551 4773 generic.go:334] "Generic (PLEG): container finished" podID="5ac70a4f-4177-4f6b-a676-3e8c635c766a" containerID="364fd2ebe4297cca51026cfe8622c72c708d9df3285c62b117b332acf30a1fdd" exitCode=0 Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.517898 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5ac70a4f-4177-4f6b-a676-3e8c635c766a","Type":"ContainerDied","Data":"364fd2ebe4297cca51026cfe8622c72c708d9df3285c62b117b332acf30a1fdd"} Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.519687 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"d4d650f9-c96b-40d4-8ef9-e7c437571f32","Type":"ContainerStarted","Data":"25a6d3a78d1fdf09ed28bffd7a7e955064bb3b4e1e499611bdda920cd022aa26"} Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.519713 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"d4d650f9-c96b-40d4-8ef9-e7c437571f32","Type":"ContainerStarted","Data":"168d1eaaac4188451f7f9fe7014343cd4ac995e848ecfbdd917db3b2b86f87ff"} Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.527503 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.533822 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.544644 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.5446247939999997 podStartE2EDuration="2.544624794s" podCreationTimestamp="2026-01-22 14:21:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:26.542573446 +0000 UTC m=+8794.120689281" watchObservedRunningTime="2026-01-22 14:21:26.544624794 +0000 UTC m=+8794.122740619" Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.674310 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92a09152-089d-4802-ae3a-5e9d84b996c4" path="/var/lib/kubelet/pods/92a09152-089d-4802-ae3a-5e9d84b996c4/volumes" Jan 22 14:21:26 crc kubenswrapper[4773]: E0122 14:21:26.739682 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 364fd2ebe4297cca51026cfe8622c72c708d9df3285c62b117b332acf30a1fdd is running failed: container process not found" containerID="364fd2ebe4297cca51026cfe8622c72c708d9df3285c62b117b332acf30a1fdd" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 22 14:21:26 crc kubenswrapper[4773]: E0122 14:21:26.740216 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
364fd2ebe4297cca51026cfe8622c72c708d9df3285c62b117b332acf30a1fdd is running failed: container process not found" containerID="364fd2ebe4297cca51026cfe8622c72c708d9df3285c62b117b332acf30a1fdd" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 22 14:21:26 crc kubenswrapper[4773]: E0122 14:21:26.740469 4773 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 364fd2ebe4297cca51026cfe8622c72c708d9df3285c62b117b332acf30a1fdd is running failed: container process not found" containerID="364fd2ebe4297cca51026cfe8622c72c708d9df3285c62b117b332acf30a1fdd" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 22 14:21:26 crc kubenswrapper[4773]: E0122 14:21:26.740501 4773 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 364fd2ebe4297cca51026cfe8622c72c708d9df3285c62b117b332acf30a1fdd is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="5ac70a4f-4177-4f6b-a676-3e8c635c766a" containerName="nova-scheduler-scheduler" Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.797881 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.834655 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.96:8775/\": read tcp 10.217.0.2:51178->10.217.1.96:8775: read: connection reset by peer" Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.835008 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.96:8775/\": read tcp 10.217.0.2:51194->10.217.1.96:8775: read: connection reset by peer" Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.891014 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5scj\" (UniqueName: \"kubernetes.io/projected/5ac70a4f-4177-4f6b-a676-3e8c635c766a-kube-api-access-j5scj\") pod \"5ac70a4f-4177-4f6b-a676-3e8c635c766a\" (UID: \"5ac70a4f-4177-4f6b-a676-3e8c635c766a\") " Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.891081 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ac70a4f-4177-4f6b-a676-3e8c635c766a-config-data\") pod \"5ac70a4f-4177-4f6b-a676-3e8c635c766a\" (UID: \"5ac70a4f-4177-4f6b-a676-3e8c635c766a\") " Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.891147 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ac70a4f-4177-4f6b-a676-3e8c635c766a-combined-ca-bundle\") pod \"5ac70a4f-4177-4f6b-a676-3e8c635c766a\" (UID: \"5ac70a4f-4177-4f6b-a676-3e8c635c766a\") " Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.900475 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ac70a4f-4177-4f6b-a676-3e8c635c766a-kube-api-access-j5scj" (OuterVolumeSpecName: "kube-api-access-j5scj") pod "5ac70a4f-4177-4f6b-a676-3e8c635c766a" (UID: "5ac70a4f-4177-4f6b-a676-3e8c635c766a"). 
InnerVolumeSpecName "kube-api-access-j5scj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.938635 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ac70a4f-4177-4f6b-a676-3e8c635c766a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5ac70a4f-4177-4f6b-a676-3e8c635c766a" (UID: "5ac70a4f-4177-4f6b-a676-3e8c635c766a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.994140 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ac70a4f-4177-4f6b-a676-3e8c635c766a-config-data" (OuterVolumeSpecName: "config-data") pod "5ac70a4f-4177-4f6b-a676-3e8c635c766a" (UID: "5ac70a4f-4177-4f6b-a676-3e8c635c766a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.994314 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ac70a4f-4177-4f6b-a676-3e8c635c766a-config-data\") pod \"5ac70a4f-4177-4f6b-a676-3e8c635c766a\" (UID: \"5ac70a4f-4177-4f6b-a676-3e8c635c766a\") " Jan 22 14:21:26 crc kubenswrapper[4773]: W0122 14:21:26.994501 4773 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/5ac70a4f-4177-4f6b-a676-3e8c635c766a/volumes/kubernetes.io~secret/config-data Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.994517 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ac70a4f-4177-4f6b-a676-3e8c635c766a-config-data" (OuterVolumeSpecName: "config-data") pod "5ac70a4f-4177-4f6b-a676-3e8c635c766a" (UID: "5ac70a4f-4177-4f6b-a676-3e8c635c766a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.994971 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5scj\" (UniqueName: \"kubernetes.io/projected/5ac70a4f-4177-4f6b-a676-3e8c635c766a-kube-api-access-j5scj\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.994988 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ac70a4f-4177-4f6b-a676-3e8c635c766a-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:26 crc kubenswrapper[4773]: I0122 14:21:26.994997 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ac70a4f-4177-4f6b-a676-3e8c635c766a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.348820 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.406992 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-nova-metadata-tls-certs\") pod \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\" (UID: \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.407326 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-logs\") pod \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\" (UID: \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.407485 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2kdfc\" (UniqueName: \"kubernetes.io/projected/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-kube-api-access-2kdfc\") pod \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\" (UID: \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.407549 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-combined-ca-bundle\") pod \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\" (UID: \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.407752 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-config-data\") pod \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\" (UID: \"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4\") " Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.408016 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-logs" (OuterVolumeSpecName: "logs") pod "29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" (UID: "29de8e7e-ba3b-4bc5-b629-7d1e665d83a4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.408469 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-logs\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.416603 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-kube-api-access-2kdfc" (OuterVolumeSpecName: "kube-api-access-2kdfc") pod "29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" (UID: "29de8e7e-ba3b-4bc5-b629-7d1e665d83a4"). InnerVolumeSpecName "kube-api-access-2kdfc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.460504 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-config-data" (OuterVolumeSpecName: "config-data") pod "29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" (UID: "29de8e7e-ba3b-4bc5-b629-7d1e665d83a4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.473256 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" (UID: "29de8e7e-ba3b-4bc5-b629-7d1e665d83a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.485319 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" (UID: "29de8e7e-ba3b-4bc5-b629-7d1e665d83a4"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.512003 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.512041 4773 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.512055 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2kdfc\" (UniqueName: \"kubernetes.io/projected/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-kube-api-access-2kdfc\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.512064 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.559345 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b10fb344-5f69-482e-aa3b-4e5a0bbf1408","Type":"ContainerStarted","Data":"0ea27e7dcaeb8bae28b4a5640c88a591ad3eea868c5385130616289577e7c6bc"} Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.559412 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b10fb344-5f69-482e-aa3b-4e5a0bbf1408","Type":"ContainerStarted","Data":"feeba9a0650998eb1e45695fe25112d334db99177d4f6e7977cf8541efbaaafc"} Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.559457 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.583745 4773 generic.go:334] "Generic (PLEG): container finished" podID="9c19119e-6281-4d1b-ac5e-599d1fb52c95" containerID="09b19ac9cd4cb5d11572c1d77637629883fee5390c4613ceff7dc60aeb60034b" exitCode=0 Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.583876 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9c19119e-6281-4d1b-ac5e-599d1fb52c95","Type":"ContainerDied","Data":"09b19ac9cd4cb5d11572c1d77637629883fee5390c4613ceff7dc60aeb60034b"} Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.593684 4773 generic.go:334] "Generic (PLEG): container finished" 
podID="29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" containerID="7e6cffadfd31ecd8a29c5841627e778405c758aad9efd58bc7ddb226c3160778" exitCode=0 Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.593771 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4","Type":"ContainerDied","Data":"7e6cffadfd31ecd8a29c5841627e778405c758aad9efd58bc7ddb226c3160778"} Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.593801 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"29de8e7e-ba3b-4bc5-b629-7d1e665d83a4","Type":"ContainerDied","Data":"cded10f49185053a7f631250a53f8627712af4c29f9f90eac233aa4ef9752e07"} Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.593819 4773 scope.go:117] "RemoveContainer" containerID="7e6cffadfd31ecd8a29c5841627e778405c758aad9efd58bc7ddb226c3160778" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.593938 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.620333 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.620891 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5ac70a4f-4177-4f6b-a676-3e8c635c766a","Type":"ContainerDied","Data":"e667ff4aff6f87772e778bfc2b2b2b44597796903cd6a3e00d650273e884ab87"} Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.690634 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.707094 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.7070682489999998 podStartE2EDuration="2.707068249s" podCreationTimestamp="2026-01-22 14:21:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:27.614147225 +0000 UTC m=+8795.192263060" watchObservedRunningTime="2026-01-22 14:21:27.707068249 +0000 UTC m=+8795.285184064" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.711361 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.717213 4773 scope.go:117] "RemoveContainer" containerID="ded56854eb7751cde77319ca7bb63fae013a51846a11d10fff705d8011ac1636" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.738441 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.766416 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.777837 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.786146 4773 scope.go:117] "RemoveContainer" containerID="7e6cffadfd31ecd8a29c5841627e778405c758aad9efd58bc7ddb226c3160778" Jan 22 14:21:27 crc kubenswrapper[4773]: E0122 14:21:27.786652 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e6cffadfd31ecd8a29c5841627e778405c758aad9efd58bc7ddb226c3160778\": container with ID 
starting with 7e6cffadfd31ecd8a29c5841627e778405c758aad9efd58bc7ddb226c3160778 not found: ID does not exist" containerID="7e6cffadfd31ecd8a29c5841627e778405c758aad9efd58bc7ddb226c3160778" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.786699 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e6cffadfd31ecd8a29c5841627e778405c758aad9efd58bc7ddb226c3160778"} err="failed to get container status \"7e6cffadfd31ecd8a29c5841627e778405c758aad9efd58bc7ddb226c3160778\": rpc error: code = NotFound desc = could not find container \"7e6cffadfd31ecd8a29c5841627e778405c758aad9efd58bc7ddb226c3160778\": container with ID starting with 7e6cffadfd31ecd8a29c5841627e778405c758aad9efd58bc7ddb226c3160778 not found: ID does not exist" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.786728 4773 scope.go:117] "RemoveContainer" containerID="ded56854eb7751cde77319ca7bb63fae013a51846a11d10fff705d8011ac1636" Jan 22 14:21:27 crc kubenswrapper[4773]: E0122 14:21:27.800833 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ded56854eb7751cde77319ca7bb63fae013a51846a11d10fff705d8011ac1636\": container with ID starting with ded56854eb7751cde77319ca7bb63fae013a51846a11d10fff705d8011ac1636 not found: ID does not exist" containerID="ded56854eb7751cde77319ca7bb63fae013a51846a11d10fff705d8011ac1636" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.800880 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ded56854eb7751cde77319ca7bb63fae013a51846a11d10fff705d8011ac1636"} err="failed to get container status \"ded56854eb7751cde77319ca7bb63fae013a51846a11d10fff705d8011ac1636\": rpc error: code = NotFound desc = could not find container \"ded56854eb7751cde77319ca7bb63fae013a51846a11d10fff705d8011ac1636\": container with ID starting with ded56854eb7751cde77319ca7bb63fae013a51846a11d10fff705d8011ac1636 not found: ID does not exist" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.800912 4773 scope.go:117] "RemoveContainer" containerID="364fd2ebe4297cca51026cfe8622c72c708d9df3285c62b117b332acf30a1fdd" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.842618 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-config-data\") pod \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.842697 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-public-tls-certs\") pod \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.842776 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sncrb\" (UniqueName: \"kubernetes.io/projected/9c19119e-6281-4d1b-ac5e-599d1fb52c95-kube-api-access-sncrb\") pod \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.842832 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-combined-ca-bundle\") pod 
\"9c19119e-6281-4d1b-ac5e-599d1fb52c95\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.842886 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-internal-tls-certs\") pod \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.843116 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c19119e-6281-4d1b-ac5e-599d1fb52c95-logs\") pod \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\" (UID: \"9c19119e-6281-4d1b-ac5e-599d1fb52c95\") " Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.853470 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 22 14:21:27 crc kubenswrapper[4773]: E0122 14:21:27.854490 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" containerName="nova-metadata-metadata" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.854515 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" containerName="nova-metadata-metadata" Jan 22 14:21:27 crc kubenswrapper[4773]: E0122 14:21:27.854575 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c19119e-6281-4d1b-ac5e-599d1fb52c95" containerName="nova-api-api" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.854583 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c19119e-6281-4d1b-ac5e-599d1fb52c95" containerName="nova-api-api" Jan 22 14:21:27 crc kubenswrapper[4773]: E0122 14:21:27.854600 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c19119e-6281-4d1b-ac5e-599d1fb52c95" containerName="nova-api-log" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.854606 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c19119e-6281-4d1b-ac5e-599d1fb52c95" containerName="nova-api-log" Jan 22 14:21:27 crc kubenswrapper[4773]: E0122 14:21:27.854626 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" containerName="nova-metadata-log" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.854634 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" containerName="nova-metadata-log" Jan 22 14:21:27 crc kubenswrapper[4773]: E0122 14:21:27.854650 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ac70a4f-4177-4f6b-a676-3e8c635c766a" containerName="nova-scheduler-scheduler" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.854659 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ac70a4f-4177-4f6b-a676-3e8c635c766a" containerName="nova-scheduler-scheduler" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.856166 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ac70a4f-4177-4f6b-a676-3e8c635c766a" containerName="nova-scheduler-scheduler" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.856256 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" containerName="nova-metadata-metadata" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.856297 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c19119e-6281-4d1b-ac5e-599d1fb52c95" 
containerName="nova-api-log" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.856329 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" containerName="nova-metadata-log" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.856363 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c19119e-6281-4d1b-ac5e-599d1fb52c95" containerName="nova-api-api" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.863442 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c19119e-6281-4d1b-ac5e-599d1fb52c95-logs" (OuterVolumeSpecName: "logs") pod "9c19119e-6281-4d1b-ac5e-599d1fb52c95" (UID: "9c19119e-6281-4d1b-ac5e-599d1fb52c95"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.866744 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c19119e-6281-4d1b-ac5e-599d1fb52c95-kube-api-access-sncrb" (OuterVolumeSpecName: "kube-api-access-sncrb") pod "9c19119e-6281-4d1b-ac5e-599d1fb52c95" (UID: "9c19119e-6281-4d1b-ac5e-599d1fb52c95"). InnerVolumeSpecName "kube-api-access-sncrb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.925778 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.940353 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.943660 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.943958 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.961326 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.965337 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.967339 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9e640f7-eedc-45e6-b379-3f78fdcca5f2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e9e640f7-eedc-45e6-b379-3f78fdcca5f2\") " pod="openstack/nova-metadata-0" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.967489 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9e640f7-eedc-45e6-b379-3f78fdcca5f2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e9e640f7-eedc-45e6-b379-3f78fdcca5f2\") " pod="openstack/nova-metadata-0" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.972569 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjsl8\" (UniqueName: \"kubernetes.io/projected/e9e640f7-eedc-45e6-b379-3f78fdcca5f2-kube-api-access-hjsl8\") pod \"nova-metadata-0\" (UID: \"e9e640f7-eedc-45e6-b379-3f78fdcca5f2\") " pod="openstack/nova-metadata-0" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.972889 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9e640f7-eedc-45e6-b379-3f78fdcca5f2-config-data\") pod \"nova-metadata-0\" (UID: \"e9e640f7-eedc-45e6-b379-3f78fdcca5f2\") " pod="openstack/nova-metadata-0" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.973255 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9e640f7-eedc-45e6-b379-3f78fdcca5f2-logs\") pod \"nova-metadata-0\" (UID: \"e9e640f7-eedc-45e6-b379-3f78fdcca5f2\") " pod="openstack/nova-metadata-0" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.973746 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sncrb\" (UniqueName: \"kubernetes.io/projected/9c19119e-6281-4d1b-ac5e-599d1fb52c95-kube-api-access-sncrb\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:27 crc kubenswrapper[4773]: I0122 14:21:27.973784 4773 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c19119e-6281-4d1b-ac5e-599d1fb52c95-logs\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.074592 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9c19119e-6281-4d1b-ac5e-599d1fb52c95" (UID: "9c19119e-6281-4d1b-ac5e-599d1fb52c95"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.076539 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9e640f7-eedc-45e6-b379-3f78fdcca5f2-logs\") pod \"nova-metadata-0\" (UID: \"e9e640f7-eedc-45e6-b379-3f78fdcca5f2\") " pod="openstack/nova-metadata-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.076727 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9e640f7-eedc-45e6-b379-3f78fdcca5f2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e9e640f7-eedc-45e6-b379-3f78fdcca5f2\") " pod="openstack/nova-metadata-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.076778 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9e640f7-eedc-45e6-b379-3f78fdcca5f2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e9e640f7-eedc-45e6-b379-3f78fdcca5f2\") " pod="openstack/nova-metadata-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.076828 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjsl8\" (UniqueName: \"kubernetes.io/projected/e9e640f7-eedc-45e6-b379-3f78fdcca5f2-kube-api-access-hjsl8\") pod \"nova-metadata-0\" (UID: \"e9e640f7-eedc-45e6-b379-3f78fdcca5f2\") " pod="openstack/nova-metadata-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.076854 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9e640f7-eedc-45e6-b379-3f78fdcca5f2-config-data\") pod \"nova-metadata-0\" (UID: \"e9e640f7-eedc-45e6-b379-3f78fdcca5f2\") " pod="openstack/nova-metadata-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.077824 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-config-data" (OuterVolumeSpecName: "config-data") pod "9c19119e-6281-4d1b-ac5e-599d1fb52c95" (UID: "9c19119e-6281-4d1b-ac5e-599d1fb52c95"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.099988 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9e640f7-eedc-45e6-b379-3f78fdcca5f2-logs\") pod \"nova-metadata-0\" (UID: \"e9e640f7-eedc-45e6-b379-3f78fdcca5f2\") " pod="openstack/nova-metadata-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.101894 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.117218 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9e640f7-eedc-45e6-b379-3f78fdcca5f2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e9e640f7-eedc-45e6-b379-3f78fdcca5f2\") " pod="openstack/nova-metadata-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.120430 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.127735 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.133947 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.140960 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjsl8\" (UniqueName: \"kubernetes.io/projected/e9e640f7-eedc-45e6-b379-3f78fdcca5f2-kube-api-access-hjsl8\") pod \"nova-metadata-0\" (UID: \"e9e640f7-eedc-45e6-b379-3f78fdcca5f2\") " pod="openstack/nova-metadata-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.142067 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9e640f7-eedc-45e6-b379-3f78fdcca5f2-config-data\") pod \"nova-metadata-0\" (UID: \"e9e640f7-eedc-45e6-b379-3f78fdcca5f2\") " pod="openstack/nova-metadata-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.144827 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9e640f7-eedc-45e6-b379-3f78fdcca5f2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e9e640f7-eedc-45e6-b379-3f78fdcca5f2\") " pod="openstack/nova-metadata-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.192012 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "9c19119e-6281-4d1b-ac5e-599d1fb52c95" (UID: "9c19119e-6281-4d1b-ac5e-599d1fb52c95"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.223846 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b47a6230-96aa-47e7-b785-4f2a3a40ca16-config-data\") pod \"nova-scheduler-0\" (UID: \"b47a6230-96aa-47e7-b785-4f2a3a40ca16\") " pod="openstack/nova-scheduler-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.223890 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b47a6230-96aa-47e7-b785-4f2a3a40ca16-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b47a6230-96aa-47e7-b785-4f2a3a40ca16\") " pod="openstack/nova-scheduler-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.223951 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhpj7\" (UniqueName: \"kubernetes.io/projected/b47a6230-96aa-47e7-b785-4f2a3a40ca16-kube-api-access-jhpj7\") pod \"nova-scheduler-0\" (UID: \"b47a6230-96aa-47e7-b785-4f2a3a40ca16\") " pod="openstack/nova-scheduler-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.224124 4773 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.236069 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "9c19119e-6281-4d1b-ac5e-599d1fb52c95" (UID: "9c19119e-6281-4d1b-ac5e-599d1fb52c95"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.322110 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.326620 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b47a6230-96aa-47e7-b785-4f2a3a40ca16-config-data\") pod \"nova-scheduler-0\" (UID: \"b47a6230-96aa-47e7-b785-4f2a3a40ca16\") " pod="openstack/nova-scheduler-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.326681 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b47a6230-96aa-47e7-b785-4f2a3a40ca16-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b47a6230-96aa-47e7-b785-4f2a3a40ca16\") " pod="openstack/nova-scheduler-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.326739 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhpj7\" (UniqueName: \"kubernetes.io/projected/b47a6230-96aa-47e7-b785-4f2a3a40ca16-kube-api-access-jhpj7\") pod \"nova-scheduler-0\" (UID: \"b47a6230-96aa-47e7-b785-4f2a3a40ca16\") " pod="openstack/nova-scheduler-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.326958 4773 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c19119e-6281-4d1b-ac5e-599d1fb52c95-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.331430 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b47a6230-96aa-47e7-b785-4f2a3a40ca16-config-data\") pod \"nova-scheduler-0\" (UID: \"b47a6230-96aa-47e7-b785-4f2a3a40ca16\") " pod="openstack/nova-scheduler-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.333156 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b47a6230-96aa-47e7-b785-4f2a3a40ca16-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b47a6230-96aa-47e7-b785-4f2a3a40ca16\") " pod="openstack/nova-scheduler-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.346336 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhpj7\" (UniqueName: \"kubernetes.io/projected/b47a6230-96aa-47e7-b785-4f2a3a40ca16-kube-api-access-jhpj7\") pod \"nova-scheduler-0\" (UID: \"b47a6230-96aa-47e7-b785-4f2a3a40ca16\") " pod="openstack/nova-scheduler-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.537929 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.679444 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.690070 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29de8e7e-ba3b-4bc5-b629-7d1e665d83a4" path="/var/lib/kubelet/pods/29de8e7e-ba3b-4bc5-b629-7d1e665d83a4/volumes" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.690742 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ac70a4f-4177-4f6b-a676-3e8c635c766a" path="/var/lib/kubelet/pods/5ac70a4f-4177-4f6b-a676-3e8c635c766a/volumes" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.692576 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9c19119e-6281-4d1b-ac5e-599d1fb52c95","Type":"ContainerDied","Data":"b9418d4e0e2e5f37379e288479a7edc7ffdc1b3cab59da0ad2d6b0fab964c5dc"} Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.692662 4773 scope.go:117] "RemoveContainer" containerID="09b19ac9cd4cb5d11572c1d77637629883fee5390c4613ceff7dc60aeb60034b" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.780265 4773 scope.go:117] "RemoveContainer" containerID="249f13e2ffe294237aadae46ca90d1274b32f82a5df5bd44c2bce85e7524f918" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.802595 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.834028 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.858135 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.866529 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.868540 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.868787 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.869006 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.883018 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.909112 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.951732 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7a91662-5b00-4b02-8c60-7abddc9b9de5-config-data\") pod \"nova-api-0\" (UID: \"a7a91662-5b00-4b02-8c60-7abddc9b9de5\") " pod="openstack/nova-api-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.952073 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcwwz\" (UniqueName: \"kubernetes.io/projected/a7a91662-5b00-4b02-8c60-7abddc9b9de5-kube-api-access-pcwwz\") pod \"nova-api-0\" (UID: \"a7a91662-5b00-4b02-8c60-7abddc9b9de5\") " pod="openstack/nova-api-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.952122 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a7a91662-5b00-4b02-8c60-7abddc9b9de5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a7a91662-5b00-4b02-8c60-7abddc9b9de5\") " pod="openstack/nova-api-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.952221 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7a91662-5b00-4b02-8c60-7abddc9b9de5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"a7a91662-5b00-4b02-8c60-7abddc9b9de5\") " pod="openstack/nova-api-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.952247 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a7a91662-5b00-4b02-8c60-7abddc9b9de5-logs\") pod \"nova-api-0\" (UID: \"a7a91662-5b00-4b02-8c60-7abddc9b9de5\") " pod="openstack/nova-api-0" Jan 22 14:21:28 crc kubenswrapper[4773]: I0122 14:21:28.952276 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7a91662-5b00-4b02-8c60-7abddc9b9de5-public-tls-certs\") pod \"nova-api-0\" (UID: \"a7a91662-5b00-4b02-8c60-7abddc9b9de5\") " pod="openstack/nova-api-0" Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.054892 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7a91662-5b00-4b02-8c60-7abddc9b9de5-config-data\") pod \"nova-api-0\" (UID: \"a7a91662-5b00-4b02-8c60-7abddc9b9de5\") " pod="openstack/nova-api-0" Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.054943 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcwwz\" (UniqueName: \"kubernetes.io/projected/a7a91662-5b00-4b02-8c60-7abddc9b9de5-kube-api-access-pcwwz\") pod \"nova-api-0\" (UID: \"a7a91662-5b00-4b02-8c60-7abddc9b9de5\") " pod="openstack/nova-api-0" Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.054980 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7a91662-5b00-4b02-8c60-7abddc9b9de5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a7a91662-5b00-4b02-8c60-7abddc9b9de5\") " pod="openstack/nova-api-0" Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.055093 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7a91662-5b00-4b02-8c60-7abddc9b9de5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"a7a91662-5b00-4b02-8c60-7abddc9b9de5\") " pod="openstack/nova-api-0" Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.055119 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a7a91662-5b00-4b02-8c60-7abddc9b9de5-logs\") pod \"nova-api-0\" (UID: \"a7a91662-5b00-4b02-8c60-7abddc9b9de5\") " pod="openstack/nova-api-0" Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.055147 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7a91662-5b00-4b02-8c60-7abddc9b9de5-public-tls-certs\") pod \"nova-api-0\" (UID: \"a7a91662-5b00-4b02-8c60-7abddc9b9de5\") " pod="openstack/nova-api-0" Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.055982 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/a7a91662-5b00-4b02-8c60-7abddc9b9de5-logs\") pod \"nova-api-0\" (UID: \"a7a91662-5b00-4b02-8c60-7abddc9b9de5\") " pod="openstack/nova-api-0" Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.062773 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7a91662-5b00-4b02-8c60-7abddc9b9de5-config-data\") pod \"nova-api-0\" (UID: \"a7a91662-5b00-4b02-8c60-7abddc9b9de5\") " pod="openstack/nova-api-0" Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.062978 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7a91662-5b00-4b02-8c60-7abddc9b9de5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"a7a91662-5b00-4b02-8c60-7abddc9b9de5\") " pod="openstack/nova-api-0" Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.063197 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a7a91662-5b00-4b02-8c60-7abddc9b9de5-public-tls-certs\") pod \"nova-api-0\" (UID: \"a7a91662-5b00-4b02-8c60-7abddc9b9de5\") " pod="openstack/nova-api-0" Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.063919 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7a91662-5b00-4b02-8c60-7abddc9b9de5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a7a91662-5b00-4b02-8c60-7abddc9b9de5\") " pod="openstack/nova-api-0" Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.075794 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcwwz\" (UniqueName: \"kubernetes.io/projected/a7a91662-5b00-4b02-8c60-7abddc9b9de5-kube-api-access-pcwwz\") pod \"nova-api-0\" (UID: \"a7a91662-5b00-4b02-8c60-7abddc9b9de5\") " pod="openstack/nova-api-0" Jan 22 14:21:29 crc kubenswrapper[4773]: W0122 14:21:29.158101 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb47a6230_96aa_47e7_b785_4f2a3a40ca16.slice/crio-ca52df70ca6d17da8f46659258121d56a247bb59ae0dfafe391c791de8a29ff4 WatchSource:0}: Error finding container ca52df70ca6d17da8f46659258121d56a247bb59ae0dfafe391c791de8a29ff4: Status 404 returned error can't find the container with id ca52df70ca6d17da8f46659258121d56a247bb59ae0dfafe391c791de8a29ff4 Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.161657 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.250748 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.707246 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e9e640f7-eedc-45e6-b379-3f78fdcca5f2","Type":"ContainerStarted","Data":"86991ed6c7786811a43e4e2483266743b989b33c3e5c724d95b3a21d4b7b0fb9"} Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.707911 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e9e640f7-eedc-45e6-b379-3f78fdcca5f2","Type":"ContainerStarted","Data":"3a9780745ac5e2aebc86cab39bab795c42ece0febf4048488403466a2bc5bc8f"} Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.707926 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e9e640f7-eedc-45e6-b379-3f78fdcca5f2","Type":"ContainerStarted","Data":"2333cb23fc020f33eca173e9bcc1bf2a2f41e6608a2577648744c4cc7372786d"} Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.716599 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b47a6230-96aa-47e7-b785-4f2a3a40ca16","Type":"ContainerStarted","Data":"ffbccf3ab9c84b3f2df183afa5764545a1f183caef127bf414670278e692fc99"} Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.716673 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b47a6230-96aa-47e7-b785-4f2a3a40ca16","Type":"ContainerStarted","Data":"ca52df70ca6d17da8f46659258121d56a247bb59ae0dfafe391c791de8a29ff4"} Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.742605 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.742579946 podStartE2EDuration="2.742579946s" podCreationTimestamp="2026-01-22 14:21:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:29.737353198 +0000 UTC m=+8797.315469043" watchObservedRunningTime="2026-01-22 14:21:29.742579946 +0000 UTC m=+8797.320695771" Jan 22 14:21:29 crc kubenswrapper[4773]: I0122 14:21:29.794612 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 22 14:21:30 crc kubenswrapper[4773]: I0122 14:21:30.685399 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c19119e-6281-4d1b-ac5e-599d1fb52c95" path="/var/lib/kubelet/pods/9c19119e-6281-4d1b-ac5e-599d1fb52c95/volumes" Jan 22 14:21:30 crc kubenswrapper[4773]: I0122 14:21:30.727007 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a7a91662-5b00-4b02-8c60-7abddc9b9de5","Type":"ContainerStarted","Data":"e067bc709d593d983eba09721cdc64c93a654b6fcb81809511b57b26e69e29e8"} Jan 22 14:21:30 crc kubenswrapper[4773]: I0122 14:21:30.727069 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a7a91662-5b00-4b02-8c60-7abddc9b9de5","Type":"ContainerStarted","Data":"cc837db9f21edfaea869b760ff313aa03e847bf308779137527f86608dd31053"} Jan 22 14:21:30 crc kubenswrapper[4773]: I0122 14:21:30.780341 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.780314306 podStartE2EDuration="3.780314306s" podCreationTimestamp="2026-01-22 14:21:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:30.764827347 
+0000 UTC m=+8798.342943192" watchObservedRunningTime="2026-01-22 14:21:30.780314306 +0000 UTC m=+8798.358430151" Jan 22 14:21:31 crc kubenswrapper[4773]: I0122 14:21:31.736582 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a7a91662-5b00-4b02-8c60-7abddc9b9de5","Type":"ContainerStarted","Data":"10cfcbc32536fd302b51da84ea7e64ad31684fe1fe7894d0a4c85605fb5de07f"} Jan 22 14:21:31 crc kubenswrapper[4773]: I0122 14:21:31.757995 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.757973954 podStartE2EDuration="3.757973954s" podCreationTimestamp="2026-01-22 14:21:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:31.75395135 +0000 UTC m=+8799.332067175" watchObservedRunningTime="2026-01-22 14:21:31.757973954 +0000 UTC m=+8799.336089769" Jan 22 14:21:33 crc kubenswrapper[4773]: I0122 14:21:33.322537 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 14:21:33 crc kubenswrapper[4773]: I0122 14:21:33.322921 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 22 14:21:33 crc kubenswrapper[4773]: I0122 14:21:33.539639 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 22 14:21:35 crc kubenswrapper[4773]: I0122 14:21:35.033625 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Jan 22 14:21:36 crc kubenswrapper[4773]: I0122 14:21:36.085325 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Jan 22 14:21:38 crc kubenswrapper[4773]: I0122 14:21:38.323093 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 22 14:21:38 crc kubenswrapper[4773]: I0122 14:21:38.323462 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 22 14:21:38 crc kubenswrapper[4773]: I0122 14:21:38.539886 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 22 14:21:38 crc kubenswrapper[4773]: I0122 14:21:38.572507 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 22 14:21:38 crc kubenswrapper[4773]: I0122 14:21:38.965483 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 22 14:21:39 crc kubenswrapper[4773]: I0122 14:21:39.251658 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 14:21:39 crc kubenswrapper[4773]: I0122 14:21:39.251999 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 22 14:21:39 crc kubenswrapper[4773]: I0122 14:21:39.335480 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="e9e640f7-eedc-45e6-b379-3f78fdcca5f2" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.202:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 14:21:39 crc kubenswrapper[4773]: I0122 14:21:39.335486 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" 
podUID="e9e640f7-eedc-45e6-b379-3f78fdcca5f2" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.202:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 14:21:40 crc kubenswrapper[4773]: I0122 14:21:40.266492 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a7a91662-5b00-4b02-8c60-7abddc9b9de5" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.1.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 14:21:40 crc kubenswrapper[4773]: I0122 14:21:40.266568 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a7a91662-5b00-4b02-8c60-7abddc9b9de5" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.1.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 22 14:21:48 crc kubenswrapper[4773]: I0122 14:21:48.329831 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 22 14:21:48 crc kubenswrapper[4773]: I0122 14:21:48.341007 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 22 14:21:48 crc kubenswrapper[4773]: I0122 14:21:48.342525 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 22 14:21:49 crc kubenswrapper[4773]: I0122 14:21:49.035008 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 22 14:21:49 crc kubenswrapper[4773]: I0122 14:21:49.260612 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 22 14:21:49 crc kubenswrapper[4773]: I0122 14:21:49.261720 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 22 14:21:49 crc kubenswrapper[4773]: I0122 14:21:49.261775 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 22 14:21:49 crc kubenswrapper[4773]: I0122 14:21:49.267787 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 22 14:21:50 crc kubenswrapper[4773]: I0122 14:21:50.037675 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 22 14:21:50 crc kubenswrapper[4773]: I0122 14:21:50.042823 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.335744 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz"] Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.337930 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.340535 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.340728 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.341089 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-bzqnq" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.341481 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.343470 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.343826 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.344020 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.346489 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz"] Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.423960 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.424025 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.424061 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxlpz\" (UniqueName: \"kubernetes.io/projected/69e89853-4593-4500-9f28-02cff2d55d23-kube-api-access-kxlpz\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.424112 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-ssh-key-openstack-cell1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.424136 4773 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.424155 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.425328 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/69e89853-4593-4500-9f28-02cff2d55d23-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.425426 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.425467 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.527082 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxlpz\" (UniqueName: \"kubernetes.io/projected/69e89853-4593-4500-9f28-02cff2d55d23-kube-api-access-kxlpz\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.527183 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-ssh-key-openstack-cell1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.527222 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" 
(UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.527249 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.528424 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/69e89853-4593-4500-9f28-02cff2d55d23-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.528484 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.528523 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.528567 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.528641 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.534042 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/69e89853-4593-4500-9f28-02cff2d55d23-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " 
pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.535466 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.536161 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.537013 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.540574 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.542002 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.542336 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.548789 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-ssh-key-openstack-cell1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.562082 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxlpz\" (UniqueName: 
\"kubernetes.io/projected/69e89853-4593-4500-9f28-02cff2d55d23-kube-api-access-kxlpz\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:51 crc kubenswrapper[4773]: I0122 14:21:51.659640 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:21:52 crc kubenswrapper[4773]: I0122 14:21:52.230347 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz"] Jan 22 14:21:53 crc kubenswrapper[4773]: I0122 14:21:53.068368 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" event={"ID":"69e89853-4593-4500-9f28-02cff2d55d23","Type":"ContainerStarted","Data":"2ae569745d1e84cee8154df162ea6108fa749f174d3be7b19a953296bd678f56"} Jan 22 14:21:53 crc kubenswrapper[4773]: I0122 14:21:53.068978 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" event={"ID":"69e89853-4593-4500-9f28-02cff2d55d23","Type":"ContainerStarted","Data":"1f4c143e208439578cb32b4085a12f4f0706f50c6e1fa1f7c29f4c2640abe40c"} Jan 22 14:21:53 crc kubenswrapper[4773]: I0122 14:21:53.100856 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" podStartSLOduration=1.60297953 podStartE2EDuration="2.100811479s" podCreationTimestamp="2026-01-22 14:21:51 +0000 UTC" firstStartedPulling="2026-01-22 14:21:52.237966715 +0000 UTC m=+8819.816082550" lastFinishedPulling="2026-01-22 14:21:52.735798674 +0000 UTC m=+8820.313914499" observedRunningTime="2026-01-22 14:21:53.088377476 +0000 UTC m=+8820.666493301" watchObservedRunningTime="2026-01-22 14:21:53.100811479 +0000 UTC m=+8820.678927304" Jan 22 14:23:04 crc kubenswrapper[4773]: I0122 14:23:04.074842 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:23:04 crc kubenswrapper[4773]: I0122 14:23:04.075501 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:23:34 crc kubenswrapper[4773]: I0122 14:23:34.074173 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:23:34 crc kubenswrapper[4773]: I0122 14:23:34.074751 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection 
refused" Jan 22 14:24:04 crc kubenswrapper[4773]: I0122 14:24:04.074811 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:24:04 crc kubenswrapper[4773]: I0122 14:24:04.076360 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:24:04 crc kubenswrapper[4773]: I0122 14:24:04.076494 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 14:24:04 crc kubenswrapper[4773]: I0122 14:24:04.077393 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f24ce25274805a35369c26a155360435b5830f3b7c26eca6af51ea6c67fa9bc6"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 14:24:04 crc kubenswrapper[4773]: I0122 14:24:04.077537 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://f24ce25274805a35369c26a155360435b5830f3b7c26eca6af51ea6c67fa9bc6" gracePeriod=600 Jan 22 14:24:04 crc kubenswrapper[4773]: I0122 14:24:04.535307 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="f24ce25274805a35369c26a155360435b5830f3b7c26eca6af51ea6c67fa9bc6" exitCode=0 Jan 22 14:24:04 crc kubenswrapper[4773]: I0122 14:24:04.535392 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"f24ce25274805a35369c26a155360435b5830f3b7c26eca6af51ea6c67fa9bc6"} Jan 22 14:24:04 crc kubenswrapper[4773]: I0122 14:24:04.535776 4773 scope.go:117] "RemoveContainer" containerID="209d07516c96479da6a4eb169115eae26bf83c0f591a094632de1883fa8599ad" Jan 22 14:24:05 crc kubenswrapper[4773]: I0122 14:24:05.549086 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187"} Jan 22 14:24:27 crc kubenswrapper[4773]: I0122 14:24:27.428694 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-nj6df"] Jan 22 14:24:27 crc kubenswrapper[4773]: I0122 14:24:27.431753 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-nj6df" Jan 22 14:24:27 crc kubenswrapper[4773]: I0122 14:24:27.470541 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nj6df"] Jan 22 14:24:27 crc kubenswrapper[4773]: I0122 14:24:27.517905 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87ed7103-61ca-49aa-94a4-0ca2ebe40e87-utilities\") pod \"community-operators-nj6df\" (UID: \"87ed7103-61ca-49aa-94a4-0ca2ebe40e87\") " pod="openshift-marketplace/community-operators-nj6df" Jan 22 14:24:27 crc kubenswrapper[4773]: I0122 14:24:27.518256 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r59d6\" (UniqueName: \"kubernetes.io/projected/87ed7103-61ca-49aa-94a4-0ca2ebe40e87-kube-api-access-r59d6\") pod \"community-operators-nj6df\" (UID: \"87ed7103-61ca-49aa-94a4-0ca2ebe40e87\") " pod="openshift-marketplace/community-operators-nj6df" Jan 22 14:24:27 crc kubenswrapper[4773]: I0122 14:24:27.518737 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87ed7103-61ca-49aa-94a4-0ca2ebe40e87-catalog-content\") pod \"community-operators-nj6df\" (UID: \"87ed7103-61ca-49aa-94a4-0ca2ebe40e87\") " pod="openshift-marketplace/community-operators-nj6df" Jan 22 14:24:27 crc kubenswrapper[4773]: I0122 14:24:27.621324 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87ed7103-61ca-49aa-94a4-0ca2ebe40e87-catalog-content\") pod \"community-operators-nj6df\" (UID: \"87ed7103-61ca-49aa-94a4-0ca2ebe40e87\") " pod="openshift-marketplace/community-operators-nj6df" Jan 22 14:24:27 crc kubenswrapper[4773]: I0122 14:24:27.621421 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87ed7103-61ca-49aa-94a4-0ca2ebe40e87-utilities\") pod \"community-operators-nj6df\" (UID: \"87ed7103-61ca-49aa-94a4-0ca2ebe40e87\") " pod="openshift-marketplace/community-operators-nj6df" Jan 22 14:24:27 crc kubenswrapper[4773]: I0122 14:24:27.621476 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r59d6\" (UniqueName: \"kubernetes.io/projected/87ed7103-61ca-49aa-94a4-0ca2ebe40e87-kube-api-access-r59d6\") pod \"community-operators-nj6df\" (UID: \"87ed7103-61ca-49aa-94a4-0ca2ebe40e87\") " pod="openshift-marketplace/community-operators-nj6df" Jan 22 14:24:27 crc kubenswrapper[4773]: I0122 14:24:27.622387 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87ed7103-61ca-49aa-94a4-0ca2ebe40e87-utilities\") pod \"community-operators-nj6df\" (UID: \"87ed7103-61ca-49aa-94a4-0ca2ebe40e87\") " pod="openshift-marketplace/community-operators-nj6df" Jan 22 14:24:27 crc kubenswrapper[4773]: I0122 14:24:27.622459 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87ed7103-61ca-49aa-94a4-0ca2ebe40e87-catalog-content\") pod \"community-operators-nj6df\" (UID: \"87ed7103-61ca-49aa-94a4-0ca2ebe40e87\") " pod="openshift-marketplace/community-operators-nj6df" Jan 22 14:24:27 crc kubenswrapper[4773]: I0122 14:24:27.644122 4773 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-r59d6\" (UniqueName: \"kubernetes.io/projected/87ed7103-61ca-49aa-94a4-0ca2ebe40e87-kube-api-access-r59d6\") pod \"community-operators-nj6df\" (UID: \"87ed7103-61ca-49aa-94a4-0ca2ebe40e87\") " pod="openshift-marketplace/community-operators-nj6df" Jan 22 14:24:27 crc kubenswrapper[4773]: I0122 14:24:27.760277 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nj6df" Jan 22 14:24:28 crc kubenswrapper[4773]: I0122 14:24:28.310914 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nj6df"] Jan 22 14:24:28 crc kubenswrapper[4773]: I0122 14:24:28.823798 4773 generic.go:334] "Generic (PLEG): container finished" podID="87ed7103-61ca-49aa-94a4-0ca2ebe40e87" containerID="71df08d8ef32fd2e8e5985544d3e5b4156b4c75ebec5ed54f60bd51da6db4e62" exitCode=0 Jan 22 14:24:28 crc kubenswrapper[4773]: I0122 14:24:28.823902 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nj6df" event={"ID":"87ed7103-61ca-49aa-94a4-0ca2ebe40e87","Type":"ContainerDied","Data":"71df08d8ef32fd2e8e5985544d3e5b4156b4c75ebec5ed54f60bd51da6db4e62"} Jan 22 14:24:28 crc kubenswrapper[4773]: I0122 14:24:28.824095 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nj6df" event={"ID":"87ed7103-61ca-49aa-94a4-0ca2ebe40e87","Type":"ContainerStarted","Data":"911573bc22644bbf88672bc8485787338164b841b14b74db62212b42528bdd8b"} Jan 22 14:24:28 crc kubenswrapper[4773]: I0122 14:24:28.827503 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 14:24:29 crc kubenswrapper[4773]: I0122 14:24:29.836594 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nj6df" event={"ID":"87ed7103-61ca-49aa-94a4-0ca2ebe40e87","Type":"ContainerStarted","Data":"22fe933b726777bef6be213b7f4625cac371b63081143cb4443adab7e3930b12"} Jan 22 14:24:30 crc kubenswrapper[4773]: I0122 14:24:30.848398 4773 generic.go:334] "Generic (PLEG): container finished" podID="87ed7103-61ca-49aa-94a4-0ca2ebe40e87" containerID="22fe933b726777bef6be213b7f4625cac371b63081143cb4443adab7e3930b12" exitCode=0 Jan 22 14:24:30 crc kubenswrapper[4773]: I0122 14:24:30.848486 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nj6df" event={"ID":"87ed7103-61ca-49aa-94a4-0ca2ebe40e87","Type":"ContainerDied","Data":"22fe933b726777bef6be213b7f4625cac371b63081143cb4443adab7e3930b12"} Jan 22 14:24:32 crc kubenswrapper[4773]: I0122 14:24:32.880543 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nj6df" event={"ID":"87ed7103-61ca-49aa-94a4-0ca2ebe40e87","Type":"ContainerStarted","Data":"b3d9c55e44eec92489bf8b996c21d6c92b568a740dc4e535a3ba24f0a04ae7e5"} Jan 22 14:24:32 crc kubenswrapper[4773]: I0122 14:24:32.915384 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nj6df" podStartSLOduration=3.479430782 podStartE2EDuration="5.915361329s" podCreationTimestamp="2026-01-22 14:24:27 +0000 UTC" firstStartedPulling="2026-01-22 14:24:28.826838399 +0000 UTC m=+8976.404954234" lastFinishedPulling="2026-01-22 14:24:31.262768946 +0000 UTC m=+8978.840884781" observedRunningTime="2026-01-22 14:24:32.9055237 +0000 UTC m=+8980.483639535" watchObservedRunningTime="2026-01-22 
Jan 22 14:24:32 crc kubenswrapper[4773]: I0122 14:24:32.915384 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nj6df" podStartSLOduration=3.479430782 podStartE2EDuration="5.915361329s" podCreationTimestamp="2026-01-22 14:24:27 +0000 UTC" firstStartedPulling="2026-01-22 14:24:28.826838399 +0000 UTC m=+8976.404954234" lastFinishedPulling="2026-01-22 14:24:31.262768946 +0000 UTC m=+8978.840884781" observedRunningTime="2026-01-22 14:24:32.9055237 +0000 UTC m=+8980.483639535" watchObservedRunningTime="2026-01-22 14:24:32.915361329 +0000 UTC m=+8980.493477154"
Jan 22 14:24:37 crc kubenswrapper[4773]: I0122 14:24:37.761333 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-nj6df"
Jan 22 14:24:37 crc kubenswrapper[4773]: I0122 14:24:37.762858 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-nj6df"
Jan 22 14:24:37 crc kubenswrapper[4773]: I0122 14:24:37.826586 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-nj6df"
Jan 22 14:24:37 crc kubenswrapper[4773]: I0122 14:24:37.987951 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-nj6df"
Jan 22 14:24:38 crc kubenswrapper[4773]: I0122 14:24:38.066967 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nj6df"]
Jan 22 14:24:38 crc kubenswrapper[4773]: I0122 14:24:38.950975 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" event={"ID":"69e89853-4593-4500-9f28-02cff2d55d23","Type":"ContainerDied","Data":"2ae569745d1e84cee8154df162ea6108fa749f174d3be7b19a953296bd678f56"}
Jan 22 14:24:38 crc kubenswrapper[4773]: I0122 14:24:38.951193 4773 generic.go:334] "Generic (PLEG): container finished" podID="69e89853-4593-4500-9f28-02cff2d55d23" containerID="2ae569745d1e84cee8154df162ea6108fa749f174d3be7b19a953296bd678f56" exitCode=0
Jan 22 14:24:39 crc kubenswrapper[4773]: I0122 14:24:39.960509 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-nj6df" podUID="87ed7103-61ca-49aa-94a4-0ca2ebe40e87" containerName="registry-server" containerID="cri-o://b3d9c55e44eec92489bf8b996c21d6c92b568a740dc4e535a3ba24f0a04ae7e5" gracePeriod=2
Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.521570 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-cell1-compute-config-0\") pod \"69e89853-4593-4500-9f28-02cff2d55d23\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.521614 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-migration-ssh-key-0\") pod \"69e89853-4593-4500-9f28-02cff2d55d23\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.521724 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxlpz\" (UniqueName: \"kubernetes.io/projected/69e89853-4593-4500-9f28-02cff2d55d23-kube-api-access-kxlpz\") pod \"69e89853-4593-4500-9f28-02cff2d55d23\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.521773 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-inventory\") pod \"69e89853-4593-4500-9f28-02cff2d55d23\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.521800 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-migration-ssh-key-1\") pod \"69e89853-4593-4500-9f28-02cff2d55d23\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.521844 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-cell1-compute-config-1\") pod \"69e89853-4593-4500-9f28-02cff2d55d23\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.521931 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-cell1-combined-ca-bundle\") pod \"69e89853-4593-4500-9f28-02cff2d55d23\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.521976 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/69e89853-4593-4500-9f28-02cff2d55d23-nova-cells-global-config-0\") pod \"69e89853-4593-4500-9f28-02cff2d55d23\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.522008 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-ssh-key-openstack-cell1\") pod \"69e89853-4593-4500-9f28-02cff2d55d23\" (UID: \"69e89853-4593-4500-9f28-02cff2d55d23\") " Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.528335 4773 operation_generator.go:803] 
Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.528335 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "69e89853-4593-4500-9f28-02cff2d55d23" (UID: "69e89853-4593-4500-9f28-02cff2d55d23"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.528456 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69e89853-4593-4500-9f28-02cff2d55d23-kube-api-access-kxlpz" (OuterVolumeSpecName: "kube-api-access-kxlpz") pod "69e89853-4593-4500-9f28-02cff2d55d23" (UID: "69e89853-4593-4500-9f28-02cff2d55d23"). InnerVolumeSpecName "kube-api-access-kxlpz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.560479 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "69e89853-4593-4500-9f28-02cff2d55d23" (UID: "69e89853-4593-4500-9f28-02cff2d55d23"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.561549 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "69e89853-4593-4500-9f28-02cff2d55d23" (UID: "69e89853-4593-4500-9f28-02cff2d55d23"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.562093 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-inventory" (OuterVolumeSpecName: "inventory") pod "69e89853-4593-4500-9f28-02cff2d55d23" (UID: "69e89853-4593-4500-9f28-02cff2d55d23"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.563385 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "69e89853-4593-4500-9f28-02cff2d55d23" (UID: "69e89853-4593-4500-9f28-02cff2d55d23"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.576605 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "69e89853-4593-4500-9f28-02cff2d55d23" (UID: "69e89853-4593-4500-9f28-02cff2d55d23"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue ""
InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.591006 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "69e89853-4593-4500-9f28-02cff2d55d23" (UID: "69e89853-4593-4500-9f28-02cff2d55d23"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.625421 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxlpz\" (UniqueName: \"kubernetes.io/projected/69e89853-4593-4500-9f28-02cff2d55d23-kube-api-access-kxlpz\") on node \"crc\" DevicePath \"\"" Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.625464 4773 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-inventory\") on node \"crc\" DevicePath \"\"" Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.625476 4773 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.625485 4773 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.625497 4773 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.625507 4773 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/69e89853-4593-4500-9f28-02cff2d55d23-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.625515 4773 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.625523 4773 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.625532 4773 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/69e89853-4593-4500-9f28-02cff2d55d23-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.857535 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-nj6df" Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.975320 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" event={"ID":"69e89853-4593-4500-9f28-02cff2d55d23","Type":"ContainerDied","Data":"1f4c143e208439578cb32b4085a12f4f0706f50c6e1fa1f7c29f4c2640abe40c"} Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.975363 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f4c143e208439578cb32b4085a12f4f0706f50c6e1fa1f7c29f4c2640abe40c" Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.975389 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz" Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.982266 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nj6df" Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.982310 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nj6df" event={"ID":"87ed7103-61ca-49aa-94a4-0ca2ebe40e87","Type":"ContainerDied","Data":"b3d9c55e44eec92489bf8b996c21d6c92b568a740dc4e535a3ba24f0a04ae7e5"} Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.982375 4773 scope.go:117] "RemoveContainer" containerID="b3d9c55e44eec92489bf8b996c21d6c92b568a740dc4e535a3ba24f0a04ae7e5" Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.982166 4773 generic.go:334] "Generic (PLEG): container finished" podID="87ed7103-61ca-49aa-94a4-0ca2ebe40e87" containerID="b3d9c55e44eec92489bf8b996c21d6c92b568a740dc4e535a3ba24f0a04ae7e5" exitCode=0 Jan 22 14:24:40 crc kubenswrapper[4773]: I0122 14:24:40.988543 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nj6df" event={"ID":"87ed7103-61ca-49aa-94a4-0ca2ebe40e87","Type":"ContainerDied","Data":"911573bc22644bbf88672bc8485787338164b841b14b74db62212b42528bdd8b"} Jan 22 14:24:41 crc kubenswrapper[4773]: I0122 14:24:41.013352 4773 scope.go:117] "RemoveContainer" containerID="22fe933b726777bef6be213b7f4625cac371b63081143cb4443adab7e3930b12" Jan 22 14:24:41 crc kubenswrapper[4773]: I0122 14:24:41.037053 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87ed7103-61ca-49aa-94a4-0ca2ebe40e87-catalog-content\") pod \"87ed7103-61ca-49aa-94a4-0ca2ebe40e87\" (UID: \"87ed7103-61ca-49aa-94a4-0ca2ebe40e87\") " Jan 22 14:24:41 crc kubenswrapper[4773]: I0122 14:24:41.037351 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87ed7103-61ca-49aa-94a4-0ca2ebe40e87-utilities\") pod \"87ed7103-61ca-49aa-94a4-0ca2ebe40e87\" (UID: \"87ed7103-61ca-49aa-94a4-0ca2ebe40e87\") " Jan 22 14:24:41 crc kubenswrapper[4773]: I0122 14:24:41.037526 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r59d6\" (UniqueName: \"kubernetes.io/projected/87ed7103-61ca-49aa-94a4-0ca2ebe40e87-kube-api-access-r59d6\") pod \"87ed7103-61ca-49aa-94a4-0ca2ebe40e87\" (UID: \"87ed7103-61ca-49aa-94a4-0ca2ebe40e87\") " Jan 22 14:24:41 crc kubenswrapper[4773]: I0122 14:24:41.037761 4773 scope.go:117] "RemoveContainer" 
containerID="71df08d8ef32fd2e8e5985544d3e5b4156b4c75ebec5ed54f60bd51da6db4e62" Jan 22 14:24:41 crc kubenswrapper[4773]: I0122 14:24:41.038178 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87ed7103-61ca-49aa-94a4-0ca2ebe40e87-utilities" (OuterVolumeSpecName: "utilities") pod "87ed7103-61ca-49aa-94a4-0ca2ebe40e87" (UID: "87ed7103-61ca-49aa-94a4-0ca2ebe40e87"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:24:41 crc kubenswrapper[4773]: I0122 14:24:41.042339 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87ed7103-61ca-49aa-94a4-0ca2ebe40e87-kube-api-access-r59d6" (OuterVolumeSpecName: "kube-api-access-r59d6") pod "87ed7103-61ca-49aa-94a4-0ca2ebe40e87" (UID: "87ed7103-61ca-49aa-94a4-0ca2ebe40e87"). InnerVolumeSpecName "kube-api-access-r59d6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:24:41 crc kubenswrapper[4773]: I0122 14:24:41.080445 4773 scope.go:117] "RemoveContainer" containerID="b3d9c55e44eec92489bf8b996c21d6c92b568a740dc4e535a3ba24f0a04ae7e5" Jan 22 14:24:41 crc kubenswrapper[4773]: E0122 14:24:41.089823 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3d9c55e44eec92489bf8b996c21d6c92b568a740dc4e535a3ba24f0a04ae7e5\": container with ID starting with b3d9c55e44eec92489bf8b996c21d6c92b568a740dc4e535a3ba24f0a04ae7e5 not found: ID does not exist" containerID="b3d9c55e44eec92489bf8b996c21d6c92b568a740dc4e535a3ba24f0a04ae7e5" Jan 22 14:24:41 crc kubenswrapper[4773]: I0122 14:24:41.089886 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3d9c55e44eec92489bf8b996c21d6c92b568a740dc4e535a3ba24f0a04ae7e5"} err="failed to get container status \"b3d9c55e44eec92489bf8b996c21d6c92b568a740dc4e535a3ba24f0a04ae7e5\": rpc error: code = NotFound desc = could not find container \"b3d9c55e44eec92489bf8b996c21d6c92b568a740dc4e535a3ba24f0a04ae7e5\": container with ID starting with b3d9c55e44eec92489bf8b996c21d6c92b568a740dc4e535a3ba24f0a04ae7e5 not found: ID does not exist" Jan 22 14:24:41 crc kubenswrapper[4773]: I0122 14:24:41.089920 4773 scope.go:117] "RemoveContainer" containerID="22fe933b726777bef6be213b7f4625cac371b63081143cb4443adab7e3930b12" Jan 22 14:24:41 crc kubenswrapper[4773]: E0122 14:24:41.090820 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22fe933b726777bef6be213b7f4625cac371b63081143cb4443adab7e3930b12\": container with ID starting with 22fe933b726777bef6be213b7f4625cac371b63081143cb4443adab7e3930b12 not found: ID does not exist" containerID="22fe933b726777bef6be213b7f4625cac371b63081143cb4443adab7e3930b12" Jan 22 14:24:41 crc kubenswrapper[4773]: I0122 14:24:41.090863 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22fe933b726777bef6be213b7f4625cac371b63081143cb4443adab7e3930b12"} err="failed to get container status \"22fe933b726777bef6be213b7f4625cac371b63081143cb4443adab7e3930b12\": rpc error: code = NotFound desc = could not find container \"22fe933b726777bef6be213b7f4625cac371b63081143cb4443adab7e3930b12\": container with ID starting with 22fe933b726777bef6be213b7f4625cac371b63081143cb4443adab7e3930b12 not found: ID does not exist" Jan 22 14:24:41 crc kubenswrapper[4773]: I0122 14:24:41.090894 4773 scope.go:117] "RemoveContainer" 
containerID="71df08d8ef32fd2e8e5985544d3e5b4156b4c75ebec5ed54f60bd51da6db4e62" Jan 22 14:24:41 crc kubenswrapper[4773]: E0122 14:24:41.091535 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71df08d8ef32fd2e8e5985544d3e5b4156b4c75ebec5ed54f60bd51da6db4e62\": container with ID starting with 71df08d8ef32fd2e8e5985544d3e5b4156b4c75ebec5ed54f60bd51da6db4e62 not found: ID does not exist" containerID="71df08d8ef32fd2e8e5985544d3e5b4156b4c75ebec5ed54f60bd51da6db4e62" Jan 22 14:24:41 crc kubenswrapper[4773]: I0122 14:24:41.091792 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71df08d8ef32fd2e8e5985544d3e5b4156b4c75ebec5ed54f60bd51da6db4e62"} err="failed to get container status \"71df08d8ef32fd2e8e5985544d3e5b4156b4c75ebec5ed54f60bd51da6db4e62\": rpc error: code = NotFound desc = could not find container \"71df08d8ef32fd2e8e5985544d3e5b4156b4c75ebec5ed54f60bd51da6db4e62\": container with ID starting with 71df08d8ef32fd2e8e5985544d3e5b4156b4c75ebec5ed54f60bd51da6db4e62 not found: ID does not exist" Jan 22 14:24:41 crc kubenswrapper[4773]: I0122 14:24:41.095377 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87ed7103-61ca-49aa-94a4-0ca2ebe40e87-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "87ed7103-61ca-49aa-94a4-0ca2ebe40e87" (UID: "87ed7103-61ca-49aa-94a4-0ca2ebe40e87"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:24:41 crc kubenswrapper[4773]: I0122 14:24:41.140777 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r59d6\" (UniqueName: \"kubernetes.io/projected/87ed7103-61ca-49aa-94a4-0ca2ebe40e87-kube-api-access-r59d6\") on node \"crc\" DevicePath \"\"" Jan 22 14:24:41 crc kubenswrapper[4773]: I0122 14:24:41.141214 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/87ed7103-61ca-49aa-94a4-0ca2ebe40e87-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:24:41 crc kubenswrapper[4773]: I0122 14:24:41.141225 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/87ed7103-61ca-49aa-94a4-0ca2ebe40e87-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:24:41 crc kubenswrapper[4773]: I0122 14:24:41.318786 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nj6df"] Jan 22 14:24:41 crc kubenswrapper[4773]: I0122 14:24:41.330000 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-nj6df"] Jan 22 14:24:42 crc kubenswrapper[4773]: I0122 14:24:42.669803 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87ed7103-61ca-49aa-94a4-0ca2ebe40e87" path="/var/lib/kubelet/pods/87ed7103-61ca-49aa-94a4-0ca2ebe40e87/volumes" Jan 22 14:26:04 crc kubenswrapper[4773]: I0122 14:26:04.074068 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:26:04 crc kubenswrapper[4773]: I0122 14:26:04.074589 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" 
podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:26:24 crc kubenswrapper[4773]: E0122 14:26:24.017406 4773 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.97:37708->38.102.83.97:41367: write tcp 38.102.83.97:37708->38.102.83.97:41367: write: broken pipe Jan 22 14:26:29 crc kubenswrapper[4773]: I0122 14:26:29.909698 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"] Jan 22 14:26:29 crc kubenswrapper[4773]: I0122 14:26:29.910513 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/mariadb-copy-data" podUID="a14d1fa5-fa02-4064-bffc-6e5bb0f67531" containerName="adoption" containerID="cri-o://bf8d4d3b032f86913c2e301c2f397ccef4d80417030d623bcb64240395380142" gracePeriod=30 Jan 22 14:26:34 crc kubenswrapper[4773]: I0122 14:26:34.075203 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:26:34 crc kubenswrapper[4773]: I0122 14:26:34.075753 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:27:00 crc kubenswrapper[4773]: I0122 14:27:00.417663 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Jan 22 14:27:00 crc kubenswrapper[4773]: I0122 14:27:00.481150 4773 generic.go:334] "Generic (PLEG): container finished" podID="a14d1fa5-fa02-4064-bffc-6e5bb0f67531" containerID="bf8d4d3b032f86913c2e301c2f397ccef4d80417030d623bcb64240395380142" exitCode=137 Jan 22 14:27:00 crc kubenswrapper[4773]: I0122 14:27:00.481216 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"a14d1fa5-fa02-4064-bffc-6e5bb0f67531","Type":"ContainerDied","Data":"bf8d4d3b032f86913c2e301c2f397ccef4d80417030d623bcb64240395380142"} Jan 22 14:27:00 crc kubenswrapper[4773]: I0122 14:27:00.481253 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"a14d1fa5-fa02-4064-bffc-6e5bb0f67531","Type":"ContainerDied","Data":"447604bccc2fca449ce1de1c701a88a0f3cf8621f9e4d0be538d2d6404c0d81c"} Jan 22 14:27:00 crc kubenswrapper[4773]: I0122 14:27:00.481275 4773 scope.go:117] "RemoveContainer" containerID="bf8d4d3b032f86913c2e301c2f397ccef4d80417030d623bcb64240395380142" Jan 22 14:27:00 crc kubenswrapper[4773]: I0122 14:27:00.481488 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-copy-data" Jan 22 14:27:00 crc kubenswrapper[4773]: I0122 14:27:00.507895 4773 scope.go:117] "RemoveContainer" containerID="bf8d4d3b032f86913c2e301c2f397ccef4d80417030d623bcb64240395380142" Jan 22 14:27:00 crc kubenswrapper[4773]: E0122 14:27:00.508411 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf8d4d3b032f86913c2e301c2f397ccef4d80417030d623bcb64240395380142\": container with ID starting with bf8d4d3b032f86913c2e301c2f397ccef4d80417030d623bcb64240395380142 not found: ID does not exist" containerID="bf8d4d3b032f86913c2e301c2f397ccef4d80417030d623bcb64240395380142" Jan 22 14:27:00 crc kubenswrapper[4773]: I0122 14:27:00.508497 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf8d4d3b032f86913c2e301c2f397ccef4d80417030d623bcb64240395380142"} err="failed to get container status \"bf8d4d3b032f86913c2e301c2f397ccef4d80417030d623bcb64240395380142\": rpc error: code = NotFound desc = could not find container \"bf8d4d3b032f86913c2e301c2f397ccef4d80417030d623bcb64240395380142\": container with ID starting with bf8d4d3b032f86913c2e301c2f397ccef4d80417030d623bcb64240395380142 not found: ID does not exist" Jan 22 14:27:00 crc kubenswrapper[4773]: I0122 14:27:00.546958 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mariadb-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a311c000-22a2-44a0-967f-2ec5ecc7cece\") pod \"a14d1fa5-fa02-4064-bffc-6e5bb0f67531\" (UID: \"a14d1fa5-fa02-4064-bffc-6e5bb0f67531\") " Jan 22 14:27:00 crc kubenswrapper[4773]: I0122 14:27:00.547379 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-97962\" (UniqueName: \"kubernetes.io/projected/a14d1fa5-fa02-4064-bffc-6e5bb0f67531-kube-api-access-97962\") pod \"a14d1fa5-fa02-4064-bffc-6e5bb0f67531\" (UID: \"a14d1fa5-fa02-4064-bffc-6e5bb0f67531\") " Jan 22 14:27:00 crc kubenswrapper[4773]: I0122 14:27:00.558553 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a14d1fa5-fa02-4064-bffc-6e5bb0f67531-kube-api-access-97962" (OuterVolumeSpecName: "kube-api-access-97962") pod "a14d1fa5-fa02-4064-bffc-6e5bb0f67531" (UID: "a14d1fa5-fa02-4064-bffc-6e5bb0f67531"). InnerVolumeSpecName "kube-api-access-97962". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:27:00 crc kubenswrapper[4773]: I0122 14:27:00.564194 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a311c000-22a2-44a0-967f-2ec5ecc7cece" (OuterVolumeSpecName: "mariadb-data") pod "a14d1fa5-fa02-4064-bffc-6e5bb0f67531" (UID: "a14d1fa5-fa02-4064-bffc-6e5bb0f67531"). InnerVolumeSpecName "pvc-a311c000-22a2-44a0-967f-2ec5ecc7cece". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 22 14:27:00 crc kubenswrapper[4773]: I0122 14:27:00.649574 4773 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-a311c000-22a2-44a0-967f-2ec5ecc7cece\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a311c000-22a2-44a0-967f-2ec5ecc7cece\") on node \"crc\" " Jan 22 14:27:00 crc kubenswrapper[4773]: I0122 14:27:00.649619 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-97962\" (UniqueName: \"kubernetes.io/projected/a14d1fa5-fa02-4064-bffc-6e5bb0f67531-kube-api-access-97962\") on node \"crc\" DevicePath \"\"" Jan 22 14:27:00 crc kubenswrapper[4773]: I0122 14:27:00.692395 4773 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Jan 22 14:27:00 crc kubenswrapper[4773]: I0122 14:27:00.692802 4773 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-a311c000-22a2-44a0-967f-2ec5ecc7cece" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a311c000-22a2-44a0-967f-2ec5ecc7cece") on node "crc" Jan 22 14:27:00 crc kubenswrapper[4773]: I0122 14:27:00.752926 4773 reconciler_common.go:293] "Volume detached for volume \"pvc-a311c000-22a2-44a0-967f-2ec5ecc7cece\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a311c000-22a2-44a0-967f-2ec5ecc7cece\") on node \"crc\" DevicePath \"\"" Jan 22 14:27:00 crc kubenswrapper[4773]: I0122 14:27:00.806201 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"] Jan 22 14:27:00 crc kubenswrapper[4773]: I0122 14:27:00.815706 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-copy-data"] Jan 22 14:27:01 crc kubenswrapper[4773]: I0122 14:27:01.446916 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"] Jan 22 14:27:01 crc kubenswrapper[4773]: I0122 14:27:01.447472 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-copy-data" podUID="938f08ac-3fcb-4667-8af5-61f91610e9df" containerName="adoption" containerID="cri-o://1af58a65f114fd81891be0393bf9cb4d322789b5c642ba6da987551569c81c4f" gracePeriod=30 Jan 22 14:27:02 crc kubenswrapper[4773]: I0122 14:27:02.676913 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a14d1fa5-fa02-4064-bffc-6e5bb0f67531" path="/var/lib/kubelet/pods/a14d1fa5-fa02-4064-bffc-6e5bb0f67531/volumes" Jan 22 14:27:04 crc kubenswrapper[4773]: I0122 14:27:04.073919 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:27:04 crc kubenswrapper[4773]: I0122 14:27:04.075211 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:27:04 crc kubenswrapper[4773]: I0122 14:27:04.075432 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 14:27:04 crc kubenswrapper[4773]: I0122 14:27:04.076568 4773 kuberuntime_manager.go:1027] 
"Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 14:27:04 crc kubenswrapper[4773]: I0122 14:27:04.076769 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" gracePeriod=600 Jan 22 14:27:04 crc kubenswrapper[4773]: E0122 14:27:04.209731 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:27:04 crc kubenswrapper[4773]: I0122 14:27:04.557170 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" exitCode=0 Jan 22 14:27:04 crc kubenswrapper[4773]: I0122 14:27:04.557235 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187"} Jan 22 14:27:04 crc kubenswrapper[4773]: I0122 14:27:04.557566 4773 scope.go:117] "RemoveContainer" containerID="f24ce25274805a35369c26a155360435b5830f3b7c26eca6af51ea6c67fa9bc6" Jan 22 14:27:04 crc kubenswrapper[4773]: I0122 14:27:04.558894 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:27:04 crc kubenswrapper[4773]: E0122 14:27:04.559813 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:27:19 crc kubenswrapper[4773]: I0122 14:27:19.658017 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:27:19 crc kubenswrapper[4773]: E0122 14:27:19.658763 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:27:31 crc kubenswrapper[4773]: I0122 14:27:31.913075 4773 generic.go:334] "Generic (PLEG): container finished" podID="938f08ac-3fcb-4667-8af5-61f91610e9df" 
containerID="1af58a65f114fd81891be0393bf9cb4d322789b5c642ba6da987551569c81c4f" exitCode=137 Jan 22 14:27:31 crc kubenswrapper[4773]: I0122 14:27:31.913184 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"938f08ac-3fcb-4667-8af5-61f91610e9df","Type":"ContainerDied","Data":"1af58a65f114fd81891be0393bf9cb4d322789b5c642ba6da987551569c81c4f"} Jan 22 14:27:32 crc kubenswrapper[4773]: I0122 14:27:32.512152 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data" Jan 22 14:27:32 crc kubenswrapper[4773]: I0122 14:27:32.618347 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzfxw\" (UniqueName: \"kubernetes.io/projected/938f08ac-3fcb-4667-8af5-61f91610e9df-kube-api-access-tzfxw\") pod \"938f08ac-3fcb-4667-8af5-61f91610e9df\" (UID: \"938f08ac-3fcb-4667-8af5-61f91610e9df\") " Jan 22 14:27:32 crc kubenswrapper[4773]: I0122 14:27:32.618634 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/938f08ac-3fcb-4667-8af5-61f91610e9df-ovn-data-cert\") pod \"938f08ac-3fcb-4667-8af5-61f91610e9df\" (UID: \"938f08ac-3fcb-4667-8af5-61f91610e9df\") " Jan 22 14:27:32 crc kubenswrapper[4773]: I0122 14:27:32.619333 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-32d72340-e7f5-40d5-8b6f-a0e76a487654\") pod \"938f08ac-3fcb-4667-8af5-61f91610e9df\" (UID: \"938f08ac-3fcb-4667-8af5-61f91610e9df\") " Jan 22 14:27:32 crc kubenswrapper[4773]: I0122 14:27:32.626674 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/938f08ac-3fcb-4667-8af5-61f91610e9df-kube-api-access-tzfxw" (OuterVolumeSpecName: "kube-api-access-tzfxw") pod "938f08ac-3fcb-4667-8af5-61f91610e9df" (UID: "938f08ac-3fcb-4667-8af5-61f91610e9df"). InnerVolumeSpecName "kube-api-access-tzfxw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:27:32 crc kubenswrapper[4773]: I0122 14:27:32.627087 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/938f08ac-3fcb-4667-8af5-61f91610e9df-ovn-data-cert" (OuterVolumeSpecName: "ovn-data-cert") pod "938f08ac-3fcb-4667-8af5-61f91610e9df" (UID: "938f08ac-3fcb-4667-8af5-61f91610e9df"). InnerVolumeSpecName "ovn-data-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:27:32 crc kubenswrapper[4773]: I0122 14:27:32.711479 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-32d72340-e7f5-40d5-8b6f-a0e76a487654" (OuterVolumeSpecName: "ovn-data") pod "938f08ac-3fcb-4667-8af5-61f91610e9df" (UID: "938f08ac-3fcb-4667-8af5-61f91610e9df"). InnerVolumeSpecName "pvc-32d72340-e7f5-40d5-8b6f-a0e76a487654". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 22 14:27:32 crc kubenswrapper[4773]: I0122 14:27:32.721698 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzfxw\" (UniqueName: \"kubernetes.io/projected/938f08ac-3fcb-4667-8af5-61f91610e9df-kube-api-access-tzfxw\") on node \"crc\" DevicePath \"\"" Jan 22 14:27:32 crc kubenswrapper[4773]: I0122 14:27:32.721754 4773 reconciler_common.go:293] "Volume detached for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/938f08ac-3fcb-4667-8af5-61f91610e9df-ovn-data-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:27:32 crc kubenswrapper[4773]: I0122 14:27:32.721793 4773 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-32d72340-e7f5-40d5-8b6f-a0e76a487654\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-32d72340-e7f5-40d5-8b6f-a0e76a487654\") on node \"crc\" " Jan 22 14:27:32 crc kubenswrapper[4773]: I0122 14:27:32.763220 4773 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Jan 22 14:27:32 crc kubenswrapper[4773]: I0122 14:27:32.763847 4773 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-32d72340-e7f5-40d5-8b6f-a0e76a487654" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-32d72340-e7f5-40d5-8b6f-a0e76a487654") on node "crc" Jan 22 14:27:32 crc kubenswrapper[4773]: I0122 14:27:32.824468 4773 reconciler_common.go:293] "Volume detached for volume \"pvc-32d72340-e7f5-40d5-8b6f-a0e76a487654\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-32d72340-e7f5-40d5-8b6f-a0e76a487654\") on node \"crc\" DevicePath \"\"" Jan 22 14:27:32 crc kubenswrapper[4773]: I0122 14:27:32.926699 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"938f08ac-3fcb-4667-8af5-61f91610e9df","Type":"ContainerDied","Data":"4fd69411df887dd8d661928114c2c404feebf7d69b9c272e0fe3db986b801376"} Jan 22 14:27:32 crc kubenswrapper[4773]: I0122 14:27:32.926773 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Jan 22 14:27:32 crc kubenswrapper[4773]: I0122 14:27:32.932421 4773 scope.go:117] "RemoveContainer" containerID="1af58a65f114fd81891be0393bf9cb4d322789b5c642ba6da987551569c81c4f" Jan 22 14:27:32 crc kubenswrapper[4773]: I0122 14:27:32.985479 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"] Jan 22 14:27:32 crc kubenswrapper[4773]: I0122 14:27:32.994985 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-copy-data"] Jan 22 14:27:33 crc kubenswrapper[4773]: I0122 14:27:33.659246 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:27:33 crc kubenswrapper[4773]: E0122 14:27:33.660201 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:27:34 crc kubenswrapper[4773]: I0122 14:27:34.675128 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="938f08ac-3fcb-4667-8af5-61f91610e9df" path="/var/lib/kubelet/pods/938f08ac-3fcb-4667-8af5-61f91610e9df/volumes" Jan 22 14:27:45 crc kubenswrapper[4773]: I0122 14:27:45.658484 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:27:45 crc kubenswrapper[4773]: E0122 14:27:45.660143 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:27:57 crc kubenswrapper[4773]: I0122 14:27:57.658171 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:27:57 crc kubenswrapper[4773]: E0122 14:27:57.659042 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:28:11 crc kubenswrapper[4773]: I0122 14:28:11.658805 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:28:11 crc kubenswrapper[4773]: E0122 14:28:11.660153 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:28:24 crc kubenswrapper[4773]: I0122 14:28:24.659322 4773 scope.go:117] 
"RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:28:24 crc kubenswrapper[4773]: E0122 14:28:24.660743 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:28:36 crc kubenswrapper[4773]: I0122 14:28:36.661058 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:28:36 crc kubenswrapper[4773]: E0122 14:28:36.662604 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.333397 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-n4qb8/must-gather-9wq4j"] Jan 22 14:28:39 crc kubenswrapper[4773]: E0122 14:28:39.335098 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69e89853-4593-4500-9f28-02cff2d55d23" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.335185 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="69e89853-4593-4500-9f28-02cff2d55d23" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Jan 22 14:28:39 crc kubenswrapper[4773]: E0122 14:28:39.335320 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="938f08ac-3fcb-4667-8af5-61f91610e9df" containerName="adoption" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.335401 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="938f08ac-3fcb-4667-8af5-61f91610e9df" containerName="adoption" Jan 22 14:28:39 crc kubenswrapper[4773]: E0122 14:28:39.335478 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87ed7103-61ca-49aa-94a4-0ca2ebe40e87" containerName="extract-utilities" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.335544 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="87ed7103-61ca-49aa-94a4-0ca2ebe40e87" containerName="extract-utilities" Jan 22 14:28:39 crc kubenswrapper[4773]: E0122 14:28:39.335605 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a14d1fa5-fa02-4064-bffc-6e5bb0f67531" containerName="adoption" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.335666 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="a14d1fa5-fa02-4064-bffc-6e5bb0f67531" containerName="adoption" Jan 22 14:28:39 crc kubenswrapper[4773]: E0122 14:28:39.335749 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87ed7103-61ca-49aa-94a4-0ca2ebe40e87" containerName="registry-server" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.335812 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="87ed7103-61ca-49aa-94a4-0ca2ebe40e87" containerName="registry-server" Jan 22 14:28:39 crc kubenswrapper[4773]: E0122 14:28:39.335871 4773 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87ed7103-61ca-49aa-94a4-0ca2ebe40e87" containerName="extract-content" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.335929 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="87ed7103-61ca-49aa-94a4-0ca2ebe40e87" containerName="extract-content" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.341091 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="69e89853-4593-4500-9f28-02cff2d55d23" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.341195 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="a14d1fa5-fa02-4064-bffc-6e5bb0f67531" containerName="adoption" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.341261 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="938f08ac-3fcb-4667-8af5-61f91610e9df" containerName="adoption" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.341371 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="87ed7103-61ca-49aa-94a4-0ca2ebe40e87" containerName="registry-server" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.342706 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-n4qb8/must-gather-9wq4j" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.349055 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-n4qb8"/"kube-root-ca.crt" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.349688 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-n4qb8"/"default-dockercfg-kg4gn" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.350748 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-n4qb8"/"openshift-service-ca.crt" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.353486 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-n4qb8/must-gather-9wq4j"] Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.518174 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkkzf\" (UniqueName: \"kubernetes.io/projected/dda50136-d4d7-45b1-9cab-72103c6ae1fb-kube-api-access-rkkzf\") pod \"must-gather-9wq4j\" (UID: \"dda50136-d4d7-45b1-9cab-72103c6ae1fb\") " pod="openshift-must-gather-n4qb8/must-gather-9wq4j" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.518577 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/dda50136-d4d7-45b1-9cab-72103c6ae1fb-must-gather-output\") pod \"must-gather-9wq4j\" (UID: \"dda50136-d4d7-45b1-9cab-72103c6ae1fb\") " pod="openshift-must-gather-n4qb8/must-gather-9wq4j" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.620900 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkkzf\" (UniqueName: \"kubernetes.io/projected/dda50136-d4d7-45b1-9cab-72103c6ae1fb-kube-api-access-rkkzf\") pod \"must-gather-9wq4j\" (UID: \"dda50136-d4d7-45b1-9cab-72103c6ae1fb\") " pod="openshift-must-gather-n4qb8/must-gather-9wq4j" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.621058 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: 
\"kubernetes.io/empty-dir/dda50136-d4d7-45b1-9cab-72103c6ae1fb-must-gather-output\") pod \"must-gather-9wq4j\" (UID: \"dda50136-d4d7-45b1-9cab-72103c6ae1fb\") " pod="openshift-must-gather-n4qb8/must-gather-9wq4j" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.621605 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/dda50136-d4d7-45b1-9cab-72103c6ae1fb-must-gather-output\") pod \"must-gather-9wq4j\" (UID: \"dda50136-d4d7-45b1-9cab-72103c6ae1fb\") " pod="openshift-must-gather-n4qb8/must-gather-9wq4j" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.647051 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkkzf\" (UniqueName: \"kubernetes.io/projected/dda50136-d4d7-45b1-9cab-72103c6ae1fb-kube-api-access-rkkzf\") pod \"must-gather-9wq4j\" (UID: \"dda50136-d4d7-45b1-9cab-72103c6ae1fb\") " pod="openshift-must-gather-n4qb8/must-gather-9wq4j" Jan 22 14:28:39 crc kubenswrapper[4773]: I0122 14:28:39.665271 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-n4qb8/must-gather-9wq4j" Jan 22 14:28:40 crc kubenswrapper[4773]: I0122 14:28:40.299151 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-n4qb8/must-gather-9wq4j"] Jan 22 14:28:40 crc kubenswrapper[4773]: I0122 14:28:40.717018 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-n4qb8/must-gather-9wq4j" event={"ID":"dda50136-d4d7-45b1-9cab-72103c6ae1fb","Type":"ContainerStarted","Data":"97e89a506658b7331035847a4db898f54c6d98faa22cfa7f4a55e8f8eaff43c7"} Jan 22 14:28:48 crc kubenswrapper[4773]: I0122 14:28:48.813903 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-n4qb8/must-gather-9wq4j" event={"ID":"dda50136-d4d7-45b1-9cab-72103c6ae1fb","Type":"ContainerStarted","Data":"b4e0e3636bf178480aed454b755dc9bc1e2410e028d5a98d6a6619a01e552c4b"} Jan 22 14:28:48 crc kubenswrapper[4773]: I0122 14:28:48.814528 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-n4qb8/must-gather-9wq4j" event={"ID":"dda50136-d4d7-45b1-9cab-72103c6ae1fb","Type":"ContainerStarted","Data":"8e973c39f6b2533c5b962512907a5346099b590b4d5c173b066f84b3a8df46e6"} Jan 22 14:28:48 crc kubenswrapper[4773]: I0122 14:28:48.842784 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-n4qb8/must-gather-9wq4j" podStartSLOduration=2.1678296870000002 podStartE2EDuration="9.842764358s" podCreationTimestamp="2026-01-22 14:28:39 +0000 UTC" firstStartedPulling="2026-01-22 14:28:40.253885385 +0000 UTC m=+9227.832001210" lastFinishedPulling="2026-01-22 14:28:47.928820056 +0000 UTC m=+9235.506935881" observedRunningTime="2026-01-22 14:28:48.84211703 +0000 UTC m=+9236.420232855" watchObservedRunningTime="2026-01-22 14:28:48.842764358 +0000 UTC m=+9236.420880183" Jan 22 14:28:50 crc kubenswrapper[4773]: I0122 14:28:50.658943 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:28:50 crc kubenswrapper[4773]: E0122 14:28:50.659595 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:28:52 crc kubenswrapper[4773]: I0122 14:28:52.070665 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-n4qb8/crc-debug-jq7sj"] Jan 22 14:28:52 crc kubenswrapper[4773]: I0122 14:28:52.072379 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-n4qb8/crc-debug-jq7sj" Jan 22 14:28:52 crc kubenswrapper[4773]: I0122 14:28:52.186529 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b711a159-97fd-4bcb-b067-68feadcd8da6-host\") pod \"crc-debug-jq7sj\" (UID: \"b711a159-97fd-4bcb-b067-68feadcd8da6\") " pod="openshift-must-gather-n4qb8/crc-debug-jq7sj" Jan 22 14:28:52 crc kubenswrapper[4773]: I0122 14:28:52.186653 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2km9\" (UniqueName: \"kubernetes.io/projected/b711a159-97fd-4bcb-b067-68feadcd8da6-kube-api-access-d2km9\") pod \"crc-debug-jq7sj\" (UID: \"b711a159-97fd-4bcb-b067-68feadcd8da6\") " pod="openshift-must-gather-n4qb8/crc-debug-jq7sj" Jan 22 14:28:52 crc kubenswrapper[4773]: I0122 14:28:52.289562 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2km9\" (UniqueName: \"kubernetes.io/projected/b711a159-97fd-4bcb-b067-68feadcd8da6-kube-api-access-d2km9\") pod \"crc-debug-jq7sj\" (UID: \"b711a159-97fd-4bcb-b067-68feadcd8da6\") " pod="openshift-must-gather-n4qb8/crc-debug-jq7sj" Jan 22 14:28:52 crc kubenswrapper[4773]: I0122 14:28:52.289834 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b711a159-97fd-4bcb-b067-68feadcd8da6-host\") pod \"crc-debug-jq7sj\" (UID: \"b711a159-97fd-4bcb-b067-68feadcd8da6\") " pod="openshift-must-gather-n4qb8/crc-debug-jq7sj" Jan 22 14:28:52 crc kubenswrapper[4773]: I0122 14:28:52.290040 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b711a159-97fd-4bcb-b067-68feadcd8da6-host\") pod \"crc-debug-jq7sj\" (UID: \"b711a159-97fd-4bcb-b067-68feadcd8da6\") " pod="openshift-must-gather-n4qb8/crc-debug-jq7sj" Jan 22 14:28:52 crc kubenswrapper[4773]: I0122 14:28:52.309309 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2km9\" (UniqueName: \"kubernetes.io/projected/b711a159-97fd-4bcb-b067-68feadcd8da6-kube-api-access-d2km9\") pod \"crc-debug-jq7sj\" (UID: \"b711a159-97fd-4bcb-b067-68feadcd8da6\") " pod="openshift-must-gather-n4qb8/crc-debug-jq7sj" Jan 22 14:28:52 crc kubenswrapper[4773]: I0122 14:28:52.397456 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-n4qb8/crc-debug-jq7sj" Jan 22 14:28:52 crc kubenswrapper[4773]: W0122 14:28:52.456921 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb711a159_97fd_4bcb_b067_68feadcd8da6.slice/crio-ae00815e94c74eaea1d06bfb0a896f88e761696d021ce0a4f3f48b2d0397d5cf WatchSource:0}: Error finding container ae00815e94c74eaea1d06bfb0a896f88e761696d021ce0a4f3f48b2d0397d5cf: Status 404 returned error can't find the container with id ae00815e94c74eaea1d06bfb0a896f88e761696d021ce0a4f3f48b2d0397d5cf Jan 22 14:28:52 crc kubenswrapper[4773]: I0122 14:28:52.856658 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-n4qb8/crc-debug-jq7sj" event={"ID":"b711a159-97fd-4bcb-b067-68feadcd8da6","Type":"ContainerStarted","Data":"ae00815e94c74eaea1d06bfb0a896f88e761696d021ce0a4f3f48b2d0397d5cf"} Jan 22 14:28:54 crc kubenswrapper[4773]: I0122 14:28:54.990277 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_7599c514-2d2d-4ec6-b004-a4a0d31f032a/alertmanager/0.log" Jan 22 14:28:54 crc kubenswrapper[4773]: I0122 14:28:54.997488 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_7599c514-2d2d-4ec6-b004-a4a0d31f032a/config-reloader/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.022207 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_7599c514-2d2d-4ec6-b004-a4a0d31f032a/init-config-reloader/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.046860 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_26a329e1-f0b6-46bb-98c5-6c094855f241/aodh-api/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.067248 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_26a329e1-f0b6-46bb-98c5-6c094855f241/aodh-evaluator/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.072927 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_26a329e1-f0b6-46bb-98c5-6c094855f241/aodh-notifier/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.082683 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_26a329e1-f0b6-46bb-98c5-6c094855f241/aodh-listener/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.122431 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7f865655b4-mkvnv_2af597d8-3c79-478e-aa8d-1bdeea5c2ba3/barbican-api-log/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.131131 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7f865655b4-mkvnv_2af597d8-3c79-478e-aa8d-1bdeea5c2ba3/barbican-api/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.159131 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6bd66988c8-tblx4_3655d186-9110-4e00-b952-c587afca8c0f/barbican-keystone-listener-log/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.167695 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6bd66988c8-tblx4_3655d186-9110-4e00-b952-c587afca8c0f/barbican-keystone-listener/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.192202 4773 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-worker-5ff89786dc-wphr7_10dffa1a-2b65-4a28-a990-5ed8a0db0943/barbican-worker-log/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.197453 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5ff89786dc-wphr7_10dffa1a-2b65-4a28-a990-5ed8a0db0943/barbican-worker/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.246881 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-openstack-openstack-cell1-r6264_a4e419ee-625b-4922-b9d7-4aefc6eae65b/bootstrap-openstack-openstack-cell1/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.287450 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b2ba1736-246e-42ae-9249-e635b29993ae/ceilometer-central-agent/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.331226 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b2ba1736-246e-42ae-9249-e635b29993ae/ceilometer-notification-agent/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.341960 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b2ba1736-246e-42ae-9249-e635b29993ae/sg-core/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.360973 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b2ba1736-246e-42ae-9249-e635b29993ae/proxy-httpd/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.376728 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_a903c09b-df78-4752-8e8c-4b711299d7a5/cinder-api-log/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.470163 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_a903c09b-df78-4752-8e8c-4b711299d7a5/cinder-api/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.511680 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_ea298d44-91e6-4904-82fd-31d7588d7980/cinder-scheduler/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.576214 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_ea298d44-91e6-4904-82fd-31d7588d7980/probe/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.592878 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-openstack-openstack-cell1-lpznb_df86d55f-325a-4df7-9275-8b64fa37a759/configure-network-openstack-openstack-cell1/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.624922 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-openstack-openstack-cell1-kpr4l_0c3b769b-d2e7-4491-b723-fdd503391eff/configure-os-openstack-openstack-cell1/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.670171 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-78669f889-54nfc_4918760f-8cd5-4b8e-9cd3-623967f73d9d/dnsmasq-dns/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.679655 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-78669f889-54nfc_4918760f-8cd5-4b8e-9cd3-623967f73d9d/init/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.705153 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-openstack-openstack-cell1-pg6sn_537d68a1-fe75-4e50-8c4b-c9e4c0ab7899/download-cache-openstack-openstack-cell1/0.log" Jan 22 
14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.718327 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_5464273f-aca6-46a1-bfdb-15c7d174be31/glance-log/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.758507 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_5464273f-aca6-46a1-bfdb-15c7d174be31/glance-httpd/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.779164 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f9820bd1-4334-42ed-94ad-fe0c21440458/glance-log/0.log" Jan 22 14:28:55 crc kubenswrapper[4773]: I0122 14:28:55.945661 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f9820bd1-4334-42ed-94ad-fe0c21440458/glance-httpd/0.log" Jan 22 14:28:56 crc kubenswrapper[4773]: I0122 14:28:56.298619 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-5c7548b94d-xvrz6_15dbb5e0-8d05-416b-b268-906db6b67cf8/heat-api/0.log" Jan 22 14:28:56 crc kubenswrapper[4773]: I0122 14:28:56.621232 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-68567c67d-p7vzc_01ff0cd8-8d9d-4666-bd26-dde1424eb01f/heat-cfnapi/0.log" Jan 22 14:28:56 crc kubenswrapper[4773]: I0122 14:28:56.641280 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-54c5697d4f-hg5lj_93a64aef-7e70-4977-8c33-203a4508abf2/heat-engine/0.log" Jan 22 14:28:57 crc kubenswrapper[4773]: I0122 14:28:57.014266 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-64d88cbf74-c2bxb_16615494-47e4-428d-b631-e41b55192f74/horizon-log/0.log" Jan 22 14:28:57 crc kubenswrapper[4773]: I0122 14:28:57.103526 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-64d88cbf74-c2bxb_16615494-47e4-428d-b631-e41b55192f74/horizon/0.log" Jan 22 14:28:57 crc kubenswrapper[4773]: I0122 14:28:57.134764 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-openstack-openstack-cell1-t788q_f7ede466-9619-44bb-af40-1dd8e773f7b3/install-certs-openstack-openstack-cell1/0.log" Jan 22 14:28:57 crc kubenswrapper[4773]: I0122 14:28:57.159797 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-openstack-openstack-cell1-hbg4m_648e60a4-965f-456b-8b70-927b15c1a692/install-os-openstack-openstack-cell1/0.log" Jan 22 14:28:57 crc kubenswrapper[4773]: I0122 14:28:57.350683 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-54c687bbf4-87mjw_11af641b-1498-4f72-885a-35b8531e251c/keystone-api/0.log" Jan 22 14:28:57 crc kubenswrapper[4773]: I0122 14:28:57.369128 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29484841-g7cvq_adfe4107-4d17-40d6-8531-9e1a2a2f7ec8/keystone-cron/0.log" Jan 22 14:28:57 crc kubenswrapper[4773]: I0122 14:28:57.382193 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_8d7992be-21ed-446d-bf12-1adeccd86d66/kube-state-metrics/0.log" Jan 22 14:28:57 crc kubenswrapper[4773]: I0122 14:28:57.414783 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-openstack-openstack-cell1-vmjs4_cc3dfbd1-565d-4291-806a-65c53fd7b75f/libvirt-openstack-openstack-cell1/0.log" Jan 22 14:29:01 crc kubenswrapper[4773]: I0122 14:29:01.659006 4773 scope.go:117] "RemoveContainer" 
containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:29:01 crc kubenswrapper[4773]: E0122 14:29:01.659604 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:29:04 crc kubenswrapper[4773]: I0122 14:29:04.917612 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_83b59e90-0bfd-47a7-8087-6c4689bbb0bd/memcached/0.log" Jan 22 14:29:04 crc kubenswrapper[4773]: I0122 14:29:04.996141 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5c649475bf-5dr4c_1a2c7ca2-94cf-4302-8bad-8593c9906521/neutron-api/0.log" Jan 22 14:29:05 crc kubenswrapper[4773]: I0122 14:29:05.038099 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5c649475bf-5dr4c_1a2c7ca2-94cf-4302-8bad-8593c9906521/neutron-httpd/0.log" Jan 22 14:29:05 crc kubenswrapper[4773]: I0122 14:29:05.064553 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-dhcp-openstack-openstack-cell1-j7dqc_42c256b3-a909-4067-892d-520fc7972418/neutron-dhcp-openstack-openstack-cell1/0.log" Jan 22 14:29:05 crc kubenswrapper[4773]: I0122 14:29:05.094576 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-openstack-openstack-cell1-vhsn4_1cacf4a2-5633-412a-8e19-0f0c81b673c0/neutron-metadata-openstack-openstack-cell1/0.log" Jan 22 14:29:05 crc kubenswrapper[4773]: I0122 14:29:05.116733 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-sriov-openstack-openstack-cell1-7mndd_5eb9f34d-70a6-4862-a2af-2d9811f5610a/neutron-sriov-openstack-openstack-cell1/0.log" Jan 22 14:29:05 crc kubenswrapper[4773]: I0122 14:29:05.221702 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_a7a91662-5b00-4b02-8c60-7abddc9b9de5/nova-api-log/0.log" Jan 22 14:29:05 crc kubenswrapper[4773]: I0122 14:29:05.921772 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_a7a91662-5b00-4b02-8c60-7abddc9b9de5/nova-api-api/0.log" Jan 22 14:29:06 crc kubenswrapper[4773]: I0122 14:29:06.011230 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_b10fb344-5f69-482e-aa3b-4e5a0bbf1408/nova-cell0-conductor-conductor/0.log" Jan 22 14:29:06 crc kubenswrapper[4773]: I0122 14:29:06.128486 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_d4d650f9-c96b-40d4-8ef9-e7c437571f32/nova-cell1-conductor-conductor/0.log" Jan 22 14:29:06 crc kubenswrapper[4773]: I0122 14:29:06.689322 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_c34aa5c6-00d7-48d2-97ea-ad35825b107d/nova-cell1-novncproxy-novncproxy/0.log" Jan 22 14:29:06 crc kubenswrapper[4773]: I0122 14:29:06.740477 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbhgtz_69e89853-4593-4500-9f28-02cff2d55d23/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1/0.log" Jan 22 14:29:06 crc kubenswrapper[4773]: I0122 14:29:06.777293 4773 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-cell1-openstack-openstack-cell1-flgnp_48876acf-700b-43be-94ae-7773bf8e8cbf/nova-cell1-openstack-openstack-cell1/0.log" Jan 22 14:29:06 crc kubenswrapper[4773]: I0122 14:29:06.841963 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_e9e640f7-eedc-45e6-b379-3f78fdcca5f2/nova-metadata-log/0.log" Jan 22 14:29:07 crc kubenswrapper[4773]: I0122 14:29:07.312315 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_e9e640f7-eedc-45e6-b379-3f78fdcca5f2/nova-metadata-metadata/0.log" Jan 22 14:29:07 crc kubenswrapper[4773]: I0122 14:29:07.587712 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_b47a6230-96aa-47e7-b785-4f2a3a40ca16/nova-scheduler-scheduler/0.log" Jan 22 14:29:08 crc kubenswrapper[4773]: I0122 14:29:08.030688 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-76b5ddccbc-vrfvq_badfc18b-9cbe-4976-b7af-d6c1ec52a1c8/octavia-api/0.log" Jan 22 14:29:08 crc kubenswrapper[4773]: I0122 14:29:08.566949 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-76b5ddccbc-vrfvq_badfc18b-9cbe-4976-b7af-d6c1ec52a1c8/octavia-api-provider-agent/0.log" Jan 22 14:29:08 crc kubenswrapper[4773]: I0122 14:29:08.579806 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-76b5ddccbc-vrfvq_badfc18b-9cbe-4976-b7af-d6c1ec52a1c8/init/0.log" Jan 22 14:29:08 crc kubenswrapper[4773]: I0122 14:29:08.840667 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-nj8qw_d8974658-8091-4884-ad44-7fe8331a02c9/octavia-healthmanager/0.log" Jan 22 14:29:08 crc kubenswrapper[4773]: I0122 14:29:08.913219 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-nj8qw_d8974658-8091-4884-ad44-7fe8331a02c9/init/0.log" Jan 22 14:29:08 crc kubenswrapper[4773]: I0122 14:29:08.944187 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-9nwnm_8ffe7dee-c700-48d6-8231-5d67ba8cd887/octavia-housekeeping/0.log" Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.104322 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-9nwnm_8ffe7dee-c700-48d6-8231-5d67ba8cd887/init/0.log" Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.113844 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-7b97d6bc64-rnpb2_cec4fc5b-d5a5-4197-8873-e93b2c58985b/octavia-amphora-httpd/0.log" Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.137493 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-7b97d6bc64-rnpb2_cec4fc5b-d5a5-4197-8873-e93b2c58985b/init/0.log" Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.149918 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-b2pfg_ef162fcd-6b27-4406-ac32-b6efa877ed0f/octavia-rsyslog/0.log" Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.285332 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-b2pfg_ef162fcd-6b27-4406-ac32-b6efa877ed0f/init/0.log" Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.584050 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-6vd2t_d878cddf-50a9-4d56-b6ff-872694625d2e/octavia-worker/0.log" Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.593057 4773 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-6vd2t_d878cddf-50a9-4d56-b6ff-872694625d2e/init/0.log" Jan 22 14:29:09 crc kubenswrapper[4773]: E0122 14:29:09.607176 4773 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296" Jan 22 14:29:09 crc kubenswrapper[4773]: E0122 14:29:09.607404 4773 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:container-00,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296,Command:[chroot /host bash -c echo 'TOOLBOX_NAME=toolbox-osp' > /root/.toolboxrc ; rm -rf \"/var/tmp/sos-osp\" && mkdir -p \"/var/tmp/sos-osp\" && sudo podman rm --force toolbox-osp; sudo --preserve-env podman pull --authfile /var/lib/kubelet/config.json registry.redhat.io/rhel9/support-tools && toolbox sos report --batch --all-logs --only-plugins block,cifs,crio,devicemapper,devices,firewall_tables,firewalld,iscsi,lvm2,memory,multipath,nfs,nis,nvme,podman,process,processor,selinux,scsi,udev,logs,crypto --tmp-dir=\"/var/tmp/sos-osp\" && if [[ \"$(ls /var/log/pods/*/{*.log.*,*/*.log.*} 2>/dev/null)\" != '' ]]; then tar --ignore-failed-read --warning=no-file-changed -cJf \"/var/tmp/sos-osp/podlogs.tar.xz\" --transform 's,^,podlogs/,' /var/log/pods/*/{*.log.*,*/*.log.*} || true; fi],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:TMOUT,Value:900,ValueFrom:nil,},EnvVar{Name:HOST,Value:/host,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:host,ReadOnly:false,MountPath:/host,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d2km9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod crc-debug-jq7sj_openshift-must-gather-n4qb8(b711a159-97fd-4bcb-b067-68feadcd8da6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 14:29:09 crc kubenswrapper[4773]: E0122 14:29:09.608797 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"container-00\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openshift-must-gather-n4qb8/crc-debug-jq7sj" podUID="b711a159-97fd-4bcb-b067-68feadcd8da6" Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.615983 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_344a28a5-4b8d-4f67-8165-5cff172af873/galera/0.log" Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.630534 4773 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_344a28a5-4b8d-4f67-8165-5cff172af873/mysql-bootstrap/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.648704 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_812c3f46-cf97-4dc8-9c19-68477c9f8eca/galera/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.662227 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_812c3f46-cf97-4dc8-9c19-68477c9f8eca/mysql-bootstrap/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.671819 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_88b4759c-3fdf-4b34-9420-03eb7c3dd19d/openstackclient/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.681970 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-vxlqf_50cd75a4-0c13-4ab5-bcdc-dacafe2e00a0/openstack-network-exporter/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.695901 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5m9h5_3e9cd77c-960b-41ac-b305-9f79452beb10/ovsdb-server/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.706857 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5m9h5_3e9cd77c-960b-41ac-b305-9f79452beb10/ovs-vswitchd/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.720421 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5m9h5_3e9cd77c-960b-41ac-b305-9f79452beb10/ovsdb-server-init/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.738648 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-v6vjx_ed191d8f-9eb0-4bc6-aa42-62ea1d99657a/ovn-controller/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.750860 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_47b6e6f2-15bb-47c7-bf7b-b4c925e27d36/ovn-northd/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.756466 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_47b6e6f2-15bb-47c7-bf7b-b4c925e27d36/openstack-network-exporter/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.799899 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-openstack-openstack-cell1-rvtvw_5f66e0f1-ce22-40a2-b251-c010c5c57aa9/ovn-openstack-openstack-cell1/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.815701 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4/ovsdbserver-nb/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.822635 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_7fb0c726-7d2f-4a39-b42a-c5b3c030d9d4/openstack-network-exporter/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.842140 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_d74ce6eb-4311-4bd6-9aa9-2e13183e5180/ovsdbserver-nb/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.849008 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_d74ce6eb-4311-4bd6-9aa9-2e13183e5180/openstack-network-exporter/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.866861 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_96ed693c-6314-414e-84eb-5d82d4ec2ff8/ovsdbserver-nb/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.875342 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_96ed693c-6314-414e-84eb-5d82d4ec2ff8/openstack-network-exporter/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.896871 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d/ovsdbserver-sb/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.905089 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_aed82f10-ad3d-4453-bfd0-9d24b8bbaa7d/openstack-network-exporter/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.927963 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_65b8eedc-da58-466b-9774-eb79bc70e3f6/ovsdbserver-sb/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.933916 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_65b8eedc-da58-466b-9774-eb79bc70e3f6/openstack-network-exporter/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.955516 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_a3200fab-2f90-472d-a9d8-7d75debdc065/ovsdbserver-sb/0.log"
Jan 22 14:29:09 crc kubenswrapper[4773]: I0122 14:29:09.961328 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_a3200fab-2f90-472d-a9d8-7d75debdc065/openstack-network-exporter/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.019308 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7dc858bb76-pr6m5_e597eff2-b887-4521-8118-3ff1fb86742c/placement-log/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.061049 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7dc858bb76-pr6m5_e597eff2-b887-4521-8118-3ff1fb86742c/placement-api/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.089345 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_pre-adoption-validation-openstack-pre-adoption-openstack-cd5vbz_654d399b-e1b1-41bd-86e4-a1806f829a79/pre-adoption-validation-openstack-pre-adoption-openstack-cell1/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.114784 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a/prometheus/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.124067 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a/config-reloader/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.134722 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a/thanos-sidecar/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.146521 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_4aef4bbd-0a65-4d1f-9846-b18a4dc4bb9a/init-config-reloader/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: E0122 14:29:10.150103 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"container-00\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:6ab858aed98e4fe57e6b144da8e90ad5d6698bb4cc5521206f5c05809f0f9296\\\"\"" pod="openshift-must-gather-n4qb8/crc-debug-jq7sj" podUID="b711a159-97fd-4bcb-b067-68feadcd8da6"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.222847 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c1f14d87-f341-4c16-900b-7aa0878c9a84/rabbitmq/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.228336 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c1f14d87-f341-4c16-900b-7aa0878c9a84/setup-container/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.266582 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf/rabbitmq/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.272362 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_bb7fda1d-aa46-45d0-9af5-7f155f7d6fdf/setup-container/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.290939 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-openstack-openstack-cell1-lnlt2_d673e2cc-043b-49a8-9426-715a50fdc54c/reboot-os-openstack-openstack-cell1/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.309589 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-openstack-openstack-cell1-dxj54_0e69d61d-845b-4cfe-97aa-2b2f5fde0040/run-os-openstack-openstack-cell1/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.322524 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-openstack-d25vw_10d12b02-ba12-4fa5-b5f0-8b898ba5141e/ssh-known-hosts-openstack/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.462160 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6ddb845654-drmk5_38517212-420f-4320-ab7f-59390933f529/proxy-httpd/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.479469 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6ddb845654-drmk5_38517212-420f-4320-ab7f-59390933f529/proxy-server/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.489114 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-2k9nw_2345af62-3a63-4f5d-8a09-e33ee507c372/swift-ring-rebalance/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.548112 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-openstack-openstack-cell1-b4pq2_f6608269-1f5b-47d0-9cd4-fcf4410bb039/telemetry-openstack-openstack-cell1/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.588454 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tripleo-cleanup-tripleo-cleanup-openstack-cell1-6792s_69b32bbb-8065-4c41-91bb-3f13e9e321d0/tripleo-cleanup-tripleo-cleanup-openstack-cell1/0.log"
Jan 22 14:29:10 crc kubenswrapper[4773]: I0122 14:29:10.603440 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-openstack-openstack-cell1-w5d4l_35ac9085-7569-42c7-a13f-a4c1101dc438/validate-network-openstack-openstack-cell1/0.log"
Jan 22 14:29:16 crc kubenswrapper[4773]: I0122 14:29:16.658070 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187"
Jan 22 14:29:16 crc kubenswrapper[4773]: E0122 14:29:16.659054 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 14:29:20 crc kubenswrapper[4773]: I0122 14:29:20.050907 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-7vtfm_161e16b2-16b1-4485-aa07-4f8a766d347f/controller/0.log"
Jan 22 14:29:20 crc kubenswrapper[4773]: I0122 14:29:20.061044 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-7vtfm_161e16b2-16b1-4485-aa07-4f8a766d347f/kube-rbac-proxy/0.log"
Jan 22 14:29:20 crc kubenswrapper[4773]: I0122 14:29:20.088240 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/controller/0.log"
Jan 22 14:29:23 crc kubenswrapper[4773]: I0122 14:29:23.281642 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-n4qb8/crc-debug-jq7sj" event={"ID":"b711a159-97fd-4bcb-b067-68feadcd8da6","Type":"ContainerStarted","Data":"07564a94ae77942436da811bf37cf4f1aec7e80b6ccf912905462b8c85091a74"}
Jan 22 14:29:23 crc kubenswrapper[4773]: I0122 14:29:23.307608 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-n4qb8/crc-debug-jq7sj" podStartSLOduration=1.115678005 podStartE2EDuration="31.307584507s" podCreationTimestamp="2026-01-22 14:28:52 +0000 UTC" firstStartedPulling="2026-01-22 14:28:52.460850397 +0000 UTC m=+9240.038966232" lastFinishedPulling="2026-01-22 14:29:22.652756909 +0000 UTC m=+9270.230872734" observedRunningTime="2026-01-22 14:29:23.298936732 +0000 UTC m=+9270.877052557" watchObservedRunningTime="2026-01-22 14:29:23.307584507 +0000 UTC m=+9270.885700332"
Jan 22 14:29:23 crc kubenswrapper[4773]: I0122 14:29:23.612947 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/frr/0.log"
Jan 22 14:29:23 crc kubenswrapper[4773]: I0122 14:29:23.625761 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/reloader/0.log"
Jan 22 14:29:23 crc kubenswrapper[4773]: I0122 14:29:23.631860 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/frr-metrics/0.log"
Jan 22 14:29:23 crc kubenswrapper[4773]: I0122 14:29:23.637755 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/kube-rbac-proxy/0.log"
Jan 22 14:29:23 crc kubenswrapper[4773]: I0122 14:29:23.645626 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/kube-rbac-proxy-frr/0.log"
Jan 22 14:29:23 crc kubenswrapper[4773]: I0122 14:29:23.654129 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/cp-frr-files/0.log"
Jan 22 14:29:23 crc kubenswrapper[4773]: I0122 14:29:23.659245 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/cp-reloader/0.log"
Jan 22 14:29:23 crc kubenswrapper[4773]: I0122 14:29:23.671173 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/cp-metrics/0.log"
Jan 22 14:29:23 crc kubenswrapper[4773]: I0122 14:29:23.693081 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-s46dq_941adce7-fcb2-4191-920a-e9279f9ac0db/frr-k8s-webhook-server/0.log"
Jan 22 14:29:23 crc kubenswrapper[4773]: I0122 14:29:23.754535 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-75c7758c8d-jv4lh_acc4b660-fb70-4713-ba23-597608298bc1/manager/0.log"
Jan 22 14:29:23 crc kubenswrapper[4773]: I0122 14:29:23.765205 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-789dcb66b5-4k49c_3ada08ed-114c-4b1f-9110-9d4d60edaa27/webhook-server/0.log"
Jan 22 14:29:24 crc kubenswrapper[4773]: I0122 14:29:24.627729 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8d28f_8f8f8131-f668-4d0c-be50-21c1eb7b62ff/speaker/0.log"
Jan 22 14:29:24 crc kubenswrapper[4773]: I0122 14:29:24.633669 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8d28f_8f8f8131-f668-4d0c-be50-21c1eb7b62ff/kube-rbac-proxy/0.log"
Jan 22 14:29:31 crc kubenswrapper[4773]: I0122 14:29:31.658404 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187"
Jan 22 14:29:31 crc kubenswrapper[4773]: E0122 14:29:31.659202 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 14:29:39 crc kubenswrapper[4773]: I0122 14:29:39.598578 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-59dd8b7cbf-9nlp4_73fae2b3-a45d-431f-9113-7f669d5eff6d/manager/0.log"
Jan 22 14:29:39 crc kubenswrapper[4773]: I0122 14:29:39.681126 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-69cf5d4557-k7bqw_e0e2887e-f5a6-48e6-862c-593b909d5514/manager/0.log"
Jan 22 14:29:39 crc kubenswrapper[4773]: I0122 14:29:39.713986 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-b45d7bf98-vkt5m_4429281b-22db-4df9-8c28-bb30e527b1f6/manager/0.log"
Jan 22 14:29:39 crc kubenswrapper[4773]: I0122 14:29:39.736970 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh_8948768d-c93c-4a29-a93c-8c449a1980c3/extract/0.log"
Jan 22 14:29:39 crc kubenswrapper[4773]: I0122 14:29:39.743048 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh_8948768d-c93c-4a29-a93c-8c449a1980c3/util/0.log"
Jan 22 14:29:39 crc kubenswrapper[4773]: I0122 14:29:39.753818 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh_8948768d-c93c-4a29-a93c-8c449a1980c3/pull/0.log"
Jan 22 14:29:39 crc kubenswrapper[4773]: I0122 14:29:39.941312 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-78fdd796fd-lm8jc_a1965d1a-cb7f-4da6-90ab-1f75449e3e97/manager/0.log"
Jan 22 14:29:40 crc kubenswrapper[4773]: I0122 14:29:40.014115 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-594c8c9d5d-br2hg_54683bfd-5bbf-47c6-9f3f-5bc4583ef5f0/manager/0.log"
Jan 22 14:29:40 crc kubenswrapper[4773]: I0122 14:29:40.043684 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-77d5c5b54f-ztsqs_9aeccfa1-86f0-47e3-96c7-e0d018d24537/manager/0.log"
Jan 22 14:29:41 crc kubenswrapper[4773]: I0122 14:29:41.800995 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="47b6e6f2-15bb-47c7-bf7b-b4c925e27d36" containerName="ovn-northd" probeResult="failure" output="command timed out"
Jan 22 14:29:41 crc kubenswrapper[4773]: I0122 14:29:41.801041 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/ovn-northd-0" podUID="47b6e6f2-15bb-47c7-bf7b-b4c925e27d36" containerName="ovn-northd" probeResult="failure" output="command timed out"
Jan 22 14:29:42 crc kubenswrapper[4773]: I0122 14:29:42.514960 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-54ccf4f85d-dlzxj_29637656-53e6-4957-88ea-2445b706ec08/manager/0.log"
Jan 22 14:29:42 crc kubenswrapper[4773]: I0122 14:29:42.529717 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-69d6c9f5b8-7mdlm_b6ef28d3-92bf-43a7-a577-c4ac162ab48a/manager/0.log"
Jan 22 14:29:42 crc kubenswrapper[4773]: I0122 14:29:42.913871 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-b8b6d4659-x6894_02d2e417-7591-4d07-850e-4c670b40d1ea/manager/0.log"
Jan 22 14:29:42 crc kubenswrapper[4773]: I0122 14:29:42.924392 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-78c6999f6f-sd745_cfe59bee-7ac2-4f1f-ac59-ab2ae4b1c24e/manager/0.log"
Jan 22 14:29:43 crc kubenswrapper[4773]: I0122 14:29:43.002620 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-c87fff755-rjsjw_be3892a0-8b94-459b-9d05-9aae47107554/manager/0.log"
Jan 22 14:29:43 crc kubenswrapper[4773]: I0122 14:29:43.093319 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5d8f59fb49-vlvhw_6e27f8a3-a214-47ec-9027-1a503d588e59/manager/0.log"
Jan 22 14:29:43 crc kubenswrapper[4773]: I0122 14:29:43.319007 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-6b8bc8d87d-t4ws7_639db363-b628-4c24-be20-57a1bf05c986/manager/0.log"
Jan 22 14:29:43 crc kubenswrapper[4773]: I0122 14:29:43.405204 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7bd9774b6-qwf6d_85ad5e61-8c92-4856-a34c-7d02aadbbc43/manager/0.log"
Jan 22 14:29:43 crc kubenswrapper[4773]: I0122 14:29:43.429515 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-7c9c58b55799pgj_fd720cc2-9948-4a4d-951f-17a20558e0e2/manager/0.log"
Jan 22 14:29:43 crc kubenswrapper[4773]: I0122 14:29:43.591250 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-698d6bb84b-m5n97_5ddce1ac-9a5f-4096-b21f-77dc07b68c2d/operator/0.log"
Jan 22 14:29:43 crc kubenswrapper[4773]: I0122 14:29:43.657967 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187"
Jan 22 14:29:43 crc kubenswrapper[4773]: E0122 14:29:43.658807 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 14:29:46 crc kubenswrapper[4773]: I0122 14:29:46.678553 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-788c8b99b5-fh84n_4947f3f4-af75-45ed-9481-f4c8f3e525d8/manager/0.log"
Jan 22 14:29:46 crc kubenswrapper[4773]: I0122 14:29:46.821353 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-jvwbz_d86424e7-7383-4bbe-8610-2ac8fdc8143a/registry-server/0.log"
Jan 22 14:29:46 crc kubenswrapper[4773]: I0122 14:29:46.934400 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-55db956ddc-lx75p_813b6b20-e15b-4da5-959a-d719f973a4e5/manager/0.log"
Jan 22 14:29:46 crc kubenswrapper[4773]: I0122 14:29:46.971496 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5d646b7d76-h6ggd_592864bf-34a6-4335-9425-72386e772818/manager/0.log"
Jan 22 14:29:47 crc kubenswrapper[4773]: I0122 14:29:47.002055 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-qgnch_7da5c99e-8a28-4671-b7fb-43ec8b4d6faf/operator/0.log"
Jan 22 14:29:47 crc kubenswrapper[4773]: I0122 14:29:47.064372 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-547cbdb99f-msvwn_7e2cd62a-874a-4d1b-a706-439c0e7756c0/manager/0.log"
Jan 22 14:29:47 crc kubenswrapper[4773]: I0122 14:29:47.294567 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-85cd9769bb-rx8n5_d4fe0850-2785-4433-8e0a-28efdea91b64/manager/0.log"
Jan 22 14:29:47 crc kubenswrapper[4773]: I0122 14:29:47.304160 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-69797bbcbd-9gcmw_e817d93c-c5c3-44ed-92aa-e761dda7eaa9/manager/0.log"
Jan 22 14:29:47 crc kubenswrapper[4773]: I0122 14:29:47.324912 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-5ffb9c6597-ptq9w_9731c1da-ba60-4e4a-af76-4e870c0f6e35/manager/0.log"
Jan 22 14:29:49 crc kubenswrapper[4773]: I0122 14:29:49.066140 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-2hwmg_2e31d232-2138-4dd6-9b7d-71f87e414a01/control-plane-machine-set-operator/0.log"
Jan 22 14:29:49 crc kubenswrapper[4773]: I0122 14:29:49.082267 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-jm24g_1a76da48-177b-429e-a136-d78afeae02aa/kube-rbac-proxy/0.log"
Jan 22 14:29:49 crc kubenswrapper[4773]: I0122 14:29:49.091589 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-jm24g_1a76da48-177b-429e-a136-d78afeae02aa/machine-api-operator/0.log"
Jan 22 14:29:58 crc kubenswrapper[4773]: I0122 14:29:58.659134 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187"
Jan 22 14:29:58 crc kubenswrapper[4773]: E0122 14:29:58.660201 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 14:30:00 crc kubenswrapper[4773]: I0122 14:30:00.158118 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp"]
Jan 22 14:30:00 crc kubenswrapper[4773]: I0122 14:30:00.161144 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp"
Jan 22 14:30:00 crc kubenswrapper[4773]: I0122 14:30:00.173779 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp"]
Jan 22 14:30:00 crc kubenswrapper[4773]: I0122 14:30:00.226486 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 22 14:30:00 crc kubenswrapper[4773]: I0122 14:30:00.226964 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 22 14:30:00 crc kubenswrapper[4773]: I0122 14:30:00.338319 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnrvk\" (UniqueName: \"kubernetes.io/projected/fc2ed8a9-37f2-4e23-8937-945c879bf347-kube-api-access-pnrvk\") pod \"collect-profiles-29484870-5g6kp\" (UID: \"fc2ed8a9-37f2-4e23-8937-945c879bf347\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp"
Jan 22 14:30:00 crc kubenswrapper[4773]: I0122 14:30:00.338765 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fc2ed8a9-37f2-4e23-8937-945c879bf347-secret-volume\") pod \"collect-profiles-29484870-5g6kp\" (UID: \"fc2ed8a9-37f2-4e23-8937-945c879bf347\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp"
Jan 22 14:30:00 crc kubenswrapper[4773]: I0122 14:30:00.338999 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fc2ed8a9-37f2-4e23-8937-945c879bf347-config-volume\") pod \"collect-profiles-29484870-5g6kp\" (UID: \"fc2ed8a9-37f2-4e23-8937-945c879bf347\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp"
Jan 22 14:30:00 crc kubenswrapper[4773]: I0122 14:30:00.442986 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnrvk\" (UniqueName: \"kubernetes.io/projected/fc2ed8a9-37f2-4e23-8937-945c879bf347-kube-api-access-pnrvk\") pod \"collect-profiles-29484870-5g6kp\" (UID: \"fc2ed8a9-37f2-4e23-8937-945c879bf347\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp"
Jan 22 14:30:00 crc kubenswrapper[4773]: I0122 14:30:00.443365 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fc2ed8a9-37f2-4e23-8937-945c879bf347-secret-volume\") pod \"collect-profiles-29484870-5g6kp\" (UID: \"fc2ed8a9-37f2-4e23-8937-945c879bf347\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp"
Jan 22 14:30:00 crc kubenswrapper[4773]: I0122 14:30:00.443786 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fc2ed8a9-37f2-4e23-8937-945c879bf347-config-volume\") pod \"collect-profiles-29484870-5g6kp\" (UID: \"fc2ed8a9-37f2-4e23-8937-945c879bf347\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp"
Jan 22 14:30:00 crc kubenswrapper[4773]: I0122 14:30:00.445012 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fc2ed8a9-37f2-4e23-8937-945c879bf347-config-volume\") pod \"collect-profiles-29484870-5g6kp\" (UID: \"fc2ed8a9-37f2-4e23-8937-945c879bf347\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp"
Jan 22 14:30:00 crc kubenswrapper[4773]: I0122 14:30:00.450466 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fc2ed8a9-37f2-4e23-8937-945c879bf347-secret-volume\") pod \"collect-profiles-29484870-5g6kp\" (UID: \"fc2ed8a9-37f2-4e23-8937-945c879bf347\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp"
Jan 22 14:30:00 crc kubenswrapper[4773]: I0122 14:30:00.464952 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnrvk\" (UniqueName: \"kubernetes.io/projected/fc2ed8a9-37f2-4e23-8937-945c879bf347-kube-api-access-pnrvk\") pod \"collect-profiles-29484870-5g6kp\" (UID: \"fc2ed8a9-37f2-4e23-8937-945c879bf347\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp"
Jan 22 14:30:00 crc kubenswrapper[4773]: I0122 14:30:00.549080 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp"
Jan 22 14:30:01 crc kubenswrapper[4773]: I0122 14:30:01.104668 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp"]
Jan 22 14:30:01 crc kubenswrapper[4773]: W0122 14:30:01.106331 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc2ed8a9_37f2_4e23_8937_945c879bf347.slice/crio-8b8833741f1c48ac50283538047a47b65d5c958d1b6a88cdbb01ed796c990ff2 WatchSource:0}: Error finding container 8b8833741f1c48ac50283538047a47b65d5c958d1b6a88cdbb01ed796c990ff2: Status 404 returned error can't find the container with id 8b8833741f1c48ac50283538047a47b65d5c958d1b6a88cdbb01ed796c990ff2
Jan 22 14:30:01 crc kubenswrapper[4773]: I0122 14:30:01.933515 4773 generic.go:334] "Generic (PLEG): container finished" podID="fc2ed8a9-37f2-4e23-8937-945c879bf347" containerID="ab5969e00a4fd6f9b4cb7a66aac2ef5b72a9a3eeb71759f4f3324372b5d2e412" exitCode=0
Jan 22 14:30:01 crc kubenswrapper[4773]: I0122 14:30:01.933628 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp" event={"ID":"fc2ed8a9-37f2-4e23-8937-945c879bf347","Type":"ContainerDied","Data":"ab5969e00a4fd6f9b4cb7a66aac2ef5b72a9a3eeb71759f4f3324372b5d2e412"}
Jan 22 14:30:01 crc kubenswrapper[4773]: I0122 14:30:01.933826 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp" event={"ID":"fc2ed8a9-37f2-4e23-8937-945c879bf347","Type":"ContainerStarted","Data":"8b8833741f1c48ac50283538047a47b65d5c958d1b6a88cdbb01ed796c990ff2"}
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.234032 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gmrrp"]
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.236738 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gmrrp"
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.269690 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gmrrp"]
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.310710 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38978cc9-f8e1-4c57-bf80-ca1c147baecc-utilities\") pod \"redhat-marketplace-gmrrp\" (UID: \"38978cc9-f8e1-4c57-bf80-ca1c147baecc\") " pod="openshift-marketplace/redhat-marketplace-gmrrp"
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.310757 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38978cc9-f8e1-4c57-bf80-ca1c147baecc-catalog-content\") pod \"redhat-marketplace-gmrrp\" (UID: \"38978cc9-f8e1-4c57-bf80-ca1c147baecc\") " pod="openshift-marketplace/redhat-marketplace-gmrrp"
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.310984 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29n9c\" (UniqueName: \"kubernetes.io/projected/38978cc9-f8e1-4c57-bf80-ca1c147baecc-kube-api-access-29n9c\") pod \"redhat-marketplace-gmrrp\" (UID: \"38978cc9-f8e1-4c57-bf80-ca1c147baecc\") " pod="openshift-marketplace/redhat-marketplace-gmrrp"
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.332136 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp"
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.411934 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnrvk\" (UniqueName: \"kubernetes.io/projected/fc2ed8a9-37f2-4e23-8937-945c879bf347-kube-api-access-pnrvk\") pod \"fc2ed8a9-37f2-4e23-8937-945c879bf347\" (UID: \"fc2ed8a9-37f2-4e23-8937-945c879bf347\") "
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.412155 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fc2ed8a9-37f2-4e23-8937-945c879bf347-config-volume\") pod \"fc2ed8a9-37f2-4e23-8937-945c879bf347\" (UID: \"fc2ed8a9-37f2-4e23-8937-945c879bf347\") "
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.412388 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fc2ed8a9-37f2-4e23-8937-945c879bf347-secret-volume\") pod \"fc2ed8a9-37f2-4e23-8937-945c879bf347\" (UID: \"fc2ed8a9-37f2-4e23-8937-945c879bf347\") "
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.412748 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29n9c\" (UniqueName: \"kubernetes.io/projected/38978cc9-f8e1-4c57-bf80-ca1c147baecc-kube-api-access-29n9c\") pod \"redhat-marketplace-gmrrp\" (UID: \"38978cc9-f8e1-4c57-bf80-ca1c147baecc\") " pod="openshift-marketplace/redhat-marketplace-gmrrp"
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.412829 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38978cc9-f8e1-4c57-bf80-ca1c147baecc-utilities\") pod \"redhat-marketplace-gmrrp\" (UID: \"38978cc9-f8e1-4c57-bf80-ca1c147baecc\") " pod="openshift-marketplace/redhat-marketplace-gmrrp"
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.412850 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38978cc9-f8e1-4c57-bf80-ca1c147baecc-catalog-content\") pod \"redhat-marketplace-gmrrp\" (UID: \"38978cc9-f8e1-4c57-bf80-ca1c147baecc\") " pod="openshift-marketplace/redhat-marketplace-gmrrp"
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.412977 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc2ed8a9-37f2-4e23-8937-945c879bf347-config-volume" (OuterVolumeSpecName: "config-volume") pod "fc2ed8a9-37f2-4e23-8937-945c879bf347" (UID: "fc2ed8a9-37f2-4e23-8937-945c879bf347"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.413317 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38978cc9-f8e1-4c57-bf80-ca1c147baecc-catalog-content\") pod \"redhat-marketplace-gmrrp\" (UID: \"38978cc9-f8e1-4c57-bf80-ca1c147baecc\") " pod="openshift-marketplace/redhat-marketplace-gmrrp"
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.413369 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38978cc9-f8e1-4c57-bf80-ca1c147baecc-utilities\") pod \"redhat-marketplace-gmrrp\" (UID: \"38978cc9-f8e1-4c57-bf80-ca1c147baecc\") " pod="openshift-marketplace/redhat-marketplace-gmrrp"
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.434042 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc2ed8a9-37f2-4e23-8937-945c879bf347-kube-api-access-pnrvk" (OuterVolumeSpecName: "kube-api-access-pnrvk") pod "fc2ed8a9-37f2-4e23-8937-945c879bf347" (UID: "fc2ed8a9-37f2-4e23-8937-945c879bf347"). InnerVolumeSpecName "kube-api-access-pnrvk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.440682 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29n9c\" (UniqueName: \"kubernetes.io/projected/38978cc9-f8e1-4c57-bf80-ca1c147baecc-kube-api-access-29n9c\") pod \"redhat-marketplace-gmrrp\" (UID: \"38978cc9-f8e1-4c57-bf80-ca1c147baecc\") " pod="openshift-marketplace/redhat-marketplace-gmrrp"
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.447880 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc2ed8a9-37f2-4e23-8937-945c879bf347-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fc2ed8a9-37f2-4e23-8937-945c879bf347" (UID: "fc2ed8a9-37f2-4e23-8937-945c879bf347"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.515326 4773 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fc2ed8a9-37f2-4e23-8937-945c879bf347-config-volume\") on node \"crc\" DevicePath \"\""
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.515665 4773 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fc2ed8a9-37f2-4e23-8937-945c879bf347-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.515683 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnrvk\" (UniqueName: \"kubernetes.io/projected/fc2ed8a9-37f2-4e23-8937-945c879bf347-kube-api-access-pnrvk\") on node \"crc\" DevicePath \"\""
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.648664 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gmrrp"
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.963825 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp" event={"ID":"fc2ed8a9-37f2-4e23-8937-945c879bf347","Type":"ContainerDied","Data":"8b8833741f1c48ac50283538047a47b65d5c958d1b6a88cdbb01ed796c990ff2"}
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.964154 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b8833741f1c48ac50283538047a47b65d5c958d1b6a88cdbb01ed796c990ff2"
Jan 22 14:30:03 crc kubenswrapper[4773]: I0122 14:30:03.963917 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp"
Jan 22 14:30:04 crc kubenswrapper[4773]: I0122 14:30:04.135924 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gmrrp"]
Jan 22 14:30:04 crc kubenswrapper[4773]: I0122 14:30:04.423526 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg"]
Jan 22 14:30:04 crc kubenswrapper[4773]: I0122 14:30:04.433891 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484825-wqrtg"]
Jan 22 14:30:04 crc kubenswrapper[4773]: I0122 14:30:04.676515 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b817b142-11f2-4a47-a22f-9c069367fdad" path="/var/lib/kubelet/pods/b817b142-11f2-4a47-a22f-9c069367fdad/volumes"
Jan 22 14:30:04 crc kubenswrapper[4773]: W0122 14:30:04.808355 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38978cc9_f8e1_4c57_bf80_ca1c147baecc.slice/crio-9c13612aea84bb264f2deee83a3fab13de09b0bd38d01dce91b0bf6f6096d070 WatchSource:0}: Error finding container 9c13612aea84bb264f2deee83a3fab13de09b0bd38d01dce91b0bf6f6096d070: Status 404 returned error can't find the container with id 9c13612aea84bb264f2deee83a3fab13de09b0bd38d01dce91b0bf6f6096d070
Jan 22 14:30:04 crc kubenswrapper[4773]: I0122 14:30:04.977401 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gmrrp" event={"ID":"38978cc9-f8e1-4c57-bf80-ca1c147baecc","Type":"ContainerStarted","Data":"9c13612aea84bb264f2deee83a3fab13de09b0bd38d01dce91b0bf6f6096d070"}
Jan 22 14:30:05 crc kubenswrapper[4773]: I0122 14:30:05.988220 4773 generic.go:334] "Generic (PLEG): container finished" podID="38978cc9-f8e1-4c57-bf80-ca1c147baecc" containerID="2db1c74c8d9f072f65b0c886e5357204a1d1676e24f1dfb490447c82394d10db" exitCode=0
Jan 22 14:30:05 crc kubenswrapper[4773]: I0122 14:30:05.988304 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gmrrp" event={"ID":"38978cc9-f8e1-4c57-bf80-ca1c147baecc","Type":"ContainerDied","Data":"2db1c74c8d9f072f65b0c886e5357204a1d1676e24f1dfb490447c82394d10db"}
Jan 22 14:30:05 crc kubenswrapper[4773]: I0122 14:30:05.990658 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 22 14:30:08 crc kubenswrapper[4773]: I0122 14:30:08.014586 4773 generic.go:334] "Generic (PLEG): container finished" podID="38978cc9-f8e1-4c57-bf80-ca1c147baecc" containerID="1ec44e1c886ea2156eaf9582f005f4123fe47dd7d8cecca01fe8e09928afbabe" exitCode=0
Jan 22 14:30:08 crc kubenswrapper[4773]: I0122 14:30:08.015632 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gmrrp" event={"ID":"38978cc9-f8e1-4c57-bf80-ca1c147baecc","Type":"ContainerDied","Data":"1ec44e1c886ea2156eaf9582f005f4123fe47dd7d8cecca01fe8e09928afbabe"}
Jan 22 14:30:09 crc kubenswrapper[4773]: I0122 14:30:09.027620 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gmrrp" event={"ID":"38978cc9-f8e1-4c57-bf80-ca1c147baecc","Type":"ContainerStarted","Data":"6fb63e079b1e129d6519978ff0543602235b003f9e5b73f54f8c535b31b79e80"}
Jan 22 14:30:09 crc kubenswrapper[4773]: I0122 14:30:09.031024 4773 generic.go:334] "Generic (PLEG): container finished" podID="b711a159-97fd-4bcb-b067-68feadcd8da6" containerID="07564a94ae77942436da811bf37cf4f1aec7e80b6ccf912905462b8c85091a74" exitCode=0
Jan 22 14:30:09 crc kubenswrapper[4773]: I0122 14:30:09.031052 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-n4qb8/crc-debug-jq7sj" event={"ID":"b711a159-97fd-4bcb-b067-68feadcd8da6","Type":"ContainerDied","Data":"07564a94ae77942436da811bf37cf4f1aec7e80b6ccf912905462b8c85091a74"}
Jan 22 14:30:09 crc kubenswrapper[4773]: I0122 14:30:09.073833 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gmrrp" podStartSLOduration=3.556926675 podStartE2EDuration="6.073805094s" podCreationTimestamp="2026-01-22 14:30:03 +0000 UTC" firstStartedPulling="2026-01-22 14:30:05.990453431 +0000 UTC m=+9313.568569256" lastFinishedPulling="2026-01-22 14:30:08.50733186 +0000 UTC m=+9316.085447675" observedRunningTime="2026-01-22 14:30:09.045329217 +0000 UTC m=+9316.623445032" watchObservedRunningTime="2026-01-22 14:30:09.073805094 +0000 UTC m=+9316.651920939"
Jan 22 14:30:10 crc kubenswrapper[4773]: I0122 14:30:10.192088 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-n4qb8/crc-debug-jq7sj"
Jan 22 14:30:10 crc kubenswrapper[4773]: I0122 14:30:10.246544 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-n4qb8/crc-debug-jq7sj"]
Jan 22 14:30:10 crc kubenswrapper[4773]: I0122 14:30:10.265253 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-n4qb8/crc-debug-jq7sj"]
Jan 22 14:30:10 crc kubenswrapper[4773]: I0122 14:30:10.269239 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b711a159-97fd-4bcb-b067-68feadcd8da6-host\") pod \"b711a159-97fd-4bcb-b067-68feadcd8da6\" (UID: \"b711a159-97fd-4bcb-b067-68feadcd8da6\") "
Jan 22 14:30:10 crc kubenswrapper[4773]: I0122 14:30:10.269651 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2km9\" (UniqueName: \"kubernetes.io/projected/b711a159-97fd-4bcb-b067-68feadcd8da6-kube-api-access-d2km9\") pod \"b711a159-97fd-4bcb-b067-68feadcd8da6\" (UID: \"b711a159-97fd-4bcb-b067-68feadcd8da6\") "
Jan 22 14:30:10 crc kubenswrapper[4773]: I0122 14:30:10.269869 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b711a159-97fd-4bcb-b067-68feadcd8da6-host" (OuterVolumeSpecName: "host") pod "b711a159-97fd-4bcb-b067-68feadcd8da6" (UID: "b711a159-97fd-4bcb-b067-68feadcd8da6"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 14:30:10 crc kubenswrapper[4773]: I0122 14:30:10.270960 4773 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b711a159-97fd-4bcb-b067-68feadcd8da6-host\") on node \"crc\" DevicePath \"\""
Jan 22 14:30:10 crc kubenswrapper[4773]: I0122 14:30:10.276933 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b711a159-97fd-4bcb-b067-68feadcd8da6-kube-api-access-d2km9" (OuterVolumeSpecName: "kube-api-access-d2km9") pod "b711a159-97fd-4bcb-b067-68feadcd8da6" (UID: "b711a159-97fd-4bcb-b067-68feadcd8da6"). InnerVolumeSpecName "kube-api-access-d2km9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 14:30:10 crc kubenswrapper[4773]: I0122 14:30:10.374588 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2km9\" (UniqueName: \"kubernetes.io/projected/b711a159-97fd-4bcb-b067-68feadcd8da6-kube-api-access-d2km9\") on node \"crc\" DevicePath \"\""
Jan 22 14:30:10 crc kubenswrapper[4773]: I0122 14:30:10.692924 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b711a159-97fd-4bcb-b067-68feadcd8da6" path="/var/lib/kubelet/pods/b711a159-97fd-4bcb-b067-68feadcd8da6/volumes"
Jan 22 14:30:11 crc kubenswrapper[4773]: I0122 14:30:11.051564 4773 scope.go:117] "RemoveContainer" containerID="07564a94ae77942436da811bf37cf4f1aec7e80b6ccf912905462b8c85091a74"
Jan 22 14:30:11 crc kubenswrapper[4773]: I0122 14:30:11.051846 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-n4qb8/crc-debug-jq7sj"
Jan 22 14:30:11 crc kubenswrapper[4773]: I0122 14:30:11.431324 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-n4qb8/crc-debug-xz5wm"]
Jan 22 14:30:11 crc kubenswrapper[4773]: E0122 14:30:11.432092 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc2ed8a9-37f2-4e23-8937-945c879bf347" containerName="collect-profiles"
Jan 22 14:30:11 crc kubenswrapper[4773]: I0122 14:30:11.432110 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc2ed8a9-37f2-4e23-8937-945c879bf347" containerName="collect-profiles"
Jan 22 14:30:11 crc kubenswrapper[4773]: E0122 14:30:11.432136 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b711a159-97fd-4bcb-b067-68feadcd8da6" containerName="container-00"
Jan 22 14:30:11 crc kubenswrapper[4773]: I0122 14:30:11.432144 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b711a159-97fd-4bcb-b067-68feadcd8da6" containerName="container-00"
Jan 22 14:30:11 crc kubenswrapper[4773]: I0122 14:30:11.432498 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc2ed8a9-37f2-4e23-8937-945c879bf347" containerName="collect-profiles"
Jan 22 14:30:11 crc kubenswrapper[4773]: I0122 14:30:11.432535 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="b711a159-97fd-4bcb-b067-68feadcd8da6" containerName="container-00"
Jan 22 14:30:11 crc kubenswrapper[4773]: I0122 14:30:11.433280 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-n4qb8/crc-debug-xz5wm"
Jan 22 14:30:11 crc kubenswrapper[4773]: I0122 14:30:11.510808 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7296abf7-c04f-430d-88cc-e62126b93e4e-host\") pod \"crc-debug-xz5wm\" (UID: \"7296abf7-c04f-430d-88cc-e62126b93e4e\") " pod="openshift-must-gather-n4qb8/crc-debug-xz5wm"
Jan 22 14:30:11 crc kubenswrapper[4773]: I0122 14:30:11.511107 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwrxg\" (UniqueName: \"kubernetes.io/projected/7296abf7-c04f-430d-88cc-e62126b93e4e-kube-api-access-hwrxg\") pod \"crc-debug-xz5wm\" (UID: \"7296abf7-c04f-430d-88cc-e62126b93e4e\") " pod="openshift-must-gather-n4qb8/crc-debug-xz5wm"
Jan 22 14:30:11 crc kubenswrapper[4773]: I0122 14:30:11.612864 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7296abf7-c04f-430d-88cc-e62126b93e4e-host\") pod \"crc-debug-xz5wm\" (UID: \"7296abf7-c04f-430d-88cc-e62126b93e4e\") " pod="openshift-must-gather-n4qb8/crc-debug-xz5wm"
Jan 22 14:30:11 crc kubenswrapper[4773]: I0122 14:30:11.612984 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwrxg\" (UniqueName: \"kubernetes.io/projected/7296abf7-c04f-430d-88cc-e62126b93e4e-kube-api-access-hwrxg\") pod \"crc-debug-xz5wm\" (UID: \"7296abf7-c04f-430d-88cc-e62126b93e4e\") " pod="openshift-must-gather-n4qb8/crc-debug-xz5wm"
Jan 22 14:30:11 crc kubenswrapper[4773]: I0122 14:30:11.613002 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7296abf7-c04f-430d-88cc-e62126b93e4e-host\") pod \"crc-debug-xz5wm\" (UID: \"7296abf7-c04f-430d-88cc-e62126b93e4e\") " pod="openshift-must-gather-n4qb8/crc-debug-xz5wm"
Jan 22 14:30:11 crc kubenswrapper[4773]: I0122 14:30:11.629345 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwrxg\" (UniqueName: \"kubernetes.io/projected/7296abf7-c04f-430d-88cc-e62126b93e4e-kube-api-access-hwrxg\") pod \"crc-debug-xz5wm\" (UID: \"7296abf7-c04f-430d-88cc-e62126b93e4e\") " pod="openshift-must-gather-n4qb8/crc-debug-xz5wm"
Jan 22 14:30:11 crc kubenswrapper[4773]: I0122 14:30:11.749525 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-n4qb8/crc-debug-xz5wm"
Jan 22 14:30:11 crc kubenswrapper[4773]: W0122 14:30:11.778034 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7296abf7_c04f_430d_88cc_e62126b93e4e.slice/crio-f18f77bf977d3b4f58236670ef3c8fff9c8854c252efdd73591fcd933b594b81 WatchSource:0}: Error finding container f18f77bf977d3b4f58236670ef3c8fff9c8854c252efdd73591fcd933b594b81: Status 404 returned error can't find the container with id f18f77bf977d3b4f58236670ef3c8fff9c8854c252efdd73591fcd933b594b81
Jan 22 14:30:12 crc kubenswrapper[4773]: I0122 14:30:12.065083 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-n4qb8/crc-debug-xz5wm" event={"ID":"7296abf7-c04f-430d-88cc-e62126b93e4e","Type":"ContainerStarted","Data":"f11a848b76b63046d6e59373603a6be400035eb2d8bf50b86bb70f1cf59f2bb8"}
Jan 22 14:30:12 crc kubenswrapper[4773]: I0122 14:30:12.065371 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-n4qb8/crc-debug-xz5wm" event={"ID":"7296abf7-c04f-430d-88cc-e62126b93e4e","Type":"ContainerStarted","Data":"f18f77bf977d3b4f58236670ef3c8fff9c8854c252efdd73591fcd933b594b81"}
Jan 22 14:30:12 crc kubenswrapper[4773]: I0122 14:30:12.083624 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-n4qb8/crc-debug-xz5wm" podStartSLOduration=1.083601443 podStartE2EDuration="1.083601443s" podCreationTimestamp="2026-01-22 14:30:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:30:12.080413203 +0000 UTC m=+9319.658529028" watchObservedRunningTime="2026-01-22 14:30:12.083601443 +0000 UTC m=+9319.661717268"
Jan 22 14:30:12 crc kubenswrapper[4773]: I0122 14:30:12.666470 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187"
Jan 22 14:30:12 crc kubenswrapper[4773]: E0122 14:30:12.667456 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 14:30:13 crc kubenswrapper[4773]: I0122 14:30:13.078055 4773 generic.go:334] "Generic (PLEG): container finished" podID="7296abf7-c04f-430d-88cc-e62126b93e4e" containerID="f11a848b76b63046d6e59373603a6be400035eb2d8bf50b86bb70f1cf59f2bb8" exitCode=0
Jan 22 14:30:13 crc kubenswrapper[4773]: I0122 14:30:13.078160 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-n4qb8/crc-debug-xz5wm" event={"ID":"7296abf7-c04f-430d-88cc-e62126b93e4e","Type":"ContainerDied","Data":"f11a848b76b63046d6e59373603a6be400035eb2d8bf50b86bb70f1cf59f2bb8"}
Jan 22 14:30:13 crc kubenswrapper[4773]: I0122 14:30:13.648806 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gmrrp"
Jan 22 14:30:13 crc kubenswrapper[4773]: I0122 14:30:13.649090 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gmrrp"
Jan 22 14:30:13 crc kubenswrapper[4773]: I0122 14:30:13.718877 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gmrrp"
Jan 22 14:30:14 crc kubenswrapper[4773]: I0122 14:30:14.161055 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gmrrp"
Jan 22 14:30:14 crc kubenswrapper[4773]: I0122 14:30:14.221586 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gmrrp"]
Jan 22 14:30:14 crc kubenswrapper[4773]: I0122 14:30:14.225017 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-n4qb8/crc-debug-xz5wm"
Jan 22 14:30:14 crc kubenswrapper[4773]: I0122 14:30:14.257755 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-n4qb8/crc-debug-xz5wm"]
Jan 22 14:30:14 crc kubenswrapper[4773]: I0122 14:30:14.266027 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-n4qb8/crc-debug-xz5wm"]
Jan 22 14:30:14 crc kubenswrapper[4773]: I0122 14:30:14.279188 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwrxg\" (UniqueName: \"kubernetes.io/projected/7296abf7-c04f-430d-88cc-e62126b93e4e-kube-api-access-hwrxg\") pod \"7296abf7-c04f-430d-88cc-e62126b93e4e\" (UID: \"7296abf7-c04f-430d-88cc-e62126b93e4e\") "
Jan 22 14:30:14 crc kubenswrapper[4773]: I0122 14:30:14.279400 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7296abf7-c04f-430d-88cc-e62126b93e4e-host\") pod \"7296abf7-c04f-430d-88cc-e62126b93e4e\" (UID: \"7296abf7-c04f-430d-88cc-e62126b93e4e\") "
Jan 22 14:30:14 crc kubenswrapper[4773]: I0122 14:30:14.279491 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7296abf7-c04f-430d-88cc-e62126b93e4e-host" (OuterVolumeSpecName: "host") pod "7296abf7-c04f-430d-88cc-e62126b93e4e" (UID: "7296abf7-c04f-430d-88cc-e62126b93e4e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 22 14:30:14 crc kubenswrapper[4773]: I0122 14:30:14.280176 4773 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7296abf7-c04f-430d-88cc-e62126b93e4e-host\") on node \"crc\" DevicePath \"\""
Jan 22 14:30:14 crc kubenswrapper[4773]: I0122 14:30:14.284021 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7296abf7-c04f-430d-88cc-e62126b93e4e-kube-api-access-hwrxg" (OuterVolumeSpecName: "kube-api-access-hwrxg") pod "7296abf7-c04f-430d-88cc-e62126b93e4e" (UID: "7296abf7-c04f-430d-88cc-e62126b93e4e"). InnerVolumeSpecName "kube-api-access-hwrxg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 14:30:14 crc kubenswrapper[4773]: I0122 14:30:14.382211 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwrxg\" (UniqueName: \"kubernetes.io/projected/7296abf7-c04f-430d-88cc-e62126b93e4e-kube-api-access-hwrxg\") on node \"crc\" DevicePath \"\""
Jan 22 14:30:14 crc kubenswrapper[4773]: I0122 14:30:14.671247 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7296abf7-c04f-430d-88cc-e62126b93e4e" path="/var/lib/kubelet/pods/7296abf7-c04f-430d-88cc-e62126b93e4e/volumes"
Jan 22 14:30:15 crc kubenswrapper[4773]: I0122 14:30:15.100931 4773 scope.go:117] "RemoveContainer" containerID="f11a848b76b63046d6e59373603a6be400035eb2d8bf50b86bb70f1cf59f2bb8"
Jan 22 14:30:15 crc kubenswrapper[4773]: I0122 14:30:15.100966 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-n4qb8/crc-debug-xz5wm"
Jan 22 14:30:15 crc kubenswrapper[4773]: I0122 14:30:15.336662 4773 scope.go:117] "RemoveContainer" containerID="439bc0c45b59d44bf8d65ef287b86cd1c7e6669d84e8d222c333b7687c485497"
Jan 22 14:30:15 crc kubenswrapper[4773]: I0122 14:30:15.440558 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-n4qb8/crc-debug-rmv62"]
Jan 22 14:30:15 crc kubenswrapper[4773]: E0122 14:30:15.441220 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7296abf7-c04f-430d-88cc-e62126b93e4e" containerName="container-00"
Jan 22 14:30:15 crc kubenswrapper[4773]: I0122 14:30:15.441329 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="7296abf7-c04f-430d-88cc-e62126b93e4e" containerName="container-00"
Jan 22 14:30:15 crc kubenswrapper[4773]: I0122 14:30:15.441633 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="7296abf7-c04f-430d-88cc-e62126b93e4e" containerName="container-00"
Jan 22 14:30:15 crc kubenswrapper[4773]: I0122 14:30:15.442409 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-n4qb8/crc-debug-rmv62"
Jan 22 14:30:15 crc kubenswrapper[4773]: I0122 14:30:15.507856 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkq2z\" (UniqueName: \"kubernetes.io/projected/089aa4c1-e47d-4211-bd45-80e61c2d9848-kube-api-access-hkq2z\") pod \"crc-debug-rmv62\" (UID: \"089aa4c1-e47d-4211-bd45-80e61c2d9848\") " pod="openshift-must-gather-n4qb8/crc-debug-rmv62"
Jan 22 14:30:15 crc kubenswrapper[4773]: I0122 14:30:15.508548 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/089aa4c1-e47d-4211-bd45-80e61c2d9848-host\") pod \"crc-debug-rmv62\" (UID: \"089aa4c1-e47d-4211-bd45-80e61c2d9848\") " pod="openshift-must-gather-n4qb8/crc-debug-rmv62"
Jan 22 14:30:15 crc kubenswrapper[4773]: I0122 14:30:15.611090 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkq2z\" (UniqueName: \"kubernetes.io/projected/089aa4c1-e47d-4211-bd45-80e61c2d9848-kube-api-access-hkq2z\") pod \"crc-debug-rmv62\" (UID: \"089aa4c1-e47d-4211-bd45-80e61c2d9848\") " pod="openshift-must-gather-n4qb8/crc-debug-rmv62"
Jan 22 14:30:15 crc kubenswrapper[4773]: I0122 14:30:15.611579 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/089aa4c1-e47d-4211-bd45-80e61c2d9848-host\") pod \"crc-debug-rmv62\" (UID: \"089aa4c1-e47d-4211-bd45-80e61c2d9848\") " pod="openshift-must-gather-n4qb8/crc-debug-rmv62"
Jan 22 14:30:15 crc kubenswrapper[4773]: I0122 14:30:15.611693 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/089aa4c1-e47d-4211-bd45-80e61c2d9848-host\") pod \"crc-debug-rmv62\" (UID: \"089aa4c1-e47d-4211-bd45-80e61c2d9848\") " pod="openshift-must-gather-n4qb8/crc-debug-rmv62"
Jan 22 14:30:15 crc kubenswrapper[4773]: I0122 14:30:15.633826 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkq2z\" (UniqueName: \"kubernetes.io/projected/089aa4c1-e47d-4211-bd45-80e61c2d9848-kube-api-access-hkq2z\") pod \"crc-debug-rmv62\" (UID: \"089aa4c1-e47d-4211-bd45-80e61c2d9848\") " pod="openshift-must-gather-n4qb8/crc-debug-rmv62"
Jan 22 14:30:15 crc kubenswrapper[4773]: I0122 14:30:15.761730 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-n4qb8/crc-debug-rmv62"
Jan 22 14:30:15 crc kubenswrapper[4773]: W0122 14:30:15.794996 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod089aa4c1_e47d_4211_bd45_80e61c2d9848.slice/crio-9febdab799b8a1985f4ce0f4f2e4a7ba1b78d441d00685eb1944cf50a22877f2 WatchSource:0}: Error finding container 9febdab799b8a1985f4ce0f4f2e4a7ba1b78d441d00685eb1944cf50a22877f2: Status 404 returned error can't find the container with id 9febdab799b8a1985f4ce0f4f2e4a7ba1b78d441d00685eb1944cf50a22877f2
Jan 22 14:30:16 crc kubenswrapper[4773]: I0122 14:30:16.115636 4773 generic.go:334] "Generic (PLEG): container finished" podID="089aa4c1-e47d-4211-bd45-80e61c2d9848" containerID="23f1b2909cfbc0662c019915a8f598d3ef995642ca56cd8fa639ea1e033366dc" exitCode=0
Jan 22 14:30:16 crc kubenswrapper[4773]: I0122 14:30:16.116216 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gmrrp" podUID="38978cc9-f8e1-4c57-bf80-ca1c147baecc" containerName="registry-server" containerID="cri-o://6fb63e079b1e129d6519978ff0543602235b003f9e5b73f54f8c535b31b79e80" gracePeriod=2
Jan 22 14:30:16 crc kubenswrapper[4773]: I0122 14:30:16.115804 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-n4qb8/crc-debug-rmv62" event={"ID":"089aa4c1-e47d-4211-bd45-80e61c2d9848","Type":"ContainerDied","Data":"23f1b2909cfbc0662c019915a8f598d3ef995642ca56cd8fa639ea1e033366dc"}
Jan 22 14:30:16 crc kubenswrapper[4773]: I0122 14:30:16.116379 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-n4qb8/crc-debug-rmv62" event={"ID":"089aa4c1-e47d-4211-bd45-80e61c2d9848","Type":"ContainerStarted","Data":"9febdab799b8a1985f4ce0f4f2e4a7ba1b78d441d00685eb1944cf50a22877f2"}
Jan 22 14:30:16 crc kubenswrapper[4773]: I0122 14:30:16.160038 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-n4qb8/crc-debug-rmv62"]
Jan 22 14:30:16 crc kubenswrapper[4773]: I0122 14:30:16.178752 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-n4qb8/crc-debug-rmv62"]
Jan 22 14:30:16 crc kubenswrapper[4773]: I0122 14:30:16.636101 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gmrrp"
Jan 22 14:30:16 crc kubenswrapper[4773]: I0122 14:30:16.744010 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38978cc9-f8e1-4c57-bf80-ca1c147baecc-catalog-content\") pod \"38978cc9-f8e1-4c57-bf80-ca1c147baecc\" (UID: \"38978cc9-f8e1-4c57-bf80-ca1c147baecc\") "
Jan 22 14:30:16 crc kubenswrapper[4773]: I0122 14:30:16.744058 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-29n9c\" (UniqueName: \"kubernetes.io/projected/38978cc9-f8e1-4c57-bf80-ca1c147baecc-kube-api-access-29n9c\") pod \"38978cc9-f8e1-4c57-bf80-ca1c147baecc\" (UID: \"38978cc9-f8e1-4c57-bf80-ca1c147baecc\") "
Jan 22 14:30:16 crc kubenswrapper[4773]: I0122 14:30:16.744310 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38978cc9-f8e1-4c57-bf80-ca1c147baecc-utilities\") pod \"38978cc9-f8e1-4c57-bf80-ca1c147baecc\" (UID: \"38978cc9-f8e1-4c57-bf80-ca1c147baecc\") "
Jan 22 14:30:16 crc kubenswrapper[4773]: I0122 14:30:16.745733 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38978cc9-f8e1-4c57-bf80-ca1c147baecc-utilities" (OuterVolumeSpecName: "utilities") pod "38978cc9-f8e1-4c57-bf80-ca1c147baecc" (UID: "38978cc9-f8e1-4c57-bf80-ca1c147baecc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 14:30:16 crc kubenswrapper[4773]: I0122 14:30:16.749557 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38978cc9-f8e1-4c57-bf80-ca1c147baecc-kube-api-access-29n9c" (OuterVolumeSpecName: "kube-api-access-29n9c") pod "38978cc9-f8e1-4c57-bf80-ca1c147baecc" (UID: "38978cc9-f8e1-4c57-bf80-ca1c147baecc"). InnerVolumeSpecName "kube-api-access-29n9c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 14:30:16 crc kubenswrapper[4773]: I0122 14:30:16.774100 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38978cc9-f8e1-4c57-bf80-ca1c147baecc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "38978cc9-f8e1-4c57-bf80-ca1c147baecc" (UID: "38978cc9-f8e1-4c57-bf80-ca1c147baecc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 14:30:16 crc kubenswrapper[4773]: I0122 14:30:16.850461 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38978cc9-f8e1-4c57-bf80-ca1c147baecc-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 14:30:16 crc kubenswrapper[4773]: I0122 14:30:16.850503 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38978cc9-f8e1-4c57-bf80-ca1c147baecc-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 14:30:16 crc kubenswrapper[4773]: I0122 14:30:16.850521 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-29n9c\" (UniqueName: \"kubernetes.io/projected/38978cc9-f8e1-4c57-bf80-ca1c147baecc-kube-api-access-29n9c\") on node \"crc\" DevicePath \"\""
Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.142151 4773 generic.go:334] "Generic (PLEG): container finished" podID="38978cc9-f8e1-4c57-bf80-ca1c147baecc" containerID="6fb63e079b1e129d6519978ff0543602235b003f9e5b73f54f8c535b31b79e80" exitCode=0
Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.142599 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gmrrp"
Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.142494 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gmrrp" event={"ID":"38978cc9-f8e1-4c57-bf80-ca1c147baecc","Type":"ContainerDied","Data":"6fb63e079b1e129d6519978ff0543602235b003f9e5b73f54f8c535b31b79e80"}
Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.144112 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gmrrp" event={"ID":"38978cc9-f8e1-4c57-bf80-ca1c147baecc","Type":"ContainerDied","Data":"9c13612aea84bb264f2deee83a3fab13de09b0bd38d01dce91b0bf6f6096d070"}
Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.144166 4773 scope.go:117] "RemoveContainer" containerID="6fb63e079b1e129d6519978ff0543602235b003f9e5b73f54f8c535b31b79e80"
Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.270747 4773 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-must-gather-n4qb8/crc-debug-rmv62" Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.272437 4773 scope.go:117] "RemoveContainer" containerID="1ec44e1c886ea2156eaf9582f005f4123fe47dd7d8cecca01fe8e09928afbabe" Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.296486 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gmrrp"] Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.313120 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gmrrp"] Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.314999 4773 scope.go:117] "RemoveContainer" containerID="2db1c74c8d9f072f65b0c886e5357204a1d1676e24f1dfb490447c82394d10db" Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.364880 4773 scope.go:117] "RemoveContainer" containerID="6fb63e079b1e129d6519978ff0543602235b003f9e5b73f54f8c535b31b79e80" Jan 22 14:30:17 crc kubenswrapper[4773]: E0122 14:30:17.365344 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fb63e079b1e129d6519978ff0543602235b003f9e5b73f54f8c535b31b79e80\": container with ID starting with 6fb63e079b1e129d6519978ff0543602235b003f9e5b73f54f8c535b31b79e80 not found: ID does not exist" containerID="6fb63e079b1e129d6519978ff0543602235b003f9e5b73f54f8c535b31b79e80" Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.365385 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fb63e079b1e129d6519978ff0543602235b003f9e5b73f54f8c535b31b79e80"} err="failed to get container status \"6fb63e079b1e129d6519978ff0543602235b003f9e5b73f54f8c535b31b79e80\": rpc error: code = NotFound desc = could not find container \"6fb63e079b1e129d6519978ff0543602235b003f9e5b73f54f8c535b31b79e80\": container with ID starting with 6fb63e079b1e129d6519978ff0543602235b003f9e5b73f54f8c535b31b79e80 not found: ID does not exist" Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.365411 4773 scope.go:117] "RemoveContainer" containerID="1ec44e1c886ea2156eaf9582f005f4123fe47dd7d8cecca01fe8e09928afbabe" Jan 22 14:30:17 crc kubenswrapper[4773]: E0122 14:30:17.365847 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ec44e1c886ea2156eaf9582f005f4123fe47dd7d8cecca01fe8e09928afbabe\": container with ID starting with 1ec44e1c886ea2156eaf9582f005f4123fe47dd7d8cecca01fe8e09928afbabe not found: ID does not exist" containerID="1ec44e1c886ea2156eaf9582f005f4123fe47dd7d8cecca01fe8e09928afbabe" Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.365906 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ec44e1c886ea2156eaf9582f005f4123fe47dd7d8cecca01fe8e09928afbabe"} err="failed to get container status \"1ec44e1c886ea2156eaf9582f005f4123fe47dd7d8cecca01fe8e09928afbabe\": rpc error: code = NotFound desc = could not find container \"1ec44e1c886ea2156eaf9582f005f4123fe47dd7d8cecca01fe8e09928afbabe\": container with ID starting with 1ec44e1c886ea2156eaf9582f005f4123fe47dd7d8cecca01fe8e09928afbabe not found: ID does not exist" Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.365927 4773 scope.go:117] "RemoveContainer" containerID="2db1c74c8d9f072f65b0c886e5357204a1d1676e24f1dfb490447c82394d10db" Jan 22 14:30:17 crc kubenswrapper[4773]: E0122 14:30:17.366185 4773 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"2db1c74c8d9f072f65b0c886e5357204a1d1676e24f1dfb490447c82394d10db\": container with ID starting with 2db1c74c8d9f072f65b0c886e5357204a1d1676e24f1dfb490447c82394d10db not found: ID does not exist" containerID="2db1c74c8d9f072f65b0c886e5357204a1d1676e24f1dfb490447c82394d10db" Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.366213 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2db1c74c8d9f072f65b0c886e5357204a1d1676e24f1dfb490447c82394d10db"} err="failed to get container status \"2db1c74c8d9f072f65b0c886e5357204a1d1676e24f1dfb490447c82394d10db\": rpc error: code = NotFound desc = could not find container \"2db1c74c8d9f072f65b0c886e5357204a1d1676e24f1dfb490447c82394d10db\": container with ID starting with 2db1c74c8d9f072f65b0c886e5357204a1d1676e24f1dfb490447c82394d10db not found: ID does not exist" Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.369603 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hkq2z\" (UniqueName: \"kubernetes.io/projected/089aa4c1-e47d-4211-bd45-80e61c2d9848-kube-api-access-hkq2z\") pod \"089aa4c1-e47d-4211-bd45-80e61c2d9848\" (UID: \"089aa4c1-e47d-4211-bd45-80e61c2d9848\") " Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.369789 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/089aa4c1-e47d-4211-bd45-80e61c2d9848-host\") pod \"089aa4c1-e47d-4211-bd45-80e61c2d9848\" (UID: \"089aa4c1-e47d-4211-bd45-80e61c2d9848\") " Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.369827 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/089aa4c1-e47d-4211-bd45-80e61c2d9848-host" (OuterVolumeSpecName: "host") pod "089aa4c1-e47d-4211-bd45-80e61c2d9848" (UID: "089aa4c1-e47d-4211-bd45-80e61c2d9848"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.370490 4773 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/089aa4c1-e47d-4211-bd45-80e61c2d9848-host\") on node \"crc\" DevicePath \"\"" Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.374384 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/089aa4c1-e47d-4211-bd45-80e61c2d9848-kube-api-access-hkq2z" (OuterVolumeSpecName: "kube-api-access-hkq2z") pod "089aa4c1-e47d-4211-bd45-80e61c2d9848" (UID: "089aa4c1-e47d-4211-bd45-80e61c2d9848"). InnerVolumeSpecName "kube-api-access-hkq2z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:30:17 crc kubenswrapper[4773]: I0122 14:30:17.474740 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hkq2z\" (UniqueName: \"kubernetes.io/projected/089aa4c1-e47d-4211-bd45-80e61c2d9848-kube-api-access-hkq2z\") on node \"crc\" DevicePath \"\"" Jan 22 14:30:18 crc kubenswrapper[4773]: I0122 14:30:18.155589 4773 scope.go:117] "RemoveContainer" containerID="23f1b2909cfbc0662c019915a8f598d3ef995642ca56cd8fa639ea1e033366dc" Jan 22 14:30:18 crc kubenswrapper[4773]: I0122 14:30:18.155873 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-n4qb8/crc-debug-rmv62" Jan 22 14:30:18 crc kubenswrapper[4773]: I0122 14:30:18.694512 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="089aa4c1-e47d-4211-bd45-80e61c2d9848" path="/var/lib/kubelet/pods/089aa4c1-e47d-4211-bd45-80e61c2d9848/volumes" Jan 22 14:30:18 crc kubenswrapper[4773]: I0122 14:30:18.695246 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38978cc9-f8e1-4c57-bf80-ca1c147baecc" path="/var/lib/kubelet/pods/38978cc9-f8e1-4c57-bf80-ca1c147baecc/volumes" Jan 22 14:30:23 crc kubenswrapper[4773]: I0122 14:30:23.658148 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:30:23 crc kubenswrapper[4773]: E0122 14:30:23.659051 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:30:37 crc kubenswrapper[4773]: I0122 14:30:37.658755 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:30:37 crc kubenswrapper[4773]: E0122 14:30:37.659579 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:30:52 crc kubenswrapper[4773]: I0122 14:30:52.672612 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:30:52 crc kubenswrapper[4773]: E0122 14:30:52.673588 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:31:05 crc kubenswrapper[4773]: I0122 14:31:05.658680 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:31:05 crc kubenswrapper[4773]: E0122 14:31:05.659610 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:31:09 crc kubenswrapper[4773]: I0122 14:31:09.115453 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vqw46"] Jan 22 14:31:09 crc kubenswrapper[4773]: E0122 14:31:09.116372 4773 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="38978cc9-f8e1-4c57-bf80-ca1c147baecc" containerName="extract-utilities" Jan 22 14:31:09 crc kubenswrapper[4773]: I0122 14:31:09.116384 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="38978cc9-f8e1-4c57-bf80-ca1c147baecc" containerName="extract-utilities" Jan 22 14:31:09 crc kubenswrapper[4773]: E0122 14:31:09.116416 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38978cc9-f8e1-4c57-bf80-ca1c147baecc" containerName="registry-server" Jan 22 14:31:09 crc kubenswrapper[4773]: I0122 14:31:09.116422 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="38978cc9-f8e1-4c57-bf80-ca1c147baecc" containerName="registry-server" Jan 22 14:31:09 crc kubenswrapper[4773]: E0122 14:31:09.116440 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="089aa4c1-e47d-4211-bd45-80e61c2d9848" containerName="container-00" Jan 22 14:31:09 crc kubenswrapper[4773]: I0122 14:31:09.116447 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="089aa4c1-e47d-4211-bd45-80e61c2d9848" containerName="container-00" Jan 22 14:31:09 crc kubenswrapper[4773]: E0122 14:31:09.116472 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38978cc9-f8e1-4c57-bf80-ca1c147baecc" containerName="extract-content" Jan 22 14:31:09 crc kubenswrapper[4773]: I0122 14:31:09.116482 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="38978cc9-f8e1-4c57-bf80-ca1c147baecc" containerName="extract-content" Jan 22 14:31:09 crc kubenswrapper[4773]: I0122 14:31:09.116677 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="089aa4c1-e47d-4211-bd45-80e61c2d9848" containerName="container-00" Jan 22 14:31:09 crc kubenswrapper[4773]: I0122 14:31:09.116712 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="38978cc9-f8e1-4c57-bf80-ca1c147baecc" containerName="registry-server" Jan 22 14:31:09 crc kubenswrapper[4773]: I0122 14:31:09.118370 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vqw46" Jan 22 14:31:09 crc kubenswrapper[4773]: I0122 14:31:09.133734 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vqw46"] Jan 22 14:31:09 crc kubenswrapper[4773]: I0122 14:31:09.183606 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/973781d3-44a1-47ad-b35a-03fa93d4c0e0-utilities\") pod \"certified-operators-vqw46\" (UID: \"973781d3-44a1-47ad-b35a-03fa93d4c0e0\") " pod="openshift-marketplace/certified-operators-vqw46" Jan 22 14:31:09 crc kubenswrapper[4773]: I0122 14:31:09.183678 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/973781d3-44a1-47ad-b35a-03fa93d4c0e0-catalog-content\") pod \"certified-operators-vqw46\" (UID: \"973781d3-44a1-47ad-b35a-03fa93d4c0e0\") " pod="openshift-marketplace/certified-operators-vqw46" Jan 22 14:31:09 crc kubenswrapper[4773]: I0122 14:31:09.183925 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7m7l\" (UniqueName: \"kubernetes.io/projected/973781d3-44a1-47ad-b35a-03fa93d4c0e0-kube-api-access-l7m7l\") pod \"certified-operators-vqw46\" (UID: \"973781d3-44a1-47ad-b35a-03fa93d4c0e0\") " pod="openshift-marketplace/certified-operators-vqw46" Jan 22 14:31:09 crc kubenswrapper[4773]: I0122 14:31:09.285626 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7m7l\" (UniqueName: \"kubernetes.io/projected/973781d3-44a1-47ad-b35a-03fa93d4c0e0-kube-api-access-l7m7l\") pod \"certified-operators-vqw46\" (UID: \"973781d3-44a1-47ad-b35a-03fa93d4c0e0\") " pod="openshift-marketplace/certified-operators-vqw46" Jan 22 14:31:09 crc kubenswrapper[4773]: I0122 14:31:09.285753 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/973781d3-44a1-47ad-b35a-03fa93d4c0e0-utilities\") pod \"certified-operators-vqw46\" (UID: \"973781d3-44a1-47ad-b35a-03fa93d4c0e0\") " pod="openshift-marketplace/certified-operators-vqw46" Jan 22 14:31:09 crc kubenswrapper[4773]: I0122 14:31:09.285785 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/973781d3-44a1-47ad-b35a-03fa93d4c0e0-catalog-content\") pod \"certified-operators-vqw46\" (UID: \"973781d3-44a1-47ad-b35a-03fa93d4c0e0\") " pod="openshift-marketplace/certified-operators-vqw46" Jan 22 14:31:09 crc kubenswrapper[4773]: I0122 14:31:09.286164 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/973781d3-44a1-47ad-b35a-03fa93d4c0e0-utilities\") pod \"certified-operators-vqw46\" (UID: \"973781d3-44a1-47ad-b35a-03fa93d4c0e0\") " pod="openshift-marketplace/certified-operators-vqw46" Jan 22 14:31:09 crc kubenswrapper[4773]: I0122 14:31:09.286325 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/973781d3-44a1-47ad-b35a-03fa93d4c0e0-catalog-content\") pod \"certified-operators-vqw46\" (UID: \"973781d3-44a1-47ad-b35a-03fa93d4c0e0\") " pod="openshift-marketplace/certified-operators-vqw46" Jan 22 14:31:09 crc kubenswrapper[4773]: I0122 14:31:09.317430 4773 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-l7m7l\" (UniqueName: \"kubernetes.io/projected/973781d3-44a1-47ad-b35a-03fa93d4c0e0-kube-api-access-l7m7l\") pod \"certified-operators-vqw46\" (UID: \"973781d3-44a1-47ad-b35a-03fa93d4c0e0\") " pod="openshift-marketplace/certified-operators-vqw46" Jan 22 14:31:09 crc kubenswrapper[4773]: I0122 14:31:09.439960 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vqw46" Jan 22 14:31:10 crc kubenswrapper[4773]: I0122 14:31:10.029227 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vqw46"] Jan 22 14:31:10 crc kubenswrapper[4773]: I0122 14:31:10.748039 4773 generic.go:334] "Generic (PLEG): container finished" podID="973781d3-44a1-47ad-b35a-03fa93d4c0e0" containerID="939675bedfd192b9f9e12a71cdef6bf2e944fa48c8302c79c0c4eac578145aec" exitCode=0 Jan 22 14:31:10 crc kubenswrapper[4773]: I0122 14:31:10.748264 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vqw46" event={"ID":"973781d3-44a1-47ad-b35a-03fa93d4c0e0","Type":"ContainerDied","Data":"939675bedfd192b9f9e12a71cdef6bf2e944fa48c8302c79c0c4eac578145aec"} Jan 22 14:31:10 crc kubenswrapper[4773]: I0122 14:31:10.748314 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vqw46" event={"ID":"973781d3-44a1-47ad-b35a-03fa93d4c0e0","Type":"ContainerStarted","Data":"9f0deec34388940a3a4a29393b5767e8e3e7001caf82fbfd8905726302e42fa8"} Jan 22 14:31:12 crc kubenswrapper[4773]: I0122 14:31:12.769380 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vqw46" event={"ID":"973781d3-44a1-47ad-b35a-03fa93d4c0e0","Type":"ContainerStarted","Data":"229bbfcaec1152548f0305022561f8536ac6b99890eea9c2e5e907c3fe86a464"} Jan 22 14:31:13 crc kubenswrapper[4773]: I0122 14:31:13.785619 4773 generic.go:334] "Generic (PLEG): container finished" podID="973781d3-44a1-47ad-b35a-03fa93d4c0e0" containerID="229bbfcaec1152548f0305022561f8536ac6b99890eea9c2e5e907c3fe86a464" exitCode=0 Jan 22 14:31:13 crc kubenswrapper[4773]: I0122 14:31:13.785956 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vqw46" event={"ID":"973781d3-44a1-47ad-b35a-03fa93d4c0e0","Type":"ContainerDied","Data":"229bbfcaec1152548f0305022561f8536ac6b99890eea9c2e5e907c3fe86a464"} Jan 22 14:31:14 crc kubenswrapper[4773]: I0122 14:31:14.798645 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vqw46" event={"ID":"973781d3-44a1-47ad-b35a-03fa93d4c0e0","Type":"ContainerStarted","Data":"137ff4bcea45aa45ee4bceb21446985c1749c33620085624827a76027a143bc6"} Jan 22 14:31:14 crc kubenswrapper[4773]: I0122 14:31:14.827161 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vqw46" podStartSLOduration=2.358220744 podStartE2EDuration="5.827137714s" podCreationTimestamp="2026-01-22 14:31:09 +0000 UTC" firstStartedPulling="2026-01-22 14:31:10.750024647 +0000 UTC m=+9378.328140472" lastFinishedPulling="2026-01-22 14:31:14.218941617 +0000 UTC m=+9381.797057442" observedRunningTime="2026-01-22 14:31:14.815213886 +0000 UTC m=+9382.393329721" watchObservedRunningTime="2026-01-22 14:31:14.827137714 +0000 UTC m=+9382.405253539" Jan 22 14:31:19 crc kubenswrapper[4773]: I0122 14:31:19.440440 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-vqw46" Jan 22 14:31:19 crc kubenswrapper[4773]: I0122 14:31:19.441188 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vqw46" Jan 22 14:31:19 crc kubenswrapper[4773]: I0122 14:31:19.496721 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vqw46" Jan 22 14:31:19 crc kubenswrapper[4773]: I0122 14:31:19.659578 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:31:19 crc kubenswrapper[4773]: E0122 14:31:19.659840 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:31:19 crc kubenswrapper[4773]: I0122 14:31:19.899784 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vqw46" Jan 22 14:31:19 crc kubenswrapper[4773]: I0122 14:31:19.955185 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vqw46"] Jan 22 14:31:21 crc kubenswrapper[4773]: I0122 14:31:21.879539 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vqw46" podUID="973781d3-44a1-47ad-b35a-03fa93d4c0e0" containerName="registry-server" containerID="cri-o://137ff4bcea45aa45ee4bceb21446985c1749c33620085624827a76027a143bc6" gracePeriod=2 Jan 22 14:31:22 crc kubenswrapper[4773]: I0122 14:31:22.481027 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vqw46" Jan 22 14:31:22 crc kubenswrapper[4773]: I0122 14:31:22.591733 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7m7l\" (UniqueName: \"kubernetes.io/projected/973781d3-44a1-47ad-b35a-03fa93d4c0e0-kube-api-access-l7m7l\") pod \"973781d3-44a1-47ad-b35a-03fa93d4c0e0\" (UID: \"973781d3-44a1-47ad-b35a-03fa93d4c0e0\") " Jan 22 14:31:22 crc kubenswrapper[4773]: I0122 14:31:22.591929 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/973781d3-44a1-47ad-b35a-03fa93d4c0e0-catalog-content\") pod \"973781d3-44a1-47ad-b35a-03fa93d4c0e0\" (UID: \"973781d3-44a1-47ad-b35a-03fa93d4c0e0\") " Jan 22 14:31:22 crc kubenswrapper[4773]: I0122 14:31:22.591973 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/973781d3-44a1-47ad-b35a-03fa93d4c0e0-utilities\") pod \"973781d3-44a1-47ad-b35a-03fa93d4c0e0\" (UID: \"973781d3-44a1-47ad-b35a-03fa93d4c0e0\") " Jan 22 14:31:22 crc kubenswrapper[4773]: I0122 14:31:22.593038 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/973781d3-44a1-47ad-b35a-03fa93d4c0e0-utilities" (OuterVolumeSpecName: "utilities") pod "973781d3-44a1-47ad-b35a-03fa93d4c0e0" (UID: "973781d3-44a1-47ad-b35a-03fa93d4c0e0"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:31:22 crc kubenswrapper[4773]: I0122 14:31:22.657524 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/973781d3-44a1-47ad-b35a-03fa93d4c0e0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "973781d3-44a1-47ad-b35a-03fa93d4c0e0" (UID: "973781d3-44a1-47ad-b35a-03fa93d4c0e0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:31:22 crc kubenswrapper[4773]: I0122 14:31:22.693833 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/973781d3-44a1-47ad-b35a-03fa93d4c0e0-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:31:22 crc kubenswrapper[4773]: I0122 14:31:22.693865 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/973781d3-44a1-47ad-b35a-03fa93d4c0e0-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:31:22 crc kubenswrapper[4773]: I0122 14:31:22.891314 4773 generic.go:334] "Generic (PLEG): container finished" podID="973781d3-44a1-47ad-b35a-03fa93d4c0e0" containerID="137ff4bcea45aa45ee4bceb21446985c1749c33620085624827a76027a143bc6" exitCode=0 Jan 22 14:31:22 crc kubenswrapper[4773]: I0122 14:31:22.891377 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vqw46" Jan 22 14:31:22 crc kubenswrapper[4773]: I0122 14:31:22.891374 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vqw46" event={"ID":"973781d3-44a1-47ad-b35a-03fa93d4c0e0","Type":"ContainerDied","Data":"137ff4bcea45aa45ee4bceb21446985c1749c33620085624827a76027a143bc6"} Jan 22 14:31:22 crc kubenswrapper[4773]: I0122 14:31:22.891811 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vqw46" event={"ID":"973781d3-44a1-47ad-b35a-03fa93d4c0e0","Type":"ContainerDied","Data":"9f0deec34388940a3a4a29393b5767e8e3e7001caf82fbfd8905726302e42fa8"} Jan 22 14:31:22 crc kubenswrapper[4773]: I0122 14:31:22.891847 4773 scope.go:117] "RemoveContainer" containerID="137ff4bcea45aa45ee4bceb21446985c1749c33620085624827a76027a143bc6" Jan 22 14:31:22 crc kubenswrapper[4773]: I0122 14:31:22.915486 4773 scope.go:117] "RemoveContainer" containerID="229bbfcaec1152548f0305022561f8536ac6b99890eea9c2e5e907c3fe86a464" Jan 22 14:31:23 crc kubenswrapper[4773]: I0122 14:31:23.299766 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/973781d3-44a1-47ad-b35a-03fa93d4c0e0-kube-api-access-l7m7l" (OuterVolumeSpecName: "kube-api-access-l7m7l") pod "973781d3-44a1-47ad-b35a-03fa93d4c0e0" (UID: "973781d3-44a1-47ad-b35a-03fa93d4c0e0"). InnerVolumeSpecName "kube-api-access-l7m7l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:31:23 crc kubenswrapper[4773]: I0122 14:31:23.308129 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7m7l\" (UniqueName: \"kubernetes.io/projected/973781d3-44a1-47ad-b35a-03fa93d4c0e0-kube-api-access-l7m7l\") on node \"crc\" DevicePath \"\"" Jan 22 14:31:23 crc kubenswrapper[4773]: I0122 14:31:23.315041 4773 scope.go:117] "RemoveContainer" containerID="939675bedfd192b9f9e12a71cdef6bf2e944fa48c8302c79c0c4eac578145aec" Jan 22 14:31:23 crc kubenswrapper[4773]: I0122 14:31:23.444756 4773 scope.go:117] "RemoveContainer" containerID="137ff4bcea45aa45ee4bceb21446985c1749c33620085624827a76027a143bc6" Jan 22 14:31:23 crc kubenswrapper[4773]: E0122 14:31:23.445360 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"137ff4bcea45aa45ee4bceb21446985c1749c33620085624827a76027a143bc6\": container with ID starting with 137ff4bcea45aa45ee4bceb21446985c1749c33620085624827a76027a143bc6 not found: ID does not exist" containerID="137ff4bcea45aa45ee4bceb21446985c1749c33620085624827a76027a143bc6" Jan 22 14:31:23 crc kubenswrapper[4773]: I0122 14:31:23.445392 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"137ff4bcea45aa45ee4bceb21446985c1749c33620085624827a76027a143bc6"} err="failed to get container status \"137ff4bcea45aa45ee4bceb21446985c1749c33620085624827a76027a143bc6\": rpc error: code = NotFound desc = could not find container \"137ff4bcea45aa45ee4bceb21446985c1749c33620085624827a76027a143bc6\": container with ID starting with 137ff4bcea45aa45ee4bceb21446985c1749c33620085624827a76027a143bc6 not found: ID does not exist" Jan 22 14:31:23 crc kubenswrapper[4773]: I0122 14:31:23.445412 4773 scope.go:117] "RemoveContainer" containerID="229bbfcaec1152548f0305022561f8536ac6b99890eea9c2e5e907c3fe86a464" Jan 22 14:31:23 crc kubenswrapper[4773]: E0122 14:31:23.445952 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"229bbfcaec1152548f0305022561f8536ac6b99890eea9c2e5e907c3fe86a464\": container with ID starting with 229bbfcaec1152548f0305022561f8536ac6b99890eea9c2e5e907c3fe86a464 not found: ID does not exist" containerID="229bbfcaec1152548f0305022561f8536ac6b99890eea9c2e5e907c3fe86a464" Jan 22 14:31:23 crc kubenswrapper[4773]: I0122 14:31:23.446008 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"229bbfcaec1152548f0305022561f8536ac6b99890eea9c2e5e907c3fe86a464"} err="failed to get container status \"229bbfcaec1152548f0305022561f8536ac6b99890eea9c2e5e907c3fe86a464\": rpc error: code = NotFound desc = could not find container \"229bbfcaec1152548f0305022561f8536ac6b99890eea9c2e5e907c3fe86a464\": container with ID starting with 229bbfcaec1152548f0305022561f8536ac6b99890eea9c2e5e907c3fe86a464 not found: ID does not exist" Jan 22 14:31:23 crc kubenswrapper[4773]: I0122 14:31:23.446042 4773 scope.go:117] "RemoveContainer" containerID="939675bedfd192b9f9e12a71cdef6bf2e944fa48c8302c79c0c4eac578145aec" Jan 22 14:31:23 crc kubenswrapper[4773]: E0122 14:31:23.446492 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"939675bedfd192b9f9e12a71cdef6bf2e944fa48c8302c79c0c4eac578145aec\": container with ID starting with 939675bedfd192b9f9e12a71cdef6bf2e944fa48c8302c79c0c4eac578145aec not found: ID does not 
exist" containerID="939675bedfd192b9f9e12a71cdef6bf2e944fa48c8302c79c0c4eac578145aec" Jan 22 14:31:23 crc kubenswrapper[4773]: I0122 14:31:23.446517 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"939675bedfd192b9f9e12a71cdef6bf2e944fa48c8302c79c0c4eac578145aec"} err="failed to get container status \"939675bedfd192b9f9e12a71cdef6bf2e944fa48c8302c79c0c4eac578145aec\": rpc error: code = NotFound desc = could not find container \"939675bedfd192b9f9e12a71cdef6bf2e944fa48c8302c79c0c4eac578145aec\": container with ID starting with 939675bedfd192b9f9e12a71cdef6bf2e944fa48c8302c79c0c4eac578145aec not found: ID does not exist" Jan 22 14:31:23 crc kubenswrapper[4773]: I0122 14:31:23.537113 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vqw46"] Jan 22 14:31:23 crc kubenswrapper[4773]: I0122 14:31:23.550077 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vqw46"] Jan 22 14:31:24 crc kubenswrapper[4773]: I0122 14:31:24.674408 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="973781d3-44a1-47ad-b35a-03fa93d4c0e0" path="/var/lib/kubelet/pods/973781d3-44a1-47ad-b35a-03fa93d4c0e0/volumes" Jan 22 14:31:30 crc kubenswrapper[4773]: I0122 14:31:30.658894 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:31:30 crc kubenswrapper[4773]: E0122 14:31:30.659889 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:31:45 crc kubenswrapper[4773]: I0122 14:31:45.658449 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:31:45 crc kubenswrapper[4773]: E0122 14:31:45.659257 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:31:57 crc kubenswrapper[4773]: I0122 14:31:57.658547 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:31:57 crc kubenswrapper[4773]: E0122 14:31:57.659600 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:32:10 crc kubenswrapper[4773]: I0122 14:32:10.658001 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:32:11 crc kubenswrapper[4773]: I0122 14:32:11.424573 4773 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"0c99cf5bd72a8c83907456a25951bd2d402249dde0e1031ee27528783149ed73"} Jan 22 14:34:34 crc kubenswrapper[4773]: I0122 14:34:34.075003 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:34:34 crc kubenswrapper[4773]: I0122 14:34:34.075813 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:34:37 crc kubenswrapper[4773]: I0122 14:34:37.677172 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kbs6b"] Jan 22 14:34:37 crc kubenswrapper[4773]: E0122 14:34:37.678669 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="973781d3-44a1-47ad-b35a-03fa93d4c0e0" containerName="extract-content" Jan 22 14:34:37 crc kubenswrapper[4773]: I0122 14:34:37.678704 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="973781d3-44a1-47ad-b35a-03fa93d4c0e0" containerName="extract-content" Jan 22 14:34:37 crc kubenswrapper[4773]: E0122 14:34:37.678753 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="973781d3-44a1-47ad-b35a-03fa93d4c0e0" containerName="registry-server" Jan 22 14:34:37 crc kubenswrapper[4773]: I0122 14:34:37.678768 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="973781d3-44a1-47ad-b35a-03fa93d4c0e0" containerName="registry-server" Jan 22 14:34:37 crc kubenswrapper[4773]: E0122 14:34:37.678801 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="973781d3-44a1-47ad-b35a-03fa93d4c0e0" containerName="extract-utilities" Jan 22 14:34:37 crc kubenswrapper[4773]: I0122 14:34:37.678819 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="973781d3-44a1-47ad-b35a-03fa93d4c0e0" containerName="extract-utilities" Jan 22 14:34:37 crc kubenswrapper[4773]: I0122 14:34:37.679453 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="973781d3-44a1-47ad-b35a-03fa93d4c0e0" containerName="registry-server" Jan 22 14:34:37 crc kubenswrapper[4773]: I0122 14:34:37.683599 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kbs6b" Jan 22 14:34:37 crc kubenswrapper[4773]: I0122 14:34:37.702008 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kbs6b"] Jan 22 14:34:37 crc kubenswrapper[4773]: I0122 14:34:37.852309 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2caeb528-c83d-4535-be7d-e9b2afee7ff4-catalog-content\") pod \"redhat-operators-kbs6b\" (UID: \"2caeb528-c83d-4535-be7d-e9b2afee7ff4\") " pod="openshift-marketplace/redhat-operators-kbs6b" Jan 22 14:34:37 crc kubenswrapper[4773]: I0122 14:34:37.853580 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7t5sw\" (UniqueName: \"kubernetes.io/projected/2caeb528-c83d-4535-be7d-e9b2afee7ff4-kube-api-access-7t5sw\") pod \"redhat-operators-kbs6b\" (UID: \"2caeb528-c83d-4535-be7d-e9b2afee7ff4\") " pod="openshift-marketplace/redhat-operators-kbs6b" Jan 22 14:34:37 crc kubenswrapper[4773]: I0122 14:34:37.853781 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2caeb528-c83d-4535-be7d-e9b2afee7ff4-utilities\") pod \"redhat-operators-kbs6b\" (UID: \"2caeb528-c83d-4535-be7d-e9b2afee7ff4\") " pod="openshift-marketplace/redhat-operators-kbs6b" Jan 22 14:34:37 crc kubenswrapper[4773]: I0122 14:34:37.955950 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2caeb528-c83d-4535-be7d-e9b2afee7ff4-catalog-content\") pod \"redhat-operators-kbs6b\" (UID: \"2caeb528-c83d-4535-be7d-e9b2afee7ff4\") " pod="openshift-marketplace/redhat-operators-kbs6b" Jan 22 14:34:37 crc kubenswrapper[4773]: I0122 14:34:37.956083 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7t5sw\" (UniqueName: \"kubernetes.io/projected/2caeb528-c83d-4535-be7d-e9b2afee7ff4-kube-api-access-7t5sw\") pod \"redhat-operators-kbs6b\" (UID: \"2caeb528-c83d-4535-be7d-e9b2afee7ff4\") " pod="openshift-marketplace/redhat-operators-kbs6b" Jan 22 14:34:37 crc kubenswrapper[4773]: I0122 14:34:37.956157 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2caeb528-c83d-4535-be7d-e9b2afee7ff4-utilities\") pod \"redhat-operators-kbs6b\" (UID: \"2caeb528-c83d-4535-be7d-e9b2afee7ff4\") " pod="openshift-marketplace/redhat-operators-kbs6b" Jan 22 14:34:37 crc kubenswrapper[4773]: I0122 14:34:37.956562 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2caeb528-c83d-4535-be7d-e9b2afee7ff4-catalog-content\") pod \"redhat-operators-kbs6b\" (UID: \"2caeb528-c83d-4535-be7d-e9b2afee7ff4\") " pod="openshift-marketplace/redhat-operators-kbs6b" Jan 22 14:34:37 crc kubenswrapper[4773]: I0122 14:34:37.956888 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2caeb528-c83d-4535-be7d-e9b2afee7ff4-utilities\") pod \"redhat-operators-kbs6b\" (UID: \"2caeb528-c83d-4535-be7d-e9b2afee7ff4\") " pod="openshift-marketplace/redhat-operators-kbs6b" Jan 22 14:34:37 crc kubenswrapper[4773]: I0122 14:34:37.978973 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-7t5sw\" (UniqueName: \"kubernetes.io/projected/2caeb528-c83d-4535-be7d-e9b2afee7ff4-kube-api-access-7t5sw\") pod \"redhat-operators-kbs6b\" (UID: \"2caeb528-c83d-4535-be7d-e9b2afee7ff4\") " pod="openshift-marketplace/redhat-operators-kbs6b" Jan 22 14:34:38 crc kubenswrapper[4773]: I0122 14:34:38.022568 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kbs6b" Jan 22 14:34:38 crc kubenswrapper[4773]: I0122 14:34:38.555635 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kbs6b"] Jan 22 14:34:39 crc kubenswrapper[4773]: W0122 14:34:39.216996 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2caeb528_c83d_4535_be7d_e9b2afee7ff4.slice/crio-dd897705d06de5d6dfdba7b67aef70365373ee02aacb1b18d8e41023e4e442f2 WatchSource:0}: Error finding container dd897705d06de5d6dfdba7b67aef70365373ee02aacb1b18d8e41023e4e442f2: Status 404 returned error can't find the container with id dd897705d06de5d6dfdba7b67aef70365373ee02aacb1b18d8e41023e4e442f2 Jan 22 14:34:39 crc kubenswrapper[4773]: I0122 14:34:39.609509 4773 generic.go:334] "Generic (PLEG): container finished" podID="2caeb528-c83d-4535-be7d-e9b2afee7ff4" containerID="9b731994317ddc03760be4cf5f84b98068d9df6ddf2bf4ba33ab2ffa28afe4ce" exitCode=0 Jan 22 14:34:39 crc kubenswrapper[4773]: I0122 14:34:39.609583 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kbs6b" event={"ID":"2caeb528-c83d-4535-be7d-e9b2afee7ff4","Type":"ContainerDied","Data":"9b731994317ddc03760be4cf5f84b98068d9df6ddf2bf4ba33ab2ffa28afe4ce"} Jan 22 14:34:39 crc kubenswrapper[4773]: I0122 14:34:39.610142 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kbs6b" event={"ID":"2caeb528-c83d-4535-be7d-e9b2afee7ff4","Type":"ContainerStarted","Data":"dd897705d06de5d6dfdba7b67aef70365373ee02aacb1b18d8e41023e4e442f2"} Jan 22 14:34:41 crc kubenswrapper[4773]: I0122 14:34:41.641607 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kbs6b" event={"ID":"2caeb528-c83d-4535-be7d-e9b2afee7ff4","Type":"ContainerStarted","Data":"e5da3a802050ada273ded85a3d8de7d184a7567d34f7230451732eeb9c06723c"} Jan 22 14:34:42 crc kubenswrapper[4773]: I0122 14:34:42.798768 4773 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="47b6e6f2-15bb-47c7-bf7b-b4c925e27d36" containerName="ovn-northd" probeResult="failure" output="command timed out" Jan 22 14:34:42 crc kubenswrapper[4773]: I0122 14:34:42.799498 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/ovn-northd-0" podUID="47b6e6f2-15bb-47c7-bf7b-b4c925e27d36" containerName="ovn-northd" probeResult="failure" output="command timed out" Jan 22 14:34:43 crc kubenswrapper[4773]: I0122 14:34:43.865221 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-44b7l"] Jan 22 14:34:43 crc kubenswrapper[4773]: I0122 14:34:43.867728 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-44b7l" Jan 22 14:34:43 crc kubenswrapper[4773]: I0122 14:34:43.888150 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee930bb5-f551-4f82-b6bc-71b93585588a-catalog-content\") pod \"community-operators-44b7l\" (UID: \"ee930bb5-f551-4f82-b6bc-71b93585588a\") " pod="openshift-marketplace/community-operators-44b7l" Jan 22 14:34:43 crc kubenswrapper[4773]: I0122 14:34:43.888417 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghhgk\" (UniqueName: \"kubernetes.io/projected/ee930bb5-f551-4f82-b6bc-71b93585588a-kube-api-access-ghhgk\") pod \"community-operators-44b7l\" (UID: \"ee930bb5-f551-4f82-b6bc-71b93585588a\") " pod="openshift-marketplace/community-operators-44b7l" Jan 22 14:34:43 crc kubenswrapper[4773]: I0122 14:34:43.888496 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee930bb5-f551-4f82-b6bc-71b93585588a-utilities\") pod \"community-operators-44b7l\" (UID: \"ee930bb5-f551-4f82-b6bc-71b93585588a\") " pod="openshift-marketplace/community-operators-44b7l" Jan 22 14:34:43 crc kubenswrapper[4773]: I0122 14:34:43.893447 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-44b7l"] Jan 22 14:34:43 crc kubenswrapper[4773]: I0122 14:34:43.990353 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee930bb5-f551-4f82-b6bc-71b93585588a-utilities\") pod \"community-operators-44b7l\" (UID: \"ee930bb5-f551-4f82-b6bc-71b93585588a\") " pod="openshift-marketplace/community-operators-44b7l" Jan 22 14:34:43 crc kubenswrapper[4773]: I0122 14:34:43.990473 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee930bb5-f551-4f82-b6bc-71b93585588a-catalog-content\") pod \"community-operators-44b7l\" (UID: \"ee930bb5-f551-4f82-b6bc-71b93585588a\") " pod="openshift-marketplace/community-operators-44b7l" Jan 22 14:34:43 crc kubenswrapper[4773]: I0122 14:34:43.990652 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghhgk\" (UniqueName: \"kubernetes.io/projected/ee930bb5-f551-4f82-b6bc-71b93585588a-kube-api-access-ghhgk\") pod \"community-operators-44b7l\" (UID: \"ee930bb5-f551-4f82-b6bc-71b93585588a\") " pod="openshift-marketplace/community-operators-44b7l" Jan 22 14:34:43 crc kubenswrapper[4773]: I0122 14:34:43.990974 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee930bb5-f551-4f82-b6bc-71b93585588a-utilities\") pod \"community-operators-44b7l\" (UID: \"ee930bb5-f551-4f82-b6bc-71b93585588a\") " pod="openshift-marketplace/community-operators-44b7l" Jan 22 14:34:43 crc kubenswrapper[4773]: I0122 14:34:43.991037 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee930bb5-f551-4f82-b6bc-71b93585588a-catalog-content\") pod \"community-operators-44b7l\" (UID: \"ee930bb5-f551-4f82-b6bc-71b93585588a\") " pod="openshift-marketplace/community-operators-44b7l" Jan 22 14:34:44 crc kubenswrapper[4773]: I0122 14:34:44.021145 4773 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ghhgk\" (UniqueName: \"kubernetes.io/projected/ee930bb5-f551-4f82-b6bc-71b93585588a-kube-api-access-ghhgk\") pod \"community-operators-44b7l\" (UID: \"ee930bb5-f551-4f82-b6bc-71b93585588a\") " pod="openshift-marketplace/community-operators-44b7l" Jan 22 14:34:44 crc kubenswrapper[4773]: I0122 14:34:44.196912 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-44b7l" Jan 22 14:34:44 crc kubenswrapper[4773]: I0122 14:34:44.726493 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-44b7l"] Jan 22 14:34:44 crc kubenswrapper[4773]: W0122 14:34:44.727381 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee930bb5_f551_4f82_b6bc_71b93585588a.slice/crio-596857adb958fd42db8cdd5ab2a7f815cbbc0403af21b8714d9f426b939dcd4a WatchSource:0}: Error finding container 596857adb958fd42db8cdd5ab2a7f815cbbc0403af21b8714d9f426b939dcd4a: Status 404 returned error can't find the container with id 596857adb958fd42db8cdd5ab2a7f815cbbc0403af21b8714d9f426b939dcd4a Jan 22 14:34:45 crc kubenswrapper[4773]: I0122 14:34:45.678877 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-44b7l" event={"ID":"ee930bb5-f551-4f82-b6bc-71b93585588a","Type":"ContainerStarted","Data":"596857adb958fd42db8cdd5ab2a7f815cbbc0403af21b8714d9f426b939dcd4a"} Jan 22 14:34:46 crc kubenswrapper[4773]: I0122 14:34:46.690962 4773 generic.go:334] "Generic (PLEG): container finished" podID="ee930bb5-f551-4f82-b6bc-71b93585588a" containerID="951fc94a6e947f8052fb699e0c4e71803f720894ed63089a707cb01febabe918" exitCode=0 Jan 22 14:34:46 crc kubenswrapper[4773]: I0122 14:34:46.691024 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-44b7l" event={"ID":"ee930bb5-f551-4f82-b6bc-71b93585588a","Type":"ContainerDied","Data":"951fc94a6e947f8052fb699e0c4e71803f720894ed63089a707cb01febabe918"} Jan 22 14:34:46 crc kubenswrapper[4773]: I0122 14:34:46.695654 4773 generic.go:334] "Generic (PLEG): container finished" podID="2caeb528-c83d-4535-be7d-e9b2afee7ff4" containerID="e5da3a802050ada273ded85a3d8de7d184a7567d34f7230451732eeb9c06723c" exitCode=0 Jan 22 14:34:46 crc kubenswrapper[4773]: I0122 14:34:46.695693 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kbs6b" event={"ID":"2caeb528-c83d-4535-be7d-e9b2afee7ff4","Type":"ContainerDied","Data":"e5da3a802050ada273ded85a3d8de7d184a7567d34f7230451732eeb9c06723c"} Jan 22 14:34:47 crc kubenswrapper[4773]: I0122 14:34:47.707313 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-44b7l" event={"ID":"ee930bb5-f551-4f82-b6bc-71b93585588a","Type":"ContainerStarted","Data":"2cd9d02deecee3aadbc577a938933805977e2dfbe75f0ec569f00ee56dc18268"} Jan 22 14:34:47 crc kubenswrapper[4773]: I0122 14:34:47.709353 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kbs6b" event={"ID":"2caeb528-c83d-4535-be7d-e9b2afee7ff4","Type":"ContainerStarted","Data":"0aa6bec03af76bf15f3bc7798484e59a3a13e2eff118cc4c565adadf533bc9cd"} Jan 22 14:34:47 crc kubenswrapper[4773]: I0122 14:34:47.760963 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kbs6b" podStartSLOduration=3.260072806 
podStartE2EDuration="10.760936784s" podCreationTimestamp="2026-01-22 14:34:37 +0000 UTC" firstStartedPulling="2026-01-22 14:34:39.612072292 +0000 UTC m=+9587.190188117" lastFinishedPulling="2026-01-22 14:34:47.11293626 +0000 UTC m=+9594.691052095" observedRunningTime="2026-01-22 14:34:47.753421891 +0000 UTC m=+9595.331537736" watchObservedRunningTime="2026-01-22 14:34:47.760936784 +0000 UTC m=+9595.339052609" Jan 22 14:34:48 crc kubenswrapper[4773]: I0122 14:34:48.023325 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kbs6b" Jan 22 14:34:48 crc kubenswrapper[4773]: I0122 14:34:48.023707 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kbs6b" Jan 22 14:34:49 crc kubenswrapper[4773]: I0122 14:34:49.071801 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-kbs6b" podUID="2caeb528-c83d-4535-be7d-e9b2afee7ff4" containerName="registry-server" probeResult="failure" output=< Jan 22 14:34:49 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 14:34:49 crc kubenswrapper[4773]: > Jan 22 14:34:49 crc kubenswrapper[4773]: I0122 14:34:49.738124 4773 generic.go:334] "Generic (PLEG): container finished" podID="ee930bb5-f551-4f82-b6bc-71b93585588a" containerID="2cd9d02deecee3aadbc577a938933805977e2dfbe75f0ec569f00ee56dc18268" exitCode=0 Jan 22 14:34:49 crc kubenswrapper[4773]: I0122 14:34:49.738174 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-44b7l" event={"ID":"ee930bb5-f551-4f82-b6bc-71b93585588a","Type":"ContainerDied","Data":"2cd9d02deecee3aadbc577a938933805977e2dfbe75f0ec569f00ee56dc18268"} Jan 22 14:34:50 crc kubenswrapper[4773]: I0122 14:34:50.753238 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-44b7l" event={"ID":"ee930bb5-f551-4f82-b6bc-71b93585588a","Type":"ContainerStarted","Data":"29144f4b7ea44d448d0c3f0eecc3a615564b24211e5ff5eaafc056431e3d4342"} Jan 22 14:34:50 crc kubenswrapper[4773]: I0122 14:34:50.776427 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-44b7l" podStartSLOduration=4.299668222 podStartE2EDuration="7.776400604s" podCreationTimestamp="2026-01-22 14:34:43 +0000 UTC" firstStartedPulling="2026-01-22 14:34:46.693258556 +0000 UTC m=+9594.271374381" lastFinishedPulling="2026-01-22 14:34:50.169990938 +0000 UTC m=+9597.748106763" observedRunningTime="2026-01-22 14:34:50.768098889 +0000 UTC m=+9598.346214714" watchObservedRunningTime="2026-01-22 14:34:50.776400604 +0000 UTC m=+9598.354516429" Jan 22 14:34:54 crc kubenswrapper[4773]: I0122 14:34:54.197468 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-44b7l" Jan 22 14:34:54 crc kubenswrapper[4773]: I0122 14:34:54.198121 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-44b7l" Jan 22 14:34:54 crc kubenswrapper[4773]: I0122 14:34:54.276547 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-44b7l" Jan 22 14:34:59 crc kubenswrapper[4773]: I0122 14:34:59.166445 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-kbs6b" podUID="2caeb528-c83d-4535-be7d-e9b2afee7ff4" containerName="registry-server" 
probeResult="failure" output=< Jan 22 14:34:59 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 14:34:59 crc kubenswrapper[4773]: > Jan 22 14:35:04 crc kubenswrapper[4773]: I0122 14:35:04.074878 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:35:04 crc kubenswrapper[4773]: I0122 14:35:04.075620 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:35:04 crc kubenswrapper[4773]: I0122 14:35:04.263229 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-44b7l" Jan 22 14:35:04 crc kubenswrapper[4773]: I0122 14:35:04.316414 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-44b7l"] Jan 22 14:35:04 crc kubenswrapper[4773]: I0122 14:35:04.960446 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-44b7l" podUID="ee930bb5-f551-4f82-b6bc-71b93585588a" containerName="registry-server" containerID="cri-o://29144f4b7ea44d448d0c3f0eecc3a615564b24211e5ff5eaafc056431e3d4342" gracePeriod=2 Jan 22 14:35:05 crc kubenswrapper[4773]: I0122 14:35:05.986619 4773 generic.go:334] "Generic (PLEG): container finished" podID="ee930bb5-f551-4f82-b6bc-71b93585588a" containerID="29144f4b7ea44d448d0c3f0eecc3a615564b24211e5ff5eaafc056431e3d4342" exitCode=0 Jan 22 14:35:05 crc kubenswrapper[4773]: I0122 14:35:05.986706 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-44b7l" event={"ID":"ee930bb5-f551-4f82-b6bc-71b93585588a","Type":"ContainerDied","Data":"29144f4b7ea44d448d0c3f0eecc3a615564b24211e5ff5eaafc056431e3d4342"} Jan 22 14:35:06 crc kubenswrapper[4773]: I0122 14:35:06.126830 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-44b7l" Jan 22 14:35:06 crc kubenswrapper[4773]: I0122 14:35:06.248712 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee930bb5-f551-4f82-b6bc-71b93585588a-catalog-content\") pod \"ee930bb5-f551-4f82-b6bc-71b93585588a\" (UID: \"ee930bb5-f551-4f82-b6bc-71b93585588a\") " Jan 22 14:35:06 crc kubenswrapper[4773]: I0122 14:35:06.248860 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ghhgk\" (UniqueName: \"kubernetes.io/projected/ee930bb5-f551-4f82-b6bc-71b93585588a-kube-api-access-ghhgk\") pod \"ee930bb5-f551-4f82-b6bc-71b93585588a\" (UID: \"ee930bb5-f551-4f82-b6bc-71b93585588a\") " Jan 22 14:35:06 crc kubenswrapper[4773]: I0122 14:35:06.249019 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee930bb5-f551-4f82-b6bc-71b93585588a-utilities\") pod \"ee930bb5-f551-4f82-b6bc-71b93585588a\" (UID: \"ee930bb5-f551-4f82-b6bc-71b93585588a\") " Jan 22 14:35:06 crc kubenswrapper[4773]: I0122 14:35:06.249708 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee930bb5-f551-4f82-b6bc-71b93585588a-utilities" (OuterVolumeSpecName: "utilities") pod "ee930bb5-f551-4f82-b6bc-71b93585588a" (UID: "ee930bb5-f551-4f82-b6bc-71b93585588a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:35:06 crc kubenswrapper[4773]: I0122 14:35:06.255376 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee930bb5-f551-4f82-b6bc-71b93585588a-kube-api-access-ghhgk" (OuterVolumeSpecName: "kube-api-access-ghhgk") pod "ee930bb5-f551-4f82-b6bc-71b93585588a" (UID: "ee930bb5-f551-4f82-b6bc-71b93585588a"). InnerVolumeSpecName "kube-api-access-ghhgk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:35:06 crc kubenswrapper[4773]: I0122 14:35:06.301699 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee930bb5-f551-4f82-b6bc-71b93585588a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ee930bb5-f551-4f82-b6bc-71b93585588a" (UID: "ee930bb5-f551-4f82-b6bc-71b93585588a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:35:06 crc kubenswrapper[4773]: I0122 14:35:06.352180 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ghhgk\" (UniqueName: \"kubernetes.io/projected/ee930bb5-f551-4f82-b6bc-71b93585588a-kube-api-access-ghhgk\") on node \"crc\" DevicePath \"\"" Jan 22 14:35:06 crc kubenswrapper[4773]: I0122 14:35:06.352240 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee930bb5-f551-4f82-b6bc-71b93585588a-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:35:06 crc kubenswrapper[4773]: I0122 14:35:06.352259 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee930bb5-f551-4f82-b6bc-71b93585588a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:35:07 crc kubenswrapper[4773]: I0122 14:35:07.003976 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-44b7l" event={"ID":"ee930bb5-f551-4f82-b6bc-71b93585588a","Type":"ContainerDied","Data":"596857adb958fd42db8cdd5ab2a7f815cbbc0403af21b8714d9f426b939dcd4a"} Jan 22 14:35:07 crc kubenswrapper[4773]: I0122 14:35:07.004091 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-44b7l" Jan 22 14:35:07 crc kubenswrapper[4773]: I0122 14:35:07.004254 4773 scope.go:117] "RemoveContainer" containerID="29144f4b7ea44d448d0c3f0eecc3a615564b24211e5ff5eaafc056431e3d4342" Jan 22 14:35:07 crc kubenswrapper[4773]: I0122 14:35:07.045995 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-44b7l"] Jan 22 14:35:07 crc kubenswrapper[4773]: I0122 14:35:07.052757 4773 scope.go:117] "RemoveContainer" containerID="2cd9d02deecee3aadbc577a938933805977e2dfbe75f0ec569f00ee56dc18268" Jan 22 14:35:07 crc kubenswrapper[4773]: I0122 14:35:07.059149 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-44b7l"] Jan 22 14:35:07 crc kubenswrapper[4773]: I0122 14:35:07.081600 4773 scope.go:117] "RemoveContainer" containerID="951fc94a6e947f8052fb699e0c4e71803f720894ed63089a707cb01febabe918" Jan 22 14:35:08 crc kubenswrapper[4773]: I0122 14:35:08.113968 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kbs6b" Jan 22 14:35:08 crc kubenswrapper[4773]: I0122 14:35:08.171690 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kbs6b" Jan 22 14:35:08 crc kubenswrapper[4773]: I0122 14:35:08.671467 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee930bb5-f551-4f82-b6bc-71b93585588a" path="/var/lib/kubelet/pods/ee930bb5-f551-4f82-b6bc-71b93585588a/volumes" Jan 22 14:35:08 crc kubenswrapper[4773]: I0122 14:35:08.907370 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kbs6b"] Jan 22 14:35:10 crc kubenswrapper[4773]: I0122 14:35:10.066694 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kbs6b" podUID="2caeb528-c83d-4535-be7d-e9b2afee7ff4" containerName="registry-server" containerID="cri-o://0aa6bec03af76bf15f3bc7798484e59a3a13e2eff118cc4c565adadf533bc9cd" gracePeriod=2 Jan 22 14:35:10 crc kubenswrapper[4773]: I0122 14:35:10.569483 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kbs6b" Jan 22 14:35:10 crc kubenswrapper[4773]: I0122 14:35:10.658003 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7t5sw\" (UniqueName: \"kubernetes.io/projected/2caeb528-c83d-4535-be7d-e9b2afee7ff4-kube-api-access-7t5sw\") pod \"2caeb528-c83d-4535-be7d-e9b2afee7ff4\" (UID: \"2caeb528-c83d-4535-be7d-e9b2afee7ff4\") " Jan 22 14:35:10 crc kubenswrapper[4773]: I0122 14:35:10.658094 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2caeb528-c83d-4535-be7d-e9b2afee7ff4-catalog-content\") pod \"2caeb528-c83d-4535-be7d-e9b2afee7ff4\" (UID: \"2caeb528-c83d-4535-be7d-e9b2afee7ff4\") " Jan 22 14:35:10 crc kubenswrapper[4773]: I0122 14:35:10.658233 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2caeb528-c83d-4535-be7d-e9b2afee7ff4-utilities\") pod \"2caeb528-c83d-4535-be7d-e9b2afee7ff4\" (UID: \"2caeb528-c83d-4535-be7d-e9b2afee7ff4\") " Jan 22 14:35:10 crc kubenswrapper[4773]: I0122 14:35:10.659120 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2caeb528-c83d-4535-be7d-e9b2afee7ff4-utilities" (OuterVolumeSpecName: "utilities") pod "2caeb528-c83d-4535-be7d-e9b2afee7ff4" (UID: "2caeb528-c83d-4535-be7d-e9b2afee7ff4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:35:10 crc kubenswrapper[4773]: I0122 14:35:10.660111 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2caeb528-c83d-4535-be7d-e9b2afee7ff4-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:35:10 crc kubenswrapper[4773]: I0122 14:35:10.664161 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2caeb528-c83d-4535-be7d-e9b2afee7ff4-kube-api-access-7t5sw" (OuterVolumeSpecName: "kube-api-access-7t5sw") pod "2caeb528-c83d-4535-be7d-e9b2afee7ff4" (UID: "2caeb528-c83d-4535-be7d-e9b2afee7ff4"). InnerVolumeSpecName "kube-api-access-7t5sw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:35:10 crc kubenswrapper[4773]: I0122 14:35:10.762354 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7t5sw\" (UniqueName: \"kubernetes.io/projected/2caeb528-c83d-4535-be7d-e9b2afee7ff4-kube-api-access-7t5sw\") on node \"crc\" DevicePath \"\"" Jan 22 14:35:10 crc kubenswrapper[4773]: I0122 14:35:10.806599 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2caeb528-c83d-4535-be7d-e9b2afee7ff4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2caeb528-c83d-4535-be7d-e9b2afee7ff4" (UID: "2caeb528-c83d-4535-be7d-e9b2afee7ff4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:35:10 crc kubenswrapper[4773]: I0122 14:35:10.866021 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2caeb528-c83d-4535-be7d-e9b2afee7ff4-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:35:11 crc kubenswrapper[4773]: I0122 14:35:11.080477 4773 generic.go:334] "Generic (PLEG): container finished" podID="2caeb528-c83d-4535-be7d-e9b2afee7ff4" containerID="0aa6bec03af76bf15f3bc7798484e59a3a13e2eff118cc4c565adadf533bc9cd" exitCode=0 Jan 22 14:35:11 crc kubenswrapper[4773]: I0122 14:35:11.080517 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kbs6b" event={"ID":"2caeb528-c83d-4535-be7d-e9b2afee7ff4","Type":"ContainerDied","Data":"0aa6bec03af76bf15f3bc7798484e59a3a13e2eff118cc4c565adadf533bc9cd"} Jan 22 14:35:11 crc kubenswrapper[4773]: I0122 14:35:11.080544 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kbs6b" event={"ID":"2caeb528-c83d-4535-be7d-e9b2afee7ff4","Type":"ContainerDied","Data":"dd897705d06de5d6dfdba7b67aef70365373ee02aacb1b18d8e41023e4e442f2"} Jan 22 14:35:11 crc kubenswrapper[4773]: I0122 14:35:11.080562 4773 scope.go:117] "RemoveContainer" containerID="0aa6bec03af76bf15f3bc7798484e59a3a13e2eff118cc4c565adadf533bc9cd" Jan 22 14:35:11 crc kubenswrapper[4773]: I0122 14:35:11.080909 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kbs6b" Jan 22 14:35:11 crc kubenswrapper[4773]: I0122 14:35:11.129662 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kbs6b"] Jan 22 14:35:11 crc kubenswrapper[4773]: I0122 14:35:11.141694 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kbs6b"] Jan 22 14:35:11 crc kubenswrapper[4773]: I0122 14:35:11.144145 4773 scope.go:117] "RemoveContainer" containerID="e5da3a802050ada273ded85a3d8de7d184a7567d34f7230451732eeb9c06723c" Jan 22 14:35:11 crc kubenswrapper[4773]: I0122 14:35:11.170807 4773 scope.go:117] "RemoveContainer" containerID="9b731994317ddc03760be4cf5f84b98068d9df6ddf2bf4ba33ab2ffa28afe4ce" Jan 22 14:35:11 crc kubenswrapper[4773]: I0122 14:35:11.242821 4773 scope.go:117] "RemoveContainer" containerID="0aa6bec03af76bf15f3bc7798484e59a3a13e2eff118cc4c565adadf533bc9cd" Jan 22 14:35:11 crc kubenswrapper[4773]: E0122 14:35:11.243316 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0aa6bec03af76bf15f3bc7798484e59a3a13e2eff118cc4c565adadf533bc9cd\": container with ID starting with 0aa6bec03af76bf15f3bc7798484e59a3a13e2eff118cc4c565adadf533bc9cd not found: ID does not exist" containerID="0aa6bec03af76bf15f3bc7798484e59a3a13e2eff118cc4c565adadf533bc9cd" Jan 22 14:35:11 crc kubenswrapper[4773]: I0122 14:35:11.243396 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0aa6bec03af76bf15f3bc7798484e59a3a13e2eff118cc4c565adadf533bc9cd"} err="failed to get container status \"0aa6bec03af76bf15f3bc7798484e59a3a13e2eff118cc4c565adadf533bc9cd\": rpc error: code = NotFound desc = could not find container \"0aa6bec03af76bf15f3bc7798484e59a3a13e2eff118cc4c565adadf533bc9cd\": container with ID starting with 0aa6bec03af76bf15f3bc7798484e59a3a13e2eff118cc4c565adadf533bc9cd not found: ID does not exist" Jan 22 14:35:11 crc 
kubenswrapper[4773]: I0122 14:35:11.243441 4773 scope.go:117] "RemoveContainer" containerID="e5da3a802050ada273ded85a3d8de7d184a7567d34f7230451732eeb9c06723c" Jan 22 14:35:11 crc kubenswrapper[4773]: E0122 14:35:11.244385 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5da3a802050ada273ded85a3d8de7d184a7567d34f7230451732eeb9c06723c\": container with ID starting with e5da3a802050ada273ded85a3d8de7d184a7567d34f7230451732eeb9c06723c not found: ID does not exist" containerID="e5da3a802050ada273ded85a3d8de7d184a7567d34f7230451732eeb9c06723c" Jan 22 14:35:11 crc kubenswrapper[4773]: I0122 14:35:11.244430 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5da3a802050ada273ded85a3d8de7d184a7567d34f7230451732eeb9c06723c"} err="failed to get container status \"e5da3a802050ada273ded85a3d8de7d184a7567d34f7230451732eeb9c06723c\": rpc error: code = NotFound desc = could not find container \"e5da3a802050ada273ded85a3d8de7d184a7567d34f7230451732eeb9c06723c\": container with ID starting with e5da3a802050ada273ded85a3d8de7d184a7567d34f7230451732eeb9c06723c not found: ID does not exist" Jan 22 14:35:11 crc kubenswrapper[4773]: I0122 14:35:11.244457 4773 scope.go:117] "RemoveContainer" containerID="9b731994317ddc03760be4cf5f84b98068d9df6ddf2bf4ba33ab2ffa28afe4ce" Jan 22 14:35:11 crc kubenswrapper[4773]: E0122 14:35:11.245022 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b731994317ddc03760be4cf5f84b98068d9df6ddf2bf4ba33ab2ffa28afe4ce\": container with ID starting with 9b731994317ddc03760be4cf5f84b98068d9df6ddf2bf4ba33ab2ffa28afe4ce not found: ID does not exist" containerID="9b731994317ddc03760be4cf5f84b98068d9df6ddf2bf4ba33ab2ffa28afe4ce" Jan 22 14:35:11 crc kubenswrapper[4773]: I0122 14:35:11.245066 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b731994317ddc03760be4cf5f84b98068d9df6ddf2bf4ba33ab2ffa28afe4ce"} err="failed to get container status \"9b731994317ddc03760be4cf5f84b98068d9df6ddf2bf4ba33ab2ffa28afe4ce\": rpc error: code = NotFound desc = could not find container \"9b731994317ddc03760be4cf5f84b98068d9df6ddf2bf4ba33ab2ffa28afe4ce\": container with ID starting with 9b731994317ddc03760be4cf5f84b98068d9df6ddf2bf4ba33ab2ffa28afe4ce not found: ID does not exist" Jan 22 14:35:12 crc kubenswrapper[4773]: I0122 14:35:12.673686 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2caeb528-c83d-4535-be7d-e9b2afee7ff4" path="/var/lib/kubelet/pods/2caeb528-c83d-4535-be7d-e9b2afee7ff4/volumes" Jan 22 14:35:34 crc kubenswrapper[4773]: I0122 14:35:34.074212 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:35:34 crc kubenswrapper[4773]: I0122 14:35:34.075064 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:35:34 crc kubenswrapper[4773]: I0122 14:35:34.075142 4773 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 14:35:34 crc kubenswrapper[4773]: I0122 14:35:34.076579 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0c99cf5bd72a8c83907456a25951bd2d402249dde0e1031ee27528783149ed73"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 14:35:34 crc kubenswrapper[4773]: I0122 14:35:34.076687 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://0c99cf5bd72a8c83907456a25951bd2d402249dde0e1031ee27528783149ed73" gracePeriod=600 Jan 22 14:35:34 crc kubenswrapper[4773]: I0122 14:35:34.381600 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="0c99cf5bd72a8c83907456a25951bd2d402249dde0e1031ee27528783149ed73" exitCode=0 Jan 22 14:35:34 crc kubenswrapper[4773]: I0122 14:35:34.381682 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"0c99cf5bd72a8c83907456a25951bd2d402249dde0e1031ee27528783149ed73"} Jan 22 14:35:34 crc kubenswrapper[4773]: I0122 14:35:34.381844 4773 scope.go:117] "RemoveContainer" containerID="1838db7135f6f837bd6865c668a794a635091039e8b8ac8c3ad43ce2596ec187" Jan 22 14:35:35 crc kubenswrapper[4773]: I0122 14:35:35.397422 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446"} Jan 22 14:37:03 crc kubenswrapper[4773]: I0122 14:37:03.605036 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-qmnzk_220217e9-63dc-4215-b315-3c9290e38510/cert-manager-controller/0.log" Jan 22 14:37:03 crc kubenswrapper[4773]: I0122 14:37:03.633958 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-qnqwg_b6587a6b-06d7-43c0-8db4-dcc854df334d/cert-manager-cainjector/0.log" Jan 22 14:37:03 crc kubenswrapper[4773]: I0122 14:37:03.643152 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-dz5f5_e5163b9e-7df3-4616-a466-c36185e99397/cert-manager-webhook/0.log" Jan 22 14:37:09 crc kubenswrapper[4773]: I0122 14:37:09.006938 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-h2qxn_46c5b784-3baa-414b-b777-d2935b3d2056/nmstate-console-plugin/0.log" Jan 22 14:37:09 crc kubenswrapper[4773]: I0122 14:37:09.032805 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-xz7xx_7ee9761c-0ccc-47f6-bea9-bb66195bf7a8/nmstate-handler/0.log" Jan 22 14:37:09 crc kubenswrapper[4773]: I0122 14:37:09.050210 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-k2w8h_20770427-fb79-4ce3-b2c7-1914a3e7c366/nmstate-metrics/0.log" Jan 22 14:37:09 crc kubenswrapper[4773]: I0122 14:37:09.057639 4773 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-k2w8h_20770427-fb79-4ce3-b2c7-1914a3e7c366/kube-rbac-proxy/0.log" Jan 22 14:37:09 crc kubenswrapper[4773]: I0122 14:37:09.073176 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-sdxt4_ce53468d-145a-4861-bd2c-7ece42309269/nmstate-operator/0.log" Jan 22 14:37:09 crc kubenswrapper[4773]: I0122 14:37:09.085858 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-7v24g_288106d1-959b-4002-b615-77452081552a/nmstate-webhook/0.log" Jan 22 14:37:15 crc kubenswrapper[4773]: I0122 14:37:15.458840 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-sf2m2_20327f73-1346-42db-9404-cb8482061e15/prometheus-operator/0.log" Jan 22 14:37:15 crc kubenswrapper[4773]: I0122 14:37:15.476365 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc_49124815-040d-46dd-abaa-7f7dfe7e5d82/prometheus-operator-admission-webhook/0.log" Jan 22 14:37:15 crc kubenswrapper[4773]: I0122 14:37:15.492080 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq_eca96eb1-ac66-4fe6-ab49-8d7cd7f343c8/prometheus-operator-admission-webhook/0.log" Jan 22 14:37:15 crc kubenswrapper[4773]: I0122 14:37:15.529184 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-n2j8w_f2d527ad-aed8-41c2-95f7-e4399e497cc6/operator/0.log" Jan 22 14:37:15 crc kubenswrapper[4773]: I0122 14:37:15.543656 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-s5rtt_3dc143c2-3387-49e7-be22-1a05d37d8fea/perses-operator/0.log" Jan 22 14:37:22 crc kubenswrapper[4773]: I0122 14:37:22.659442 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-7vtfm_161e16b2-16b1-4485-aa07-4f8a766d347f/controller/0.log" Jan 22 14:37:22 crc kubenswrapper[4773]: I0122 14:37:22.666483 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-7vtfm_161e16b2-16b1-4485-aa07-4f8a766d347f/kube-rbac-proxy/0.log" Jan 22 14:37:22 crc kubenswrapper[4773]: I0122 14:37:22.685111 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/controller/0.log" Jan 22 14:37:25 crc kubenswrapper[4773]: I0122 14:37:25.914568 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/frr/0.log" Jan 22 14:37:25 crc kubenswrapper[4773]: I0122 14:37:25.928662 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/reloader/0.log" Jan 22 14:37:25 crc kubenswrapper[4773]: I0122 14:37:25.938424 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/frr-metrics/0.log" Jan 22 14:37:25 crc kubenswrapper[4773]: I0122 14:37:25.945247 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/kube-rbac-proxy/0.log" Jan 22 14:37:25 crc kubenswrapper[4773]: I0122 14:37:25.957597 4773 log.go:25] "Finished 
parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/kube-rbac-proxy-frr/0.log" Jan 22 14:37:25 crc kubenswrapper[4773]: I0122 14:37:25.966242 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/cp-frr-files/0.log" Jan 22 14:37:25 crc kubenswrapper[4773]: I0122 14:37:25.978801 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/cp-reloader/0.log" Jan 22 14:37:25 crc kubenswrapper[4773]: I0122 14:37:25.991889 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/cp-metrics/0.log" Jan 22 14:37:26 crc kubenswrapper[4773]: I0122 14:37:26.009225 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-s46dq_941adce7-fcb2-4191-920a-e9279f9ac0db/frr-k8s-webhook-server/0.log" Jan 22 14:37:26 crc kubenswrapper[4773]: I0122 14:37:26.036376 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-75c7758c8d-jv4lh_acc4b660-fb70-4713-ba23-597608298bc1/manager/0.log" Jan 22 14:37:26 crc kubenswrapper[4773]: I0122 14:37:26.046171 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-789dcb66b5-4k49c_3ada08ed-114c-4b1f-9110-9d4d60edaa27/webhook-server/0.log" Jan 22 14:37:26 crc kubenswrapper[4773]: I0122 14:37:26.915702 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8d28f_8f8f8131-f668-4d0c-be50-21c1eb7b62ff/speaker/0.log" Jan 22 14:37:26 crc kubenswrapper[4773]: I0122 14:37:26.928640 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8d28f_8f8f8131-f668-4d0c-be50-21c1eb7b62ff/kube-rbac-proxy/0.log" Jan 22 14:37:28 crc kubenswrapper[4773]: I0122 14:37:28.646575 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7_d833d986-4a39-441d-8e37-20974f894e6e/extract/0.log" Jan 22 14:37:28 crc kubenswrapper[4773]: I0122 14:37:28.655601 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7_d833d986-4a39-441d-8e37-20974f894e6e/util/0.log" Jan 22 14:37:28 crc kubenswrapper[4773]: I0122 14:37:28.699479 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arg9p7_d833d986-4a39-441d-8e37-20974f894e6e/pull/0.log" Jan 22 14:37:28 crc kubenswrapper[4773]: I0122 14:37:28.709633 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs_d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf/extract/0.log" Jan 22 14:37:28 crc kubenswrapper[4773]: I0122 14:37:28.724537 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs_d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf/util/0.log" Jan 22 14:37:28 crc kubenswrapper[4773]: I0122 14:37:28.798779 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcxx4fs_d0d6de0a-3747-4f22-bf08-6c1b3a96c1bf/pull/0.log" Jan 22 14:37:28 crc 
kubenswrapper[4773]: I0122 14:37:28.826417 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2_fe9f12c2-7f50-4f50-8df1-c21391452cbd/extract/0.log" Jan 22 14:37:28 crc kubenswrapper[4773]: I0122 14:37:28.833208 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2_fe9f12c2-7f50-4f50-8df1-c21391452cbd/util/0.log" Jan 22 14:37:28 crc kubenswrapper[4773]: I0122 14:37:28.853090 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71376mx2_fe9f12c2-7f50-4f50-8df1-c21391452cbd/pull/0.log" Jan 22 14:37:28 crc kubenswrapper[4773]: I0122 14:37:28.864132 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5_9f8be3bc-66a7-44ee-9eee-61aac695d0b0/extract/0.log" Jan 22 14:37:28 crc kubenswrapper[4773]: I0122 14:37:28.870264 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5_9f8be3bc-66a7-44ee-9eee-61aac695d0b0/util/0.log" Jan 22 14:37:28 crc kubenswrapper[4773]: I0122 14:37:28.888504 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08phkw5_9f8be3bc-66a7-44ee-9eee-61aac695d0b0/pull/0.log" Jan 22 14:37:30 crc kubenswrapper[4773]: I0122 14:37:30.082817 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-g2cq4_d8e60f79-505e-4b43-b8eb-7e04eb7f567d/registry-server/0.log" Jan 22 14:37:30 crc kubenswrapper[4773]: I0122 14:37:30.088724 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-g2cq4_d8e60f79-505e-4b43-b8eb-7e04eb7f567d/extract-utilities/0.log" Jan 22 14:37:30 crc kubenswrapper[4773]: I0122 14:37:30.094723 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-g2cq4_d8e60f79-505e-4b43-b8eb-7e04eb7f567d/extract-content/0.log" Jan 22 14:37:31 crc kubenswrapper[4773]: I0122 14:37:31.973529 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9xq9p_402b8ef2-9974-4dc0-bf8f-1259c87a71b7/registry-server/0.log" Jan 22 14:37:31 crc kubenswrapper[4773]: I0122 14:37:31.982948 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9xq9p_402b8ef2-9974-4dc0-bf8f-1259c87a71b7/extract-utilities/0.log" Jan 22 14:37:31 crc kubenswrapper[4773]: I0122 14:37:31.989680 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9xq9p_402b8ef2-9974-4dc0-bf8f-1259c87a71b7/extract-content/0.log" Jan 22 14:37:32 crc kubenswrapper[4773]: I0122 14:37:32.006479 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-mw22t_6e54030d-7ce7-4a13-b4a3-e67889e7c22d/marketplace-operator/0.log" Jan 22 14:37:32 crc kubenswrapper[4773]: I0122 14:37:32.358576 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wg84l_80014b89-9e88-4fd8-a019-619d5e6382aa/registry-server/0.log" Jan 22 14:37:32 crc kubenswrapper[4773]: I0122 14:37:32.364302 4773 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wg84l_80014b89-9e88-4fd8-a019-619d5e6382aa/extract-utilities/0.log" Jan 22 14:37:32 crc kubenswrapper[4773]: I0122 14:37:32.372806 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-wg84l_80014b89-9e88-4fd8-a019-619d5e6382aa/extract-content/0.log" Jan 22 14:37:33 crc kubenswrapper[4773]: I0122 14:37:33.601023 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-x8gvd_9b5e27c7-c979-4e6e-bf18-56d144b4256a/registry-server/0.log" Jan 22 14:37:33 crc kubenswrapper[4773]: I0122 14:37:33.607039 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-x8gvd_9b5e27c7-c979-4e6e-bf18-56d144b4256a/extract-utilities/0.log" Jan 22 14:37:33 crc kubenswrapper[4773]: I0122 14:37:33.616414 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-x8gvd_9b5e27c7-c979-4e6e-bf18-56d144b4256a/extract-content/0.log" Jan 22 14:37:34 crc kubenswrapper[4773]: I0122 14:37:34.074454 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:37:34 crc kubenswrapper[4773]: I0122 14:37:34.074537 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:37:35 crc kubenswrapper[4773]: I0122 14:37:35.125563 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-sf2m2_20327f73-1346-42db-9404-cb8482061e15/prometheus-operator/0.log" Jan 22 14:37:35 crc kubenswrapper[4773]: I0122 14:37:35.143423 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc_49124815-040d-46dd-abaa-7f7dfe7e5d82/prometheus-operator-admission-webhook/0.log" Jan 22 14:37:35 crc kubenswrapper[4773]: I0122 14:37:35.161463 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq_eca96eb1-ac66-4fe6-ab49-8d7cd7f343c8/prometheus-operator-admission-webhook/0.log" Jan 22 14:37:35 crc kubenswrapper[4773]: I0122 14:37:35.193728 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-n2j8w_f2d527ad-aed8-41c2-95f7-e4399e497cc6/operator/0.log" Jan 22 14:37:35 crc kubenswrapper[4773]: I0122 14:37:35.204932 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-s5rtt_3dc143c2-3387-49e7-be22-1a05d37d8fea/perses-operator/0.log" Jan 22 14:38:04 crc kubenswrapper[4773]: I0122 14:38:04.075462 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:38:04 crc kubenswrapper[4773]: I0122 14:38:04.075972 4773 
prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:38:34 crc kubenswrapper[4773]: I0122 14:38:34.074619 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:38:34 crc kubenswrapper[4773]: I0122 14:38:34.075094 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:38:34 crc kubenswrapper[4773]: I0122 14:38:34.075146 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 14:38:34 crc kubenswrapper[4773]: I0122 14:38:34.076049 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 14:38:34 crc kubenswrapper[4773]: I0122 14:38:34.076097 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" gracePeriod=600 Jan 22 14:38:34 crc kubenswrapper[4773]: E0122 14:38:34.786150 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:38:34 crc kubenswrapper[4773]: I0122 14:38:34.980846 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" exitCode=0 Jan 22 14:38:34 crc kubenswrapper[4773]: I0122 14:38:34.980891 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446"} Jan 22 14:38:34 crc kubenswrapper[4773]: I0122 14:38:34.980927 4773 scope.go:117] "RemoveContainer" containerID="0c99cf5bd72a8c83907456a25951bd2d402249dde0e1031ee27528783149ed73" Jan 22 14:38:34 crc kubenswrapper[4773]: I0122 14:38:34.981556 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 
14:38:34 crc kubenswrapper[4773]: E0122 14:38:34.981955 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:38:45 crc kubenswrapper[4773]: I0122 14:38:45.658206 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:38:45 crc kubenswrapper[4773]: E0122 14:38:45.658892 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:38:59 crc kubenswrapper[4773]: I0122 14:38:59.661443 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:38:59 crc kubenswrapper[4773]: E0122 14:38:59.662356 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:39:10 crc kubenswrapper[4773]: I0122 14:39:10.658995 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:39:10 crc kubenswrapper[4773]: E0122 14:39:10.659835 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:39:21 crc kubenswrapper[4773]: I0122 14:39:21.658504 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:39:21 crc kubenswrapper[4773]: E0122 14:39:21.659179 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:39:35 crc kubenswrapper[4773]: I0122 14:39:35.658161 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:39:35 crc kubenswrapper[4773]: E0122 14:39:35.659504 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:39:36 crc kubenswrapper[4773]: I0122 14:39:36.922263 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-sf2m2_20327f73-1346-42db-9404-cb8482061e15/prometheus-operator/0.log" Jan 22 14:39:36 crc kubenswrapper[4773]: I0122 14:39:36.933672 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-679cdf99cb-cs5nc_49124815-040d-46dd-abaa-7f7dfe7e5d82/prometheus-operator-admission-webhook/0.log" Jan 22 14:39:36 crc kubenswrapper[4773]: I0122 14:39:36.963487 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-679cdf99cb-fx9wq_eca96eb1-ac66-4fe6-ab49-8d7cd7f343c8/prometheus-operator-admission-webhook/0.log" Jan 22 14:39:36 crc kubenswrapper[4773]: I0122 14:39:36.989436 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-n2j8w_f2d527ad-aed8-41c2-95f7-e4399e497cc6/operator/0.log" Jan 22 14:39:36 crc kubenswrapper[4773]: I0122 14:39:36.998866 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-s5rtt_3dc143c2-3387-49e7-be22-1a05d37d8fea/perses-operator/0.log" Jan 22 14:39:37 crc kubenswrapper[4773]: I0122 14:39:37.136103 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-qmnzk_220217e9-63dc-4215-b315-3c9290e38510/cert-manager-controller/0.log" Jan 22 14:39:37 crc kubenswrapper[4773]: I0122 14:39:37.154774 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-qnqwg_b6587a6b-06d7-43c0-8db4-dcc854df334d/cert-manager-cainjector/0.log" Jan 22 14:39:37 crc kubenswrapper[4773]: I0122 14:39:37.162645 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-dz5f5_e5163b9e-7df3-4616-a466-c36185e99397/cert-manager-webhook/0.log" Jan 22 14:39:38 crc kubenswrapper[4773]: I0122 14:39:38.268576 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-59dd8b7cbf-9nlp4_73fae2b3-a45d-431f-9113-7f669d5eff6d/manager/0.log" Jan 22 14:39:38 crc kubenswrapper[4773]: I0122 14:39:38.363566 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-69cf5d4557-k7bqw_e0e2887e-f5a6-48e6-862c-593b909d5514/manager/0.log" Jan 22 14:39:38 crc kubenswrapper[4773]: I0122 14:39:38.383497 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-b45d7bf98-vkt5m_4429281b-22db-4df9-8c28-bb30e527b1f6/manager/0.log" Jan 22 14:39:38 crc kubenswrapper[4773]: I0122 14:39:38.400310 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh_8948768d-c93c-4a29-a93c-8c449a1980c3/extract/0.log" Jan 22 14:39:38 crc kubenswrapper[4773]: I0122 14:39:38.407691 4773 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh_8948768d-c93c-4a29-a93c-8c449a1980c3/util/0.log" Jan 22 14:39:38 crc kubenswrapper[4773]: I0122 14:39:38.427814 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh_8948768d-c93c-4a29-a93c-8c449a1980c3/pull/0.log" Jan 22 14:39:38 crc kubenswrapper[4773]: I0122 14:39:38.477094 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-7vtfm_161e16b2-16b1-4485-aa07-4f8a766d347f/controller/0.log" Jan 22 14:39:38 crc kubenswrapper[4773]: I0122 14:39:38.494906 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-7vtfm_161e16b2-16b1-4485-aa07-4f8a766d347f/kube-rbac-proxy/0.log" Jan 22 14:39:38 crc kubenswrapper[4773]: I0122 14:39:38.535451 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/controller/0.log" Jan 22 14:39:38 crc kubenswrapper[4773]: I0122 14:39:38.646158 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-78fdd796fd-lm8jc_a1965d1a-cb7f-4da6-90ab-1f75449e3e97/manager/0.log" Jan 22 14:39:38 crc kubenswrapper[4773]: I0122 14:39:38.726779 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-594c8c9d5d-br2hg_54683bfd-5bbf-47c6-9f3f-5bc4583ef5f0/manager/0.log" Jan 22 14:39:38 crc kubenswrapper[4773]: I0122 14:39:38.753056 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-77d5c5b54f-ztsqs_9aeccfa1-86f0-47e3-96c7-e0d018d24537/manager/0.log" Jan 22 14:39:39 crc kubenswrapper[4773]: I0122 14:39:39.590635 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-54ccf4f85d-dlzxj_29637656-53e6-4957-88ea-2445b706ec08/manager/0.log" Jan 22 14:39:39 crc kubenswrapper[4773]: I0122 14:39:39.959528 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-69d6c9f5b8-7mdlm_b6ef28d3-92bf-43a7-a577-c4ac162ab48a/manager/0.log" Jan 22 14:39:40 crc kubenswrapper[4773]: I0122 14:39:40.164666 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-b8b6d4659-x6894_02d2e417-7591-4d07-850e-4c670b40d1ea/manager/0.log" Jan 22 14:39:40 crc kubenswrapper[4773]: I0122 14:39:40.188103 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-78c6999f6f-sd745_cfe59bee-7ac2-4f1f-ac59-ab2ae4b1c24e/manager/0.log" Jan 22 14:39:40 crc kubenswrapper[4773]: I0122 14:39:40.270274 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-c87fff755-rjsjw_be3892a0-8b94-459b-9d05-9aae47107554/manager/0.log" Jan 22 14:39:40 crc kubenswrapper[4773]: I0122 14:39:40.368487 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5d8f59fb49-vlvhw_6e27f8a3-a214-47ec-9027-1a503d588e59/manager/0.log" Jan 22 14:39:40 crc kubenswrapper[4773]: I0122 14:39:40.579615 4773 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_nova-operator-controller-manager-6b8bc8d87d-t4ws7_639db363-b628-4c24-be20-57a1bf05c986/manager/0.log" Jan 22 14:39:40 crc kubenswrapper[4773]: I0122 14:39:40.708045 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7bd9774b6-qwf6d_85ad5e61-8c92-4856-a34c-7d02aadbbc43/manager/0.log" Jan 22 14:39:40 crc kubenswrapper[4773]: I0122 14:39:40.739627 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-7c9c58b55799pgj_fd720cc2-9948-4a4d-951f-17a20558e0e2/manager/0.log" Jan 22 14:39:41 crc kubenswrapper[4773]: I0122 14:39:41.026737 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-698d6bb84b-m5n97_5ddce1ac-9a5f-4096-b21f-77dc07b68c2d/operator/0.log" Jan 22 14:39:44 crc kubenswrapper[4773]: I0122 14:39:44.109786 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/frr/0.log" Jan 22 14:39:44 crc kubenswrapper[4773]: I0122 14:39:44.123544 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/reloader/0.log" Jan 22 14:39:44 crc kubenswrapper[4773]: I0122 14:39:44.128107 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/frr-metrics/0.log" Jan 22 14:39:44 crc kubenswrapper[4773]: I0122 14:39:44.144852 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/kube-rbac-proxy/0.log" Jan 22 14:39:44 crc kubenswrapper[4773]: I0122 14:39:44.159527 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/kube-rbac-proxy-frr/0.log" Jan 22 14:39:44 crc kubenswrapper[4773]: I0122 14:39:44.166365 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/cp-frr-files/0.log" Jan 22 14:39:44 crc kubenswrapper[4773]: I0122 14:39:44.173208 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/cp-reloader/0.log" Jan 22 14:39:44 crc kubenswrapper[4773]: I0122 14:39:44.181363 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-28242_574153bd-b3c2-4a73-81d2-15b68de060ca/cp-metrics/0.log" Jan 22 14:39:44 crc kubenswrapper[4773]: I0122 14:39:44.190850 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-s46dq_941adce7-fcb2-4191-920a-e9279f9ac0db/frr-k8s-webhook-server/0.log" Jan 22 14:39:44 crc kubenswrapper[4773]: I0122 14:39:44.222253 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-75c7758c8d-jv4lh_acc4b660-fb70-4713-ba23-597608298bc1/manager/0.log" Jan 22 14:39:44 crc kubenswrapper[4773]: I0122 14:39:44.233004 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-789dcb66b5-4k49c_3ada08ed-114c-4b1f-9110-9d4d60edaa27/webhook-server/0.log" Jan 22 14:39:44 crc kubenswrapper[4773]: I0122 14:39:44.549168 4773 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-788c8b99b5-fh84n_4947f3f4-af75-45ed-9481-f4c8f3e525d8/manager/0.log" Jan 22 14:39:44 crc kubenswrapper[4773]: I0122 14:39:44.825037 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-jvwbz_d86424e7-7383-4bbe-8610-2ac8fdc8143a/registry-server/0.log" Jan 22 14:39:44 crc kubenswrapper[4773]: I0122 14:39:44.971512 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-55db956ddc-lx75p_813b6b20-e15b-4da5-959a-d719f973a4e5/manager/0.log" Jan 22 14:39:45 crc kubenswrapper[4773]: I0122 14:39:45.015651 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5d646b7d76-h6ggd_592864bf-34a6-4335-9425-72386e772818/manager/0.log" Jan 22 14:39:45 crc kubenswrapper[4773]: I0122 14:39:45.051075 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-qgnch_7da5c99e-8a28-4671-b7fb-43ec8b4d6faf/operator/0.log" Jan 22 14:39:45 crc kubenswrapper[4773]: I0122 14:39:45.116699 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-547cbdb99f-msvwn_7e2cd62a-874a-4d1b-a706-439c0e7756c0/manager/0.log" Jan 22 14:39:45 crc kubenswrapper[4773]: I0122 14:39:45.392379 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8d28f_8f8f8131-f668-4d0c-be50-21c1eb7b62ff/speaker/0.log" Jan 22 14:39:45 crc kubenswrapper[4773]: I0122 14:39:45.412672 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-8d28f_8f8f8131-f668-4d0c-be50-21c1eb7b62ff/kube-rbac-proxy/0.log" Jan 22 14:39:45 crc kubenswrapper[4773]: I0122 14:39:45.422115 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-85cd9769bb-rx8n5_d4fe0850-2785-4433-8e0a-28efdea91b64/manager/0.log" Jan 22 14:39:45 crc kubenswrapper[4773]: I0122 14:39:45.434401 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-69797bbcbd-9gcmw_e817d93c-c5c3-44ed-92aa-e761dda7eaa9/manager/0.log" Jan 22 14:39:45 crc kubenswrapper[4773]: I0122 14:39:45.484776 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-5ffb9c6597-ptq9w_9731c1da-ba60-4e4a-af76-4e870c0f6e35/manager/0.log" Jan 22 14:39:46 crc kubenswrapper[4773]: I0122 14:39:46.389838 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-qmnzk_220217e9-63dc-4215-b315-3c9290e38510/cert-manager-controller/0.log" Jan 22 14:39:46 crc kubenswrapper[4773]: I0122 14:39:46.415324 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-qnqwg_b6587a6b-06d7-43c0-8db4-dcc854df334d/cert-manager-cainjector/0.log" Jan 22 14:39:46 crc kubenswrapper[4773]: I0122 14:39:46.425998 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-dz5f5_e5163b9e-7df3-4616-a466-c36185e99397/cert-manager-webhook/0.log" Jan 22 14:39:46 crc kubenswrapper[4773]: I0122 14:39:46.658210 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:39:46 crc kubenswrapper[4773]: E0122 14:39:46.658526 4773 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:39:47 crc kubenswrapper[4773]: I0122 14:39:47.074942 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-h2qxn_46c5b784-3baa-414b-b777-d2935b3d2056/nmstate-console-plugin/0.log" Jan 22 14:39:47 crc kubenswrapper[4773]: I0122 14:39:47.094377 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-xz7xx_7ee9761c-0ccc-47f6-bea9-bb66195bf7a8/nmstate-handler/0.log" Jan 22 14:39:47 crc kubenswrapper[4773]: I0122 14:39:47.102216 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-k2w8h_20770427-fb79-4ce3-b2c7-1914a3e7c366/nmstate-metrics/0.log" Jan 22 14:39:47 crc kubenswrapper[4773]: I0122 14:39:47.108398 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-k2w8h_20770427-fb79-4ce3-b2c7-1914a3e7c366/kube-rbac-proxy/0.log" Jan 22 14:39:47 crc kubenswrapper[4773]: I0122 14:39:47.125006 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-sdxt4_ce53468d-145a-4861-bd2c-7ece42309269/nmstate-operator/0.log" Jan 22 14:39:47 crc kubenswrapper[4773]: I0122 14:39:47.141658 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-7v24g_288106d1-959b-4002-b615-77452081552a/nmstate-webhook/0.log" Jan 22 14:39:47 crc kubenswrapper[4773]: I0122 14:39:47.180406 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-2hwmg_2e31d232-2138-4dd6-9b7d-71f87e414a01/control-plane-machine-set-operator/0.log" Jan 22 14:39:47 crc kubenswrapper[4773]: I0122 14:39:47.227450 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-jm24g_1a76da48-177b-429e-a136-d78afeae02aa/kube-rbac-proxy/0.log" Jan 22 14:39:47 crc kubenswrapper[4773]: I0122 14:39:47.244590 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-jm24g_1a76da48-177b-429e-a136-d78afeae02aa/machine-api-operator/0.log" Jan 22 14:39:48 crc kubenswrapper[4773]: I0122 14:39:48.444432 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-59dd8b7cbf-9nlp4_73fae2b3-a45d-431f-9113-7f669d5eff6d/manager/0.log" Jan 22 14:39:48 crc kubenswrapper[4773]: I0122 14:39:48.514595 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-69cf5d4557-k7bqw_e0e2887e-f5a6-48e6-862c-593b909d5514/manager/0.log" Jan 22 14:39:48 crc kubenswrapper[4773]: I0122 14:39:48.548538 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-b45d7bf98-vkt5m_4429281b-22db-4df9-8c28-bb30e527b1f6/manager/0.log" Jan 22 14:39:48 crc kubenswrapper[4773]: I0122 14:39:48.578497 4773 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh_8948768d-c93c-4a29-a93c-8c449a1980c3/extract/0.log" Jan 22 14:39:48 crc kubenswrapper[4773]: I0122 14:39:48.589626 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh_8948768d-c93c-4a29-a93c-8c449a1980c3/util/0.log" Jan 22 14:39:48 crc kubenswrapper[4773]: I0122 14:39:48.603564 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa7ace019a3f96c6dacf22bf83e494fa656797b1a183803f1a18d1a3f99g4jh_8948768d-c93c-4a29-a93c-8c449a1980c3/pull/0.log" Jan 22 14:39:48 crc kubenswrapper[4773]: I0122 14:39:48.740637 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-78fdd796fd-lm8jc_a1965d1a-cb7f-4da6-90ab-1f75449e3e97/manager/0.log" Jan 22 14:39:48 crc kubenswrapper[4773]: I0122 14:39:48.786341 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-594c8c9d5d-br2hg_54683bfd-5bbf-47c6-9f3f-5bc4583ef5f0/manager/0.log" Jan 22 14:39:48 crc kubenswrapper[4773]: I0122 14:39:48.805141 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-77d5c5b54f-ztsqs_9aeccfa1-86f0-47e3-96c7-e0d018d24537/manager/0.log" Jan 22 14:39:49 crc kubenswrapper[4773]: I0122 14:39:49.459307 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-54ccf4f85d-dlzxj_29637656-53e6-4957-88ea-2445b706ec08/manager/0.log" Jan 22 14:39:49 crc kubenswrapper[4773]: I0122 14:39:49.473011 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-69d6c9f5b8-7mdlm_b6ef28d3-92bf-43a7-a577-c4ac162ab48a/manager/0.log" Jan 22 14:39:49 crc kubenswrapper[4773]: I0122 14:39:49.597134 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-b8b6d4659-x6894_02d2e417-7591-4d07-850e-4c670b40d1ea/manager/0.log" Jan 22 14:39:49 crc kubenswrapper[4773]: I0122 14:39:49.610156 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-78c6999f6f-sd745_cfe59bee-7ac2-4f1f-ac59-ab2ae4b1c24e/manager/0.log" Jan 22 14:39:49 crc kubenswrapper[4773]: I0122 14:39:49.675462 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-c87fff755-rjsjw_be3892a0-8b94-459b-9d05-9aae47107554/manager/0.log" Jan 22 14:39:49 crc kubenswrapper[4773]: I0122 14:39:49.747625 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5d8f59fb49-vlvhw_6e27f8a3-a214-47ec-9027-1a503d588e59/manager/0.log" Jan 22 14:39:49 crc kubenswrapper[4773]: I0122 14:39:49.889272 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-6b8bc8d87d-t4ws7_639db363-b628-4c24-be20-57a1bf05c986/manager/0.log" Jan 22 14:39:49 crc kubenswrapper[4773]: I0122 14:39:49.944075 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-7bd9774b6-qwf6d_85ad5e61-8c92-4856-a34c-7d02aadbbc43/manager/0.log" Jan 22 14:39:49 crc kubenswrapper[4773]: I0122 14:39:49.965429 4773 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-7c9c58b55799pgj_fd720cc2-9948-4a4d-951f-17a20558e0e2/manager/0.log" Jan 22 14:39:50 crc kubenswrapper[4773]: I0122 14:39:50.183728 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-698d6bb84b-m5n97_5ddce1ac-9a5f-4096-b21f-77dc07b68c2d/operator/0.log" Jan 22 14:39:52 crc kubenswrapper[4773]: I0122 14:39:52.911831 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-788c8b99b5-fh84n_4947f3f4-af75-45ed-9481-f4c8f3e525d8/manager/0.log" Jan 22 14:39:53 crc kubenswrapper[4773]: I0122 14:39:53.088542 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-jvwbz_d86424e7-7383-4bbe-8610-2ac8fdc8143a/registry-server/0.log" Jan 22 14:39:53 crc kubenswrapper[4773]: I0122 14:39:53.186455 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-55db956ddc-lx75p_813b6b20-e15b-4da5-959a-d719f973a4e5/manager/0.log" Jan 22 14:39:53 crc kubenswrapper[4773]: I0122 14:39:53.226679 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5d646b7d76-h6ggd_592864bf-34a6-4335-9425-72386e772818/manager/0.log" Jan 22 14:39:53 crc kubenswrapper[4773]: I0122 14:39:53.259511 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-qgnch_7da5c99e-8a28-4671-b7fb-43ec8b4d6faf/operator/0.log" Jan 22 14:39:53 crc kubenswrapper[4773]: I0122 14:39:53.295400 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-547cbdb99f-msvwn_7e2cd62a-874a-4d1b-a706-439c0e7756c0/manager/0.log" Jan 22 14:39:53 crc kubenswrapper[4773]: I0122 14:39:53.543568 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-85cd9769bb-rx8n5_d4fe0850-2785-4433-8e0a-28efdea91b64/manager/0.log" Jan 22 14:39:53 crc kubenswrapper[4773]: I0122 14:39:53.555278 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-69797bbcbd-9gcmw_e817d93c-c5c3-44ed-92aa-e761dda7eaa9/manager/0.log" Jan 22 14:39:53 crc kubenswrapper[4773]: I0122 14:39:53.567101 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-5ffb9c6597-ptq9w_9731c1da-ba60-4e4a-af76-4e870c0f6e35/manager/0.log" Jan 22 14:39:56 crc kubenswrapper[4773]: I0122 14:39:56.178246 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-9bldd_6f173bdf-8981-4274-8d1b-ec68a44fefa7/kube-multus-additional-cni-plugins/0.log" Jan 22 14:39:56 crc kubenswrapper[4773]: I0122 14:39:56.190220 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-9bldd_6f173bdf-8981-4274-8d1b-ec68a44fefa7/egress-router-binary-copy/0.log" Jan 22 14:39:56 crc kubenswrapper[4773]: I0122 14:39:56.200271 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-9bldd_6f173bdf-8981-4274-8d1b-ec68a44fefa7/cni-plugins/0.log" Jan 22 14:39:56 crc kubenswrapper[4773]: I0122 14:39:56.209585 4773 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-9bldd_6f173bdf-8981-4274-8d1b-ec68a44fefa7/bond-cni-plugin/0.log" Jan 22 14:39:56 crc kubenswrapper[4773]: I0122 14:39:56.219592 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-9bldd_6f173bdf-8981-4274-8d1b-ec68a44fefa7/routeoverride-cni/0.log" Jan 22 14:39:56 crc kubenswrapper[4773]: I0122 14:39:56.231508 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-9bldd_6f173bdf-8981-4274-8d1b-ec68a44fefa7/whereabouts-cni-bincopy/0.log" Jan 22 14:39:56 crc kubenswrapper[4773]: I0122 14:39:56.238853 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-9bldd_6f173bdf-8981-4274-8d1b-ec68a44fefa7/whereabouts-cni/0.log" Jan 22 14:39:56 crc kubenswrapper[4773]: I0122 14:39:56.301641 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-admission-controller-857f4d67dd-r7gll_244704c5-6799-48ab-9c90-8c38ae7f3c5e/multus-admission-controller/0.log" Jan 22 14:39:56 crc kubenswrapper[4773]: I0122 14:39:56.313966 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-admission-controller-857f4d67dd-r7gll_244704c5-6799-48ab-9c90-8c38ae7f3c5e/kube-rbac-proxy/0.log" Jan 22 14:39:56 crc kubenswrapper[4773]: I0122 14:39:56.381131 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tsndt_73fd13f5-159b-444d-9d03-1e5fdd943673/kube-multus/2.log" Jan 22 14:39:56 crc kubenswrapper[4773]: I0122 14:39:56.547407 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tsndt_73fd13f5-159b-444d-9d03-1e5fdd943673/kube-multus/3.log" Jan 22 14:39:56 crc kubenswrapper[4773]: I0122 14:39:56.804986 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_network-metrics-daemon-5tqwr_c1ed4b8d-def5-474b-8629-cd0bae7e49a6/network-metrics-daemon/0.log" Jan 22 14:39:56 crc kubenswrapper[4773]: I0122 14:39:56.814680 4773 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_network-metrics-daemon-5tqwr_c1ed4b8d-def5-474b-8629-cd0bae7e49a6/kube-rbac-proxy/0.log" Jan 22 14:40:00 crc kubenswrapper[4773]: I0122 14:40:00.659437 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:40:00 crc kubenswrapper[4773]: E0122 14:40:00.660214 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:40:14 crc kubenswrapper[4773]: I0122 14:40:14.659988 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:40:14 crc kubenswrapper[4773]: E0122 14:40:14.660952 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:40:25 crc kubenswrapper[4773]: I0122 14:40:25.658536 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:40:25 crc kubenswrapper[4773]: E0122 14:40:25.659179 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:40:39 crc kubenswrapper[4773]: I0122 14:40:39.658479 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:40:39 crc kubenswrapper[4773]: E0122 14:40:39.660741 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:40:52 crc kubenswrapper[4773]: I0122 14:40:52.681617 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:40:52 crc kubenswrapper[4773]: E0122 14:40:52.683236 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:41:07 crc kubenswrapper[4773]: I0122 14:41:07.660277 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:41:07 crc kubenswrapper[4773]: E0122 14:41:07.661600 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.703374 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-57wdz"] Jan 22 14:41:09 crc kubenswrapper[4773]: E0122 14:41:09.705706 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee930bb5-f551-4f82-b6bc-71b93585588a" containerName="registry-server" Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.705877 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee930bb5-f551-4f82-b6bc-71b93585588a" containerName="registry-server" Jan 22 14:41:09 crc kubenswrapper[4773]: E0122 14:41:09.705991 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee930bb5-f551-4f82-b6bc-71b93585588a" 
containerName="extract-utilities" Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.706076 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee930bb5-f551-4f82-b6bc-71b93585588a" containerName="extract-utilities" Jan 22 14:41:09 crc kubenswrapper[4773]: E0122 14:41:09.706251 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2caeb528-c83d-4535-be7d-e9b2afee7ff4" containerName="registry-server" Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.706360 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2caeb528-c83d-4535-be7d-e9b2afee7ff4" containerName="registry-server" Jan 22 14:41:09 crc kubenswrapper[4773]: E0122 14:41:09.706463 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2caeb528-c83d-4535-be7d-e9b2afee7ff4" containerName="extract-utilities" Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.706546 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2caeb528-c83d-4535-be7d-e9b2afee7ff4" containerName="extract-utilities" Jan 22 14:41:09 crc kubenswrapper[4773]: E0122 14:41:09.706680 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2caeb528-c83d-4535-be7d-e9b2afee7ff4" containerName="extract-content" Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.706768 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="2caeb528-c83d-4535-be7d-e9b2afee7ff4" containerName="extract-content" Jan 22 14:41:09 crc kubenswrapper[4773]: E0122 14:41:09.706876 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee930bb5-f551-4f82-b6bc-71b93585588a" containerName="extract-content" Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.706974 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee930bb5-f551-4f82-b6bc-71b93585588a" containerName="extract-content" Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.707441 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="2caeb528-c83d-4535-be7d-e9b2afee7ff4" containerName="registry-server" Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.707628 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee930bb5-f551-4f82-b6bc-71b93585588a" containerName="registry-server" Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.711004 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-57wdz" Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.721023 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-57wdz"] Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.755630 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19508698-2a01-4abb-bb5c-4807fbb59636-utilities\") pod \"redhat-marketplace-57wdz\" (UID: \"19508698-2a01-4abb-bb5c-4807fbb59636\") " pod="openshift-marketplace/redhat-marketplace-57wdz" Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.768785 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19508698-2a01-4abb-bb5c-4807fbb59636-catalog-content\") pod \"redhat-marketplace-57wdz\" (UID: \"19508698-2a01-4abb-bb5c-4807fbb59636\") " pod="openshift-marketplace/redhat-marketplace-57wdz" Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.768900 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqvk2\" (UniqueName: \"kubernetes.io/projected/19508698-2a01-4abb-bb5c-4807fbb59636-kube-api-access-mqvk2\") pod \"redhat-marketplace-57wdz\" (UID: \"19508698-2a01-4abb-bb5c-4807fbb59636\") " pod="openshift-marketplace/redhat-marketplace-57wdz" Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.871421 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19508698-2a01-4abb-bb5c-4807fbb59636-catalog-content\") pod \"redhat-marketplace-57wdz\" (UID: \"19508698-2a01-4abb-bb5c-4807fbb59636\") " pod="openshift-marketplace/redhat-marketplace-57wdz" Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.871764 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqvk2\" (UniqueName: \"kubernetes.io/projected/19508698-2a01-4abb-bb5c-4807fbb59636-kube-api-access-mqvk2\") pod \"redhat-marketplace-57wdz\" (UID: \"19508698-2a01-4abb-bb5c-4807fbb59636\") " pod="openshift-marketplace/redhat-marketplace-57wdz" Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.872034 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19508698-2a01-4abb-bb5c-4807fbb59636-catalog-content\") pod \"redhat-marketplace-57wdz\" (UID: \"19508698-2a01-4abb-bb5c-4807fbb59636\") " pod="openshift-marketplace/redhat-marketplace-57wdz" Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.872051 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19508698-2a01-4abb-bb5c-4807fbb59636-utilities\") pod \"redhat-marketplace-57wdz\" (UID: \"19508698-2a01-4abb-bb5c-4807fbb59636\") " pod="openshift-marketplace/redhat-marketplace-57wdz" Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.872664 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19508698-2a01-4abb-bb5c-4807fbb59636-utilities\") pod \"redhat-marketplace-57wdz\" (UID: \"19508698-2a01-4abb-bb5c-4807fbb59636\") " pod="openshift-marketplace/redhat-marketplace-57wdz" Jan 22 14:41:09 crc kubenswrapper[4773]: I0122 14:41:09.900111 4773 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-mqvk2\" (UniqueName: \"kubernetes.io/projected/19508698-2a01-4abb-bb5c-4807fbb59636-kube-api-access-mqvk2\") pod \"redhat-marketplace-57wdz\" (UID: \"19508698-2a01-4abb-bb5c-4807fbb59636\") " pod="openshift-marketplace/redhat-marketplace-57wdz" Jan 22 14:41:10 crc kubenswrapper[4773]: I0122 14:41:10.066015 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-57wdz" Jan 22 14:41:10 crc kubenswrapper[4773]: I0122 14:41:10.594461 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-57wdz"] Jan 22 14:41:11 crc kubenswrapper[4773]: I0122 14:41:11.007632 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57wdz" event={"ID":"19508698-2a01-4abb-bb5c-4807fbb59636","Type":"ContainerStarted","Data":"b8febc5f0b6172bf6f00cfab381ff40b64c3a2a6838f9588fd3b446d1291bb31"} Jan 22 14:41:12 crc kubenswrapper[4773]: E0122 14:41:12.661700 4773 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19508698_2a01_4abb_bb5c_4807fbb59636.slice/crio-conmon-7046b8dae4c97575af394c7cac8e09bb504c735b89b3fecac40f7378945d132a.scope\": RecentStats: unable to find data in memory cache]" Jan 22 14:41:13 crc kubenswrapper[4773]: I0122 14:41:13.039232 4773 generic.go:334] "Generic (PLEG): container finished" podID="19508698-2a01-4abb-bb5c-4807fbb59636" containerID="7046b8dae4c97575af394c7cac8e09bb504c735b89b3fecac40f7378945d132a" exitCode=0 Jan 22 14:41:13 crc kubenswrapper[4773]: I0122 14:41:13.039304 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57wdz" event={"ID":"19508698-2a01-4abb-bb5c-4807fbb59636","Type":"ContainerDied","Data":"7046b8dae4c97575af394c7cac8e09bb504c735b89b3fecac40f7378945d132a"} Jan 22 14:41:13 crc kubenswrapper[4773]: I0122 14:41:13.042402 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 14:41:15 crc kubenswrapper[4773]: I0122 14:41:15.067371 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57wdz" event={"ID":"19508698-2a01-4abb-bb5c-4807fbb59636","Type":"ContainerStarted","Data":"becef746fc631ae6084f9084c7eb728159e0135e2ca354e4ea35be3981730b3b"} Jan 22 14:41:16 crc kubenswrapper[4773]: I0122 14:41:16.082587 4773 generic.go:334] "Generic (PLEG): container finished" podID="19508698-2a01-4abb-bb5c-4807fbb59636" containerID="becef746fc631ae6084f9084c7eb728159e0135e2ca354e4ea35be3981730b3b" exitCode=0 Jan 22 14:41:16 crc kubenswrapper[4773]: I0122 14:41:16.082854 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57wdz" event={"ID":"19508698-2a01-4abb-bb5c-4807fbb59636","Type":"ContainerDied","Data":"becef746fc631ae6084f9084c7eb728159e0135e2ca354e4ea35be3981730b3b"} Jan 22 14:41:17 crc kubenswrapper[4773]: I0122 14:41:17.100851 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57wdz" event={"ID":"19508698-2a01-4abb-bb5c-4807fbb59636","Type":"ContainerStarted","Data":"62379265cab5be863b9d9f3cd89b5b2d12000b33d2ed83cae548e59ed3308b5b"} Jan 22 14:41:20 crc kubenswrapper[4773]: I0122 14:41:20.066533 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-57wdz" Jan 22 14:41:20 
crc kubenswrapper[4773]: I0122 14:41:20.067180 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-57wdz" Jan 22 14:41:20 crc kubenswrapper[4773]: I0122 14:41:20.166885 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-57wdz" Jan 22 14:41:20 crc kubenswrapper[4773]: I0122 14:41:20.196707 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-57wdz" podStartSLOduration=7.7335740600000005 podStartE2EDuration="11.196679541s" podCreationTimestamp="2026-01-22 14:41:09 +0000 UTC" firstStartedPulling="2026-01-22 14:41:13.0421115 +0000 UTC m=+9980.620227335" lastFinishedPulling="2026-01-22 14:41:16.505216981 +0000 UTC m=+9984.083332816" observedRunningTime="2026-01-22 14:41:17.11857736 +0000 UTC m=+9984.696693205" watchObservedRunningTime="2026-01-22 14:41:20.196679541 +0000 UTC m=+9987.774795366" Jan 22 14:41:20 crc kubenswrapper[4773]: I0122 14:41:20.658708 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:41:20 crc kubenswrapper[4773]: E0122 14:41:20.659003 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:41:30 crc kubenswrapper[4773]: I0122 14:41:30.153737 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-57wdz" Jan 22 14:41:30 crc kubenswrapper[4773]: I0122 14:41:30.224711 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-57wdz"] Jan 22 14:41:30 crc kubenswrapper[4773]: I0122 14:41:30.267842 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-57wdz" podUID="19508698-2a01-4abb-bb5c-4807fbb59636" containerName="registry-server" containerID="cri-o://62379265cab5be863b9d9f3cd89b5b2d12000b33d2ed83cae548e59ed3308b5b" gracePeriod=2 Jan 22 14:41:30 crc kubenswrapper[4773]: I0122 14:41:30.767209 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-57wdz" Jan 22 14:41:30 crc kubenswrapper[4773]: I0122 14:41:30.875316 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19508698-2a01-4abb-bb5c-4807fbb59636-catalog-content\") pod \"19508698-2a01-4abb-bb5c-4807fbb59636\" (UID: \"19508698-2a01-4abb-bb5c-4807fbb59636\") " Jan 22 14:41:30 crc kubenswrapper[4773]: I0122 14:41:30.875736 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19508698-2a01-4abb-bb5c-4807fbb59636-utilities\") pod \"19508698-2a01-4abb-bb5c-4807fbb59636\" (UID: \"19508698-2a01-4abb-bb5c-4807fbb59636\") " Jan 22 14:41:30 crc kubenswrapper[4773]: I0122 14:41:30.875874 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqvk2\" (UniqueName: \"kubernetes.io/projected/19508698-2a01-4abb-bb5c-4807fbb59636-kube-api-access-mqvk2\") pod \"19508698-2a01-4abb-bb5c-4807fbb59636\" (UID: \"19508698-2a01-4abb-bb5c-4807fbb59636\") " Jan 22 14:41:30 crc kubenswrapper[4773]: I0122 14:41:30.876630 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19508698-2a01-4abb-bb5c-4807fbb59636-utilities" (OuterVolumeSpecName: "utilities") pod "19508698-2a01-4abb-bb5c-4807fbb59636" (UID: "19508698-2a01-4abb-bb5c-4807fbb59636"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:41:30 crc kubenswrapper[4773]: I0122 14:41:30.886240 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19508698-2a01-4abb-bb5c-4807fbb59636-kube-api-access-mqvk2" (OuterVolumeSpecName: "kube-api-access-mqvk2") pod "19508698-2a01-4abb-bb5c-4807fbb59636" (UID: "19508698-2a01-4abb-bb5c-4807fbb59636"). InnerVolumeSpecName "kube-api-access-mqvk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:41:30 crc kubenswrapper[4773]: I0122 14:41:30.900885 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19508698-2a01-4abb-bb5c-4807fbb59636-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "19508698-2a01-4abb-bb5c-4807fbb59636" (UID: "19508698-2a01-4abb-bb5c-4807fbb59636"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:41:30 crc kubenswrapper[4773]: I0122 14:41:30.978694 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19508698-2a01-4abb-bb5c-4807fbb59636-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:41:30 crc kubenswrapper[4773]: I0122 14:41:30.979066 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqvk2\" (UniqueName: \"kubernetes.io/projected/19508698-2a01-4abb-bb5c-4807fbb59636-kube-api-access-mqvk2\") on node \"crc\" DevicePath \"\"" Jan 22 14:41:30 crc kubenswrapper[4773]: I0122 14:41:30.979161 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19508698-2a01-4abb-bb5c-4807fbb59636-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:41:31 crc kubenswrapper[4773]: I0122 14:41:31.280643 4773 generic.go:334] "Generic (PLEG): container finished" podID="19508698-2a01-4abb-bb5c-4807fbb59636" containerID="62379265cab5be863b9d9f3cd89b5b2d12000b33d2ed83cae548e59ed3308b5b" exitCode=0 Jan 22 14:41:31 crc kubenswrapper[4773]: I0122 14:41:31.280732 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-57wdz" Jan 22 14:41:31 crc kubenswrapper[4773]: I0122 14:41:31.280758 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57wdz" event={"ID":"19508698-2a01-4abb-bb5c-4807fbb59636","Type":"ContainerDied","Data":"62379265cab5be863b9d9f3cd89b5b2d12000b33d2ed83cae548e59ed3308b5b"} Jan 22 14:41:31 crc kubenswrapper[4773]: I0122 14:41:31.282467 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-57wdz" event={"ID":"19508698-2a01-4abb-bb5c-4807fbb59636","Type":"ContainerDied","Data":"b8febc5f0b6172bf6f00cfab381ff40b64c3a2a6838f9588fd3b446d1291bb31"} Jan 22 14:41:31 crc kubenswrapper[4773]: I0122 14:41:31.282494 4773 scope.go:117] "RemoveContainer" containerID="62379265cab5be863b9d9f3cd89b5b2d12000b33d2ed83cae548e59ed3308b5b" Jan 22 14:41:31 crc kubenswrapper[4773]: I0122 14:41:31.307332 4773 scope.go:117] "RemoveContainer" containerID="becef746fc631ae6084f9084c7eb728159e0135e2ca354e4ea35be3981730b3b" Jan 22 14:41:31 crc kubenswrapper[4773]: I0122 14:41:31.336153 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-57wdz"] Jan 22 14:41:31 crc kubenswrapper[4773]: I0122 14:41:31.349572 4773 scope.go:117] "RemoveContainer" containerID="7046b8dae4c97575af394c7cac8e09bb504c735b89b3fecac40f7378945d132a" Jan 22 14:41:31 crc kubenswrapper[4773]: I0122 14:41:31.353950 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-57wdz"] Jan 22 14:41:31 crc kubenswrapper[4773]: I0122 14:41:31.442414 4773 scope.go:117] "RemoveContainer" containerID="62379265cab5be863b9d9f3cd89b5b2d12000b33d2ed83cae548e59ed3308b5b" Jan 22 14:41:31 crc kubenswrapper[4773]: E0122 14:41:31.443194 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62379265cab5be863b9d9f3cd89b5b2d12000b33d2ed83cae548e59ed3308b5b\": container with ID starting with 62379265cab5be863b9d9f3cd89b5b2d12000b33d2ed83cae548e59ed3308b5b not found: ID does not exist" containerID="62379265cab5be863b9d9f3cd89b5b2d12000b33d2ed83cae548e59ed3308b5b" Jan 22 14:41:31 crc kubenswrapper[4773]: I0122 14:41:31.443244 4773 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62379265cab5be863b9d9f3cd89b5b2d12000b33d2ed83cae548e59ed3308b5b"} err="failed to get container status \"62379265cab5be863b9d9f3cd89b5b2d12000b33d2ed83cae548e59ed3308b5b\": rpc error: code = NotFound desc = could not find container \"62379265cab5be863b9d9f3cd89b5b2d12000b33d2ed83cae548e59ed3308b5b\": container with ID starting with 62379265cab5be863b9d9f3cd89b5b2d12000b33d2ed83cae548e59ed3308b5b not found: ID does not exist" Jan 22 14:41:31 crc kubenswrapper[4773]: I0122 14:41:31.443320 4773 scope.go:117] "RemoveContainer" containerID="becef746fc631ae6084f9084c7eb728159e0135e2ca354e4ea35be3981730b3b" Jan 22 14:41:31 crc kubenswrapper[4773]: E0122 14:41:31.443722 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"becef746fc631ae6084f9084c7eb728159e0135e2ca354e4ea35be3981730b3b\": container with ID starting with becef746fc631ae6084f9084c7eb728159e0135e2ca354e4ea35be3981730b3b not found: ID does not exist" containerID="becef746fc631ae6084f9084c7eb728159e0135e2ca354e4ea35be3981730b3b" Jan 22 14:41:31 crc kubenswrapper[4773]: I0122 14:41:31.443779 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"becef746fc631ae6084f9084c7eb728159e0135e2ca354e4ea35be3981730b3b"} err="failed to get container status \"becef746fc631ae6084f9084c7eb728159e0135e2ca354e4ea35be3981730b3b\": rpc error: code = NotFound desc = could not find container \"becef746fc631ae6084f9084c7eb728159e0135e2ca354e4ea35be3981730b3b\": container with ID starting with becef746fc631ae6084f9084c7eb728159e0135e2ca354e4ea35be3981730b3b not found: ID does not exist" Jan 22 14:41:31 crc kubenswrapper[4773]: I0122 14:41:31.443816 4773 scope.go:117] "RemoveContainer" containerID="7046b8dae4c97575af394c7cac8e09bb504c735b89b3fecac40f7378945d132a" Jan 22 14:41:31 crc kubenswrapper[4773]: E0122 14:41:31.444562 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7046b8dae4c97575af394c7cac8e09bb504c735b89b3fecac40f7378945d132a\": container with ID starting with 7046b8dae4c97575af394c7cac8e09bb504c735b89b3fecac40f7378945d132a not found: ID does not exist" containerID="7046b8dae4c97575af394c7cac8e09bb504c735b89b3fecac40f7378945d132a" Jan 22 14:41:31 crc kubenswrapper[4773]: I0122 14:41:31.444590 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7046b8dae4c97575af394c7cac8e09bb504c735b89b3fecac40f7378945d132a"} err="failed to get container status \"7046b8dae4c97575af394c7cac8e09bb504c735b89b3fecac40f7378945d132a\": rpc error: code = NotFound desc = could not find container \"7046b8dae4c97575af394c7cac8e09bb504c735b89b3fecac40f7378945d132a\": container with ID starting with 7046b8dae4c97575af394c7cac8e09bb504c735b89b3fecac40f7378945d132a not found: ID does not exist" Jan 22 14:41:32 crc kubenswrapper[4773]: I0122 14:41:32.666319 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:41:32 crc kubenswrapper[4773]: E0122 14:41:32.667246 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:41:32 crc kubenswrapper[4773]: I0122 14:41:32.682514 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19508698-2a01-4abb-bb5c-4807fbb59636" path="/var/lib/kubelet/pods/19508698-2a01-4abb-bb5c-4807fbb59636/volumes" Jan 22 14:41:45 crc kubenswrapper[4773]: I0122 14:41:45.663035 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:41:45 crc kubenswrapper[4773]: E0122 14:41:45.666837 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:41:59 crc kubenswrapper[4773]: I0122 14:41:59.660621 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:41:59 crc kubenswrapper[4773]: E0122 14:41:59.662925 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:42:11 crc kubenswrapper[4773]: I0122 14:42:11.660033 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:42:11 crc kubenswrapper[4773]: E0122 14:42:11.661087 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:42:26 crc kubenswrapper[4773]: I0122 14:42:26.658351 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:42:26 crc kubenswrapper[4773]: E0122 14:42:26.659248 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:42:41 crc kubenswrapper[4773]: I0122 14:42:41.660526 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:42:41 crc kubenswrapper[4773]: E0122 14:42:41.663358 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 
5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:42:54 crc kubenswrapper[4773]: I0122 14:42:54.659465 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:42:54 crc kubenswrapper[4773]: E0122 14:42:54.660662 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:43:05 crc kubenswrapper[4773]: I0122 14:43:05.659754 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:43:05 crc kubenswrapper[4773]: E0122 14:43:05.660804 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:43:16 crc kubenswrapper[4773]: I0122 14:43:16.658844 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:43:16 crc kubenswrapper[4773]: E0122 14:43:16.659780 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:43:30 crc kubenswrapper[4773]: I0122 14:43:30.658634 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:43:30 crc kubenswrapper[4773]: E0122 14:43:30.659708 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:43:45 crc kubenswrapper[4773]: I0122 14:43:45.658618 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446" Jan 22 14:43:46 crc kubenswrapper[4773]: I0122 14:43:46.060169 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"1e1def94c3cc40653286bfda7cb9eb46166973f52128d43b13de1f5af22fd912"} Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.180687 4773 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484885-t5h9g"] Jan 22 14:45:00 crc kubenswrapper[4773]: E0122 14:45:00.182590 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19508698-2a01-4abb-bb5c-4807fbb59636" containerName="extract-content" Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.182625 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="19508698-2a01-4abb-bb5c-4807fbb59636" containerName="extract-content" Jan 22 14:45:00 crc kubenswrapper[4773]: E0122 14:45:00.182684 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19508698-2a01-4abb-bb5c-4807fbb59636" containerName="extract-utilities" Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.182702 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="19508698-2a01-4abb-bb5c-4807fbb59636" containerName="extract-utilities" Jan 22 14:45:00 crc kubenswrapper[4773]: E0122 14:45:00.182749 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19508698-2a01-4abb-bb5c-4807fbb59636" containerName="registry-server" Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.182765 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="19508698-2a01-4abb-bb5c-4807fbb59636" containerName="registry-server" Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.183368 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="19508698-2a01-4abb-bb5c-4807fbb59636" containerName="registry-server" Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.185060 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484885-t5h9g" Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.188368 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.188963 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.211512 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484885-t5h9g"] Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.238254 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da4e6db3-7ef7-4c6b-abba-396a3001ffeb-config-volume\") pod \"collect-profiles-29484885-t5h9g\" (UID: \"da4e6db3-7ef7-4c6b-abba-396a3001ffeb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484885-t5h9g" Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.238391 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6h88\" (UniqueName: \"kubernetes.io/projected/da4e6db3-7ef7-4c6b-abba-396a3001ffeb-kube-api-access-r6h88\") pod \"collect-profiles-29484885-t5h9g\" (UID: \"da4e6db3-7ef7-4c6b-abba-396a3001ffeb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484885-t5h9g" Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.238583 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/da4e6db3-7ef7-4c6b-abba-396a3001ffeb-secret-volume\") pod \"collect-profiles-29484885-t5h9g\" (UID: 
\"da4e6db3-7ef7-4c6b-abba-396a3001ffeb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484885-t5h9g" Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.341880 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/da4e6db3-7ef7-4c6b-abba-396a3001ffeb-secret-volume\") pod \"collect-profiles-29484885-t5h9g\" (UID: \"da4e6db3-7ef7-4c6b-abba-396a3001ffeb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484885-t5h9g" Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.342141 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da4e6db3-7ef7-4c6b-abba-396a3001ffeb-config-volume\") pod \"collect-profiles-29484885-t5h9g\" (UID: \"da4e6db3-7ef7-4c6b-abba-396a3001ffeb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484885-t5h9g" Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.342259 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6h88\" (UniqueName: \"kubernetes.io/projected/da4e6db3-7ef7-4c6b-abba-396a3001ffeb-kube-api-access-r6h88\") pod \"collect-profiles-29484885-t5h9g\" (UID: \"da4e6db3-7ef7-4c6b-abba-396a3001ffeb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484885-t5h9g" Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.343610 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da4e6db3-7ef7-4c6b-abba-396a3001ffeb-config-volume\") pod \"collect-profiles-29484885-t5h9g\" (UID: \"da4e6db3-7ef7-4c6b-abba-396a3001ffeb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484885-t5h9g" Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.352587 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/da4e6db3-7ef7-4c6b-abba-396a3001ffeb-secret-volume\") pod \"collect-profiles-29484885-t5h9g\" (UID: \"da4e6db3-7ef7-4c6b-abba-396a3001ffeb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484885-t5h9g" Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.367173 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6h88\" (UniqueName: \"kubernetes.io/projected/da4e6db3-7ef7-4c6b-abba-396a3001ffeb-kube-api-access-r6h88\") pod \"collect-profiles-29484885-t5h9g\" (UID: \"da4e6db3-7ef7-4c6b-abba-396a3001ffeb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484885-t5h9g" Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.509972 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484885-t5h9g" Jan 22 14:45:00 crc kubenswrapper[4773]: I0122 14:45:00.996728 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484885-t5h9g"] Jan 22 14:45:01 crc kubenswrapper[4773]: I0122 14:45:01.099174 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484885-t5h9g" event={"ID":"da4e6db3-7ef7-4c6b-abba-396a3001ffeb","Type":"ContainerStarted","Data":"5d0e8a6806c15f01d194b2f06e52a79f52f60cf5bd16777d76fe5fa6967d5a04"} Jan 22 14:45:02 crc kubenswrapper[4773]: I0122 14:45:02.111407 4773 generic.go:334] "Generic (PLEG): container finished" podID="da4e6db3-7ef7-4c6b-abba-396a3001ffeb" containerID="b5612a0042b87d76cc033d47fc9ef8b5246dd13642610be5a5ba7db19461807a" exitCode=0 Jan 22 14:45:02 crc kubenswrapper[4773]: I0122 14:45:02.111592 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484885-t5h9g" event={"ID":"da4e6db3-7ef7-4c6b-abba-396a3001ffeb","Type":"ContainerDied","Data":"b5612a0042b87d76cc033d47fc9ef8b5246dd13642610be5a5ba7db19461807a"} Jan 22 14:45:03 crc kubenswrapper[4773]: I0122 14:45:03.747931 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484885-t5h9g" Jan 22 14:45:03 crc kubenswrapper[4773]: I0122 14:45:03.842112 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da4e6db3-7ef7-4c6b-abba-396a3001ffeb-config-volume\") pod \"da4e6db3-7ef7-4c6b-abba-396a3001ffeb\" (UID: \"da4e6db3-7ef7-4c6b-abba-396a3001ffeb\") " Jan 22 14:45:03 crc kubenswrapper[4773]: I0122 14:45:03.842218 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6h88\" (UniqueName: \"kubernetes.io/projected/da4e6db3-7ef7-4c6b-abba-396a3001ffeb-kube-api-access-r6h88\") pod \"da4e6db3-7ef7-4c6b-abba-396a3001ffeb\" (UID: \"da4e6db3-7ef7-4c6b-abba-396a3001ffeb\") " Jan 22 14:45:03 crc kubenswrapper[4773]: I0122 14:45:03.842869 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da4e6db3-7ef7-4c6b-abba-396a3001ffeb-config-volume" (OuterVolumeSpecName: "config-volume") pod "da4e6db3-7ef7-4c6b-abba-396a3001ffeb" (UID: "da4e6db3-7ef7-4c6b-abba-396a3001ffeb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:45:03 crc kubenswrapper[4773]: I0122 14:45:03.843514 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/da4e6db3-7ef7-4c6b-abba-396a3001ffeb-secret-volume\") pod \"da4e6db3-7ef7-4c6b-abba-396a3001ffeb\" (UID: \"da4e6db3-7ef7-4c6b-abba-396a3001ffeb\") " Jan 22 14:45:03 crc kubenswrapper[4773]: I0122 14:45:03.844446 4773 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da4e6db3-7ef7-4c6b-abba-396a3001ffeb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 14:45:03 crc kubenswrapper[4773]: I0122 14:45:03.848734 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/da4e6db3-7ef7-4c6b-abba-396a3001ffeb-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "da4e6db3-7ef7-4c6b-abba-396a3001ffeb" (UID: "da4e6db3-7ef7-4c6b-abba-396a3001ffeb"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:45:03 crc kubenswrapper[4773]: I0122 14:45:03.848903 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da4e6db3-7ef7-4c6b-abba-396a3001ffeb-kube-api-access-r6h88" (OuterVolumeSpecName: "kube-api-access-r6h88") pod "da4e6db3-7ef7-4c6b-abba-396a3001ffeb" (UID: "da4e6db3-7ef7-4c6b-abba-396a3001ffeb"). InnerVolumeSpecName "kube-api-access-r6h88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:45:03 crc kubenswrapper[4773]: I0122 14:45:03.947858 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6h88\" (UniqueName: \"kubernetes.io/projected/da4e6db3-7ef7-4c6b-abba-396a3001ffeb-kube-api-access-r6h88\") on node \"crc\" DevicePath \"\"" Jan 22 14:45:03 crc kubenswrapper[4773]: I0122 14:45:03.947908 4773 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/da4e6db3-7ef7-4c6b-abba-396a3001ffeb-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 14:45:04 crc kubenswrapper[4773]: I0122 14:45:04.145199 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484885-t5h9g" event={"ID":"da4e6db3-7ef7-4c6b-abba-396a3001ffeb","Type":"ContainerDied","Data":"5d0e8a6806c15f01d194b2f06e52a79f52f60cf5bd16777d76fe5fa6967d5a04"} Jan 22 14:45:04 crc kubenswrapper[4773]: I0122 14:45:04.145256 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d0e8a6806c15f01d194b2f06e52a79f52f60cf5bd16777d76fe5fa6967d5a04" Jan 22 14:45:04 crc kubenswrapper[4773]: I0122 14:45:04.145340 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484885-t5h9g" Jan 22 14:45:04 crc kubenswrapper[4773]: I0122 14:45:04.857980 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp"] Jan 22 14:45:04 crc kubenswrapper[4773]: I0122 14:45:04.870904 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484840-q28qp"] Jan 22 14:45:06 crc kubenswrapper[4773]: I0122 14:45:06.671757 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="994bc68e-a0f4-4085-a0df-f647dc8c462c" path="/var/lib/kubelet/pods/994bc68e-a0f4-4085-a0df-f647dc8c462c/volumes" Jan 22 14:45:15 crc kubenswrapper[4773]: I0122 14:45:15.864453 4773 scope.go:117] "RemoveContainer" containerID="3d7e4dbaa2ed016e4d71f63074864e1db6834f31f392eb6317abfedba9f24df7" Jan 22 14:45:43 crc kubenswrapper[4773]: I0122 14:45:43.386257 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-45qfz"] Jan 22 14:45:43 crc kubenswrapper[4773]: E0122 14:45:43.387623 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da4e6db3-7ef7-4c6b-abba-396a3001ffeb" containerName="collect-profiles" Jan 22 14:45:43 crc kubenswrapper[4773]: I0122 14:45:43.387646 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="da4e6db3-7ef7-4c6b-abba-396a3001ffeb" containerName="collect-profiles" Jan 22 14:45:43 crc kubenswrapper[4773]: I0122 14:45:43.387992 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="da4e6db3-7ef7-4c6b-abba-396a3001ffeb" containerName="collect-profiles" Jan 22 14:45:43 crc kubenswrapper[4773]: I0122 14:45:43.391136 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-45qfz" Jan 22 14:45:43 crc kubenswrapper[4773]: I0122 14:45:43.427515 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-45qfz"] Jan 22 14:45:43 crc kubenswrapper[4773]: I0122 14:45:43.459588 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bdd02d51-c98d-43eb-ae4a-2fa6b03963a6-catalog-content\") pod \"community-operators-45qfz\" (UID: \"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6\") " pod="openshift-marketplace/community-operators-45qfz" Jan 22 14:45:43 crc kubenswrapper[4773]: I0122 14:45:43.459722 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xs2w7\" (UniqueName: \"kubernetes.io/projected/bdd02d51-c98d-43eb-ae4a-2fa6b03963a6-kube-api-access-xs2w7\") pod \"community-operators-45qfz\" (UID: \"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6\") " pod="openshift-marketplace/community-operators-45qfz" Jan 22 14:45:43 crc kubenswrapper[4773]: I0122 14:45:43.459750 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bdd02d51-c98d-43eb-ae4a-2fa6b03963a6-utilities\") pod \"community-operators-45qfz\" (UID: \"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6\") " pod="openshift-marketplace/community-operators-45qfz" Jan 22 14:45:43 crc kubenswrapper[4773]: I0122 14:45:43.562210 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bdd02d51-c98d-43eb-ae4a-2fa6b03963a6-catalog-content\") pod \"community-operators-45qfz\" (UID: \"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6\") " pod="openshift-marketplace/community-operators-45qfz" Jan 22 14:45:43 crc kubenswrapper[4773]: I0122 14:45:43.562622 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xs2w7\" (UniqueName: \"kubernetes.io/projected/bdd02d51-c98d-43eb-ae4a-2fa6b03963a6-kube-api-access-xs2w7\") pod \"community-operators-45qfz\" (UID: \"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6\") " pod="openshift-marketplace/community-operators-45qfz" Jan 22 14:45:43 crc kubenswrapper[4773]: I0122 14:45:43.563111 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bdd02d51-c98d-43eb-ae4a-2fa6b03963a6-catalog-content\") pod \"community-operators-45qfz\" (UID: \"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6\") " pod="openshift-marketplace/community-operators-45qfz" Jan 22 14:45:43 crc kubenswrapper[4773]: I0122 14:45:43.562658 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bdd02d51-c98d-43eb-ae4a-2fa6b03963a6-utilities\") pod \"community-operators-45qfz\" (UID: \"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6\") " pod="openshift-marketplace/community-operators-45qfz" Jan 22 14:45:43 crc kubenswrapper[4773]: I0122 14:45:43.564553 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bdd02d51-c98d-43eb-ae4a-2fa6b03963a6-utilities\") pod \"community-operators-45qfz\" (UID: \"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6\") " pod="openshift-marketplace/community-operators-45qfz" Jan 22 14:45:43 crc kubenswrapper[4773]: I0122 14:45:43.584648 4773 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-xs2w7\" (UniqueName: \"kubernetes.io/projected/bdd02d51-c98d-43eb-ae4a-2fa6b03963a6-kube-api-access-xs2w7\") pod \"community-operators-45qfz\" (UID: \"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6\") " pod="openshift-marketplace/community-operators-45qfz" Jan 22 14:45:43 crc kubenswrapper[4773]: I0122 14:45:43.726355 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-45qfz" Jan 22 14:45:44 crc kubenswrapper[4773]: I0122 14:45:44.319373 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-45qfz"] Jan 22 14:45:45 crc kubenswrapper[4773]: I0122 14:45:45.004447 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45qfz" event={"ID":"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6","Type":"ContainerStarted","Data":"e781bc39ec45bc43e0cf6796ae1a31a283009d7a8506411fbaa2c4a8a48ce130"} Jan 22 14:45:45 crc kubenswrapper[4773]: I0122 14:45:45.004713 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45qfz" event={"ID":"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6","Type":"ContainerStarted","Data":"142e94fa1500f4edb5e6cd5e5ce33de6d7639dbaa98ae247e15b47c5c01cf5c4"} Jan 22 14:45:46 crc kubenswrapper[4773]: I0122 14:45:46.024510 4773 generic.go:334] "Generic (PLEG): container finished" podID="bdd02d51-c98d-43eb-ae4a-2fa6b03963a6" containerID="e781bc39ec45bc43e0cf6796ae1a31a283009d7a8506411fbaa2c4a8a48ce130" exitCode=0 Jan 22 14:45:46 crc kubenswrapper[4773]: I0122 14:45:46.024750 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45qfz" event={"ID":"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6","Type":"ContainerDied","Data":"e781bc39ec45bc43e0cf6796ae1a31a283009d7a8506411fbaa2c4a8a48ce130"} Jan 22 14:45:48 crc kubenswrapper[4773]: I0122 14:45:48.064692 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45qfz" event={"ID":"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6","Type":"ContainerStarted","Data":"ceebacc0855dedc6d479cdd44b0f30d9c1e1386031931703b7ec9dc0cb893b56"} Jan 22 14:45:49 crc kubenswrapper[4773]: I0122 14:45:49.081664 4773 generic.go:334] "Generic (PLEG): container finished" podID="bdd02d51-c98d-43eb-ae4a-2fa6b03963a6" containerID="ceebacc0855dedc6d479cdd44b0f30d9c1e1386031931703b7ec9dc0cb893b56" exitCode=0 Jan 22 14:45:49 crc kubenswrapper[4773]: I0122 14:45:49.081728 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45qfz" event={"ID":"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6","Type":"ContainerDied","Data":"ceebacc0855dedc6d479cdd44b0f30d9c1e1386031931703b7ec9dc0cb893b56"} Jan 22 14:45:52 crc kubenswrapper[4773]: I0122 14:45:52.189456 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45qfz" event={"ID":"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6","Type":"ContainerStarted","Data":"208a8e5c2797d21f2159de6417c7703868a2fe6e348a74b4ec7fc4842a7ff107"} Jan 22 14:45:52 crc kubenswrapper[4773]: I0122 14:45:52.232409 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-45qfz" podStartSLOduration=4.323857897 podStartE2EDuration="9.232385629s" podCreationTimestamp="2026-01-22 14:45:43 +0000 UTC" firstStartedPulling="2026-01-22 14:45:46.028515169 +0000 UTC m=+10253.606630994" lastFinishedPulling="2026-01-22 
Jan 22 14:45:53 crc kubenswrapper[4773]: I0122 14:45:53.953927 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-45qfz"
Jan 22 14:45:53 crc kubenswrapper[4773]: I0122 14:45:53.954241 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-45qfz"
Jan 22 14:45:54 crc kubenswrapper[4773]: I0122 14:45:54.109696 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-45qfz"
Jan 22 14:46:03 crc kubenswrapper[4773]: I0122 14:46:03.793170 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-45qfz"
Jan 22 14:46:03 crc kubenswrapper[4773]: I0122 14:46:03.850026 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-45qfz"]
Jan 22 14:46:04 crc kubenswrapper[4773]: I0122 14:46:04.075356 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 14:46:04 crc kubenswrapper[4773]: I0122 14:46:04.075457 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 14:46:04 crc kubenswrapper[4773]: I0122 14:46:04.618171 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-45qfz" podUID="bdd02d51-c98d-43eb-ae4a-2fa6b03963a6" containerName="registry-server" containerID="cri-o://208a8e5c2797d21f2159de6417c7703868a2fe6e348a74b4ec7fc4842a7ff107" gracePeriod=2
Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.180564 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-45qfz" Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.363410 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xs2w7\" (UniqueName: \"kubernetes.io/projected/bdd02d51-c98d-43eb-ae4a-2fa6b03963a6-kube-api-access-xs2w7\") pod \"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6\" (UID: \"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6\") " Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.363508 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bdd02d51-c98d-43eb-ae4a-2fa6b03963a6-utilities\") pod \"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6\" (UID: \"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6\") " Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.363590 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bdd02d51-c98d-43eb-ae4a-2fa6b03963a6-catalog-content\") pod \"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6\" (UID: \"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6\") " Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.364722 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bdd02d51-c98d-43eb-ae4a-2fa6b03963a6-utilities" (OuterVolumeSpecName: "utilities") pod "bdd02d51-c98d-43eb-ae4a-2fa6b03963a6" (UID: "bdd02d51-c98d-43eb-ae4a-2fa6b03963a6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.370148 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bdd02d51-c98d-43eb-ae4a-2fa6b03963a6-kube-api-access-xs2w7" (OuterVolumeSpecName: "kube-api-access-xs2w7") pod "bdd02d51-c98d-43eb-ae4a-2fa6b03963a6" (UID: "bdd02d51-c98d-43eb-ae4a-2fa6b03963a6"). InnerVolumeSpecName "kube-api-access-xs2w7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.431327 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bdd02d51-c98d-43eb-ae4a-2fa6b03963a6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bdd02d51-c98d-43eb-ae4a-2fa6b03963a6" (UID: "bdd02d51-c98d-43eb-ae4a-2fa6b03963a6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.466176 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xs2w7\" (UniqueName: \"kubernetes.io/projected/bdd02d51-c98d-43eb-ae4a-2fa6b03963a6-kube-api-access-xs2w7\") on node \"crc\" DevicePath \"\"" Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.466209 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bdd02d51-c98d-43eb-ae4a-2fa6b03963a6-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.466232 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bdd02d51-c98d-43eb-ae4a-2fa6b03963a6-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.631148 4773 generic.go:334] "Generic (PLEG): container finished" podID="bdd02d51-c98d-43eb-ae4a-2fa6b03963a6" containerID="208a8e5c2797d21f2159de6417c7703868a2fe6e348a74b4ec7fc4842a7ff107" exitCode=0 Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.631207 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45qfz" event={"ID":"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6","Type":"ContainerDied","Data":"208a8e5c2797d21f2159de6417c7703868a2fe6e348a74b4ec7fc4842a7ff107"} Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.631241 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-45qfz" Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.631322 4773 scope.go:117] "RemoveContainer" containerID="208a8e5c2797d21f2159de6417c7703868a2fe6e348a74b4ec7fc4842a7ff107" Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.631243 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45qfz" event={"ID":"bdd02d51-c98d-43eb-ae4a-2fa6b03963a6","Type":"ContainerDied","Data":"142e94fa1500f4edb5e6cd5e5ce33de6d7639dbaa98ae247e15b47c5c01cf5c4"} Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.684571 4773 scope.go:117] "RemoveContainer" containerID="ceebacc0855dedc6d479cdd44b0f30d9c1e1386031931703b7ec9dc0cb893b56" Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.692651 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-45qfz"] Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.705141 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-45qfz"] Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.716165 4773 scope.go:117] "RemoveContainer" containerID="e781bc39ec45bc43e0cf6796ae1a31a283009d7a8506411fbaa2c4a8a48ce130" Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.770434 4773 scope.go:117] "RemoveContainer" containerID="208a8e5c2797d21f2159de6417c7703868a2fe6e348a74b4ec7fc4842a7ff107" Jan 22 14:46:05 crc kubenswrapper[4773]: E0122 14:46:05.770970 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"208a8e5c2797d21f2159de6417c7703868a2fe6e348a74b4ec7fc4842a7ff107\": container with ID starting with 208a8e5c2797d21f2159de6417c7703868a2fe6e348a74b4ec7fc4842a7ff107 not found: ID does not exist" containerID="208a8e5c2797d21f2159de6417c7703868a2fe6e348a74b4ec7fc4842a7ff107" Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.771162 
Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.771325 4773 scope.go:117] "RemoveContainer" containerID="ceebacc0855dedc6d479cdd44b0f30d9c1e1386031931703b7ec9dc0cb893b56"
Jan 22 14:46:05 crc kubenswrapper[4773]: E0122 14:46:05.772099 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ceebacc0855dedc6d479cdd44b0f30d9c1e1386031931703b7ec9dc0cb893b56\": container with ID starting with ceebacc0855dedc6d479cdd44b0f30d9c1e1386031931703b7ec9dc0cb893b56 not found: ID does not exist" containerID="ceebacc0855dedc6d479cdd44b0f30d9c1e1386031931703b7ec9dc0cb893b56"
Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.772138 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceebacc0855dedc6d479cdd44b0f30d9c1e1386031931703b7ec9dc0cb893b56"} err="failed to get container status \"ceebacc0855dedc6d479cdd44b0f30d9c1e1386031931703b7ec9dc0cb893b56\": rpc error: code = NotFound desc = could not find container \"ceebacc0855dedc6d479cdd44b0f30d9c1e1386031931703b7ec9dc0cb893b56\": container with ID starting with ceebacc0855dedc6d479cdd44b0f30d9c1e1386031931703b7ec9dc0cb893b56 not found: ID does not exist"
Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.772167 4773 scope.go:117] "RemoveContainer" containerID="e781bc39ec45bc43e0cf6796ae1a31a283009d7a8506411fbaa2c4a8a48ce130"
Jan 22 14:46:05 crc kubenswrapper[4773]: E0122 14:46:05.772550 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e781bc39ec45bc43e0cf6796ae1a31a283009d7a8506411fbaa2c4a8a48ce130\": container with ID starting with e781bc39ec45bc43e0cf6796ae1a31a283009d7a8506411fbaa2c4a8a48ce130 not found: ID does not exist" containerID="e781bc39ec45bc43e0cf6796ae1a31a283009d7a8506411fbaa2c4a8a48ce130"
Jan 22 14:46:05 crc kubenswrapper[4773]: I0122 14:46:05.772704 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e781bc39ec45bc43e0cf6796ae1a31a283009d7a8506411fbaa2c4a8a48ce130"} err="failed to get container status \"e781bc39ec45bc43e0cf6796ae1a31a283009d7a8506411fbaa2c4a8a48ce130\": rpc error: code = NotFound desc = could not find container \"e781bc39ec45bc43e0cf6796ae1a31a283009d7a8506411fbaa2c4a8a48ce130\": container with ID starting with e781bc39ec45bc43e0cf6796ae1a31a283009d7a8506411fbaa2c4a8a48ce130 not found: ID does not exist"
Jan 22 14:46:06 crc kubenswrapper[4773]: I0122 14:46:06.677809 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bdd02d51-c98d-43eb-ae4a-2fa6b03963a6" path="/var/lib/kubelet/pods/bdd02d51-c98d-43eb-ae4a-2fa6b03963a6/volumes"
Jan 22 14:46:32 crc kubenswrapper[4773]: I0122 14:46:32.988132 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-t9j8d"]
Jan 22 14:46:32 crc kubenswrapper[4773]: E0122 14:46:32.990652 4773 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="bdd02d51-c98d-43eb-ae4a-2fa6b03963a6" containerName="extract-content" Jan 22 14:46:32 crc kubenswrapper[4773]: I0122 14:46:32.990738 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdd02d51-c98d-43eb-ae4a-2fa6b03963a6" containerName="extract-content" Jan 22 14:46:32 crc kubenswrapper[4773]: E0122 14:46:32.990939 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdd02d51-c98d-43eb-ae4a-2fa6b03963a6" containerName="extract-utilities" Jan 22 14:46:32 crc kubenswrapper[4773]: I0122 14:46:32.990968 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdd02d51-c98d-43eb-ae4a-2fa6b03963a6" containerName="extract-utilities" Jan 22 14:46:32 crc kubenswrapper[4773]: E0122 14:46:32.991067 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdd02d51-c98d-43eb-ae4a-2fa6b03963a6" containerName="registry-server" Jan 22 14:46:32 crc kubenswrapper[4773]: I0122 14:46:32.991152 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdd02d51-c98d-43eb-ae4a-2fa6b03963a6" containerName="registry-server" Jan 22 14:46:32 crc kubenswrapper[4773]: I0122 14:46:32.992007 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdd02d51-c98d-43eb-ae4a-2fa6b03963a6" containerName="registry-server" Jan 22 14:46:32 crc kubenswrapper[4773]: I0122 14:46:32.995870 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t9j8d" Jan 22 14:46:33 crc kubenswrapper[4773]: I0122 14:46:33.008622 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t9j8d"] Jan 22 14:46:33 crc kubenswrapper[4773]: I0122 14:46:33.089964 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c22bde1b-c849-4be9-9f2b-140d51234bbb-catalog-content\") pod \"redhat-operators-t9j8d\" (UID: \"c22bde1b-c849-4be9-9f2b-140d51234bbb\") " pod="openshift-marketplace/redhat-operators-t9j8d" Jan 22 14:46:33 crc kubenswrapper[4773]: I0122 14:46:33.090109 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kt84\" (UniqueName: \"kubernetes.io/projected/c22bde1b-c849-4be9-9f2b-140d51234bbb-kube-api-access-4kt84\") pod \"redhat-operators-t9j8d\" (UID: \"c22bde1b-c849-4be9-9f2b-140d51234bbb\") " pod="openshift-marketplace/redhat-operators-t9j8d" Jan 22 14:46:33 crc kubenswrapper[4773]: I0122 14:46:33.090166 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c22bde1b-c849-4be9-9f2b-140d51234bbb-utilities\") pod \"redhat-operators-t9j8d\" (UID: \"c22bde1b-c849-4be9-9f2b-140d51234bbb\") " pod="openshift-marketplace/redhat-operators-t9j8d" Jan 22 14:46:33 crc kubenswrapper[4773]: I0122 14:46:33.191048 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kt84\" (UniqueName: \"kubernetes.io/projected/c22bde1b-c849-4be9-9f2b-140d51234bbb-kube-api-access-4kt84\") pod \"redhat-operators-t9j8d\" (UID: \"c22bde1b-c849-4be9-9f2b-140d51234bbb\") " pod="openshift-marketplace/redhat-operators-t9j8d" Jan 22 14:46:33 crc kubenswrapper[4773]: I0122 14:46:33.191130 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c22bde1b-c849-4be9-9f2b-140d51234bbb-utilities\") pod \"redhat-operators-t9j8d\" (UID: 
\"c22bde1b-c849-4be9-9f2b-140d51234bbb\") " pod="openshift-marketplace/redhat-operators-t9j8d" Jan 22 14:46:33 crc kubenswrapper[4773]: I0122 14:46:33.191213 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c22bde1b-c849-4be9-9f2b-140d51234bbb-catalog-content\") pod \"redhat-operators-t9j8d\" (UID: \"c22bde1b-c849-4be9-9f2b-140d51234bbb\") " pod="openshift-marketplace/redhat-operators-t9j8d" Jan 22 14:46:33 crc kubenswrapper[4773]: I0122 14:46:33.191721 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c22bde1b-c849-4be9-9f2b-140d51234bbb-catalog-content\") pod \"redhat-operators-t9j8d\" (UID: \"c22bde1b-c849-4be9-9f2b-140d51234bbb\") " pod="openshift-marketplace/redhat-operators-t9j8d" Jan 22 14:46:33 crc kubenswrapper[4773]: I0122 14:46:33.191949 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c22bde1b-c849-4be9-9f2b-140d51234bbb-utilities\") pod \"redhat-operators-t9j8d\" (UID: \"c22bde1b-c849-4be9-9f2b-140d51234bbb\") " pod="openshift-marketplace/redhat-operators-t9j8d" Jan 22 14:46:33 crc kubenswrapper[4773]: I0122 14:46:33.213132 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kt84\" (UniqueName: \"kubernetes.io/projected/c22bde1b-c849-4be9-9f2b-140d51234bbb-kube-api-access-4kt84\") pod \"redhat-operators-t9j8d\" (UID: \"c22bde1b-c849-4be9-9f2b-140d51234bbb\") " pod="openshift-marketplace/redhat-operators-t9j8d" Jan 22 14:46:33 crc kubenswrapper[4773]: I0122 14:46:33.374259 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t9j8d" Jan 22 14:46:33 crc kubenswrapper[4773]: I0122 14:46:33.905685 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t9j8d"] Jan 22 14:46:34 crc kubenswrapper[4773]: I0122 14:46:34.074904 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:46:34 crc kubenswrapper[4773]: I0122 14:46:34.074969 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:46:34 crc kubenswrapper[4773]: I0122 14:46:34.514618 4773 generic.go:334] "Generic (PLEG): container finished" podID="c22bde1b-c849-4be9-9f2b-140d51234bbb" containerID="0a9d833c39fde9fbd83f7ec4876c084379abb0188f3078de5674ad411aaa54e5" exitCode=0 Jan 22 14:46:34 crc kubenswrapper[4773]: I0122 14:46:34.514696 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t9j8d" event={"ID":"c22bde1b-c849-4be9-9f2b-140d51234bbb","Type":"ContainerDied","Data":"0a9d833c39fde9fbd83f7ec4876c084379abb0188f3078de5674ad411aaa54e5"} Jan 22 14:46:34 crc kubenswrapper[4773]: I0122 14:46:34.514874 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t9j8d" 
event={"ID":"c22bde1b-c849-4be9-9f2b-140d51234bbb","Type":"ContainerStarted","Data":"4c3e25779b690c6ce51692f291a891fd1d76a3f15af79f012228e3beca6e446c"} Jan 22 14:46:34 crc kubenswrapper[4773]: I0122 14:46:34.517416 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 14:46:37 crc kubenswrapper[4773]: I0122 14:46:37.781749 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t9j8d" event={"ID":"c22bde1b-c849-4be9-9f2b-140d51234bbb","Type":"ContainerStarted","Data":"795eb4f93fd7f24e3ac2fcfaae9109d109368e4898026b6b55c7513a69d52aa0"} Jan 22 14:46:42 crc kubenswrapper[4773]: I0122 14:46:42.098493 4773 generic.go:334] "Generic (PLEG): container finished" podID="c22bde1b-c849-4be9-9f2b-140d51234bbb" containerID="795eb4f93fd7f24e3ac2fcfaae9109d109368e4898026b6b55c7513a69d52aa0" exitCode=0 Jan 22 14:46:42 crc kubenswrapper[4773]: I0122 14:46:42.099005 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t9j8d" event={"ID":"c22bde1b-c849-4be9-9f2b-140d51234bbb","Type":"ContainerDied","Data":"795eb4f93fd7f24e3ac2fcfaae9109d109368e4898026b6b55c7513a69d52aa0"} Jan 22 14:46:45 crc kubenswrapper[4773]: I0122 14:46:45.145725 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t9j8d" event={"ID":"c22bde1b-c849-4be9-9f2b-140d51234bbb","Type":"ContainerStarted","Data":"d90ed35f35e937ff03019abe559818c7a33ed39d3f3befe63efeaf1fbbeaafe0"} Jan 22 14:46:45 crc kubenswrapper[4773]: I0122 14:46:45.177831 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-t9j8d" podStartSLOduration=3.5310020829999997 podStartE2EDuration="13.177793733s" podCreationTimestamp="2026-01-22 14:46:32 +0000 UTC" firstStartedPulling="2026-01-22 14:46:34.517149696 +0000 UTC m=+10302.095265521" lastFinishedPulling="2026-01-22 14:46:44.163941306 +0000 UTC m=+10311.742057171" observedRunningTime="2026-01-22 14:46:45.163866339 +0000 UTC m=+10312.741982194" watchObservedRunningTime="2026-01-22 14:46:45.177793733 +0000 UTC m=+10312.755909598" Jan 22 14:46:53 crc kubenswrapper[4773]: I0122 14:46:53.374569 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-t9j8d" Jan 22 14:46:53 crc kubenswrapper[4773]: I0122 14:46:53.375335 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-t9j8d" Jan 22 14:46:54 crc kubenswrapper[4773]: I0122 14:46:54.421183 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-t9j8d" podUID="c22bde1b-c849-4be9-9f2b-140d51234bbb" containerName="registry-server" probeResult="failure" output=< Jan 22 14:46:54 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 14:46:54 crc kubenswrapper[4773]: > Jan 22 14:47:03 crc kubenswrapper[4773]: I0122 14:47:03.476727 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-t9j8d" Jan 22 14:47:03 crc kubenswrapper[4773]: I0122 14:47:03.527535 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-t9j8d" Jan 22 14:47:04 crc kubenswrapper[4773]: I0122 14:47:04.074930 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: 
Jan 22 14:47:04 crc kubenswrapper[4773]: I0122 14:47:04.075325 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 14:47:04 crc kubenswrapper[4773]: I0122 14:47:04.075402 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5"
Jan 22 14:47:04 crc kubenswrapper[4773]: I0122 14:47:04.076537 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1e1def94c3cc40653286bfda7cb9eb46166973f52128d43b13de1f5af22fd912"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 22 14:47:04 crc kubenswrapper[4773]: I0122 14:47:04.076637 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://1e1def94c3cc40653286bfda7cb9eb46166973f52128d43b13de1f5af22fd912" gracePeriod=600
Jan 22 14:47:04 crc kubenswrapper[4773]: I0122 14:47:04.185517 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t9j8d"]
Jan 22 14:47:04 crc kubenswrapper[4773]: I0122 14:47:04.463118 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="1e1def94c3cc40653286bfda7cb9eb46166973f52128d43b13de1f5af22fd912" exitCode=0
Jan 22 14:47:04 crc kubenswrapper[4773]: I0122 14:47:04.463293 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"1e1def94c3cc40653286bfda7cb9eb46166973f52128d43b13de1f5af22fd912"}
Jan 22 14:47:04 crc kubenswrapper[4773]: I0122 14:47:04.463747 4773 scope.go:117] "RemoveContainer" containerID="d8b35343d8146409b96654f3c3f1cb05d8b2a90add9c6ba5e091a7d18a427446"
Jan 22 14:47:05 crc kubenswrapper[4773]: I0122 14:47:05.482353 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5"}
Jan 22 14:47:05 crc kubenswrapper[4773]: I0122 14:47:05.482485 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-t9j8d" podUID="c22bde1b-c849-4be9-9f2b-140d51234bbb" containerName="registry-server" containerID="cri-o://d90ed35f35e937ff03019abe559818c7a33ed39d3f3befe63efeaf1fbbeaafe0" gracePeriod=2
Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.137462 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-t9j8d" Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.287183 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c22bde1b-c849-4be9-9f2b-140d51234bbb-utilities\") pod \"c22bde1b-c849-4be9-9f2b-140d51234bbb\" (UID: \"c22bde1b-c849-4be9-9f2b-140d51234bbb\") " Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.287384 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c22bde1b-c849-4be9-9f2b-140d51234bbb-catalog-content\") pod \"c22bde1b-c849-4be9-9f2b-140d51234bbb\" (UID: \"c22bde1b-c849-4be9-9f2b-140d51234bbb\") " Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.287497 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kt84\" (UniqueName: \"kubernetes.io/projected/c22bde1b-c849-4be9-9f2b-140d51234bbb-kube-api-access-4kt84\") pod \"c22bde1b-c849-4be9-9f2b-140d51234bbb\" (UID: \"c22bde1b-c849-4be9-9f2b-140d51234bbb\") " Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.297125 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c22bde1b-c849-4be9-9f2b-140d51234bbb-kube-api-access-4kt84" (OuterVolumeSpecName: "kube-api-access-4kt84") pod "c22bde1b-c849-4be9-9f2b-140d51234bbb" (UID: "c22bde1b-c849-4be9-9f2b-140d51234bbb"). InnerVolumeSpecName "kube-api-access-4kt84". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.305664 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c22bde1b-c849-4be9-9f2b-140d51234bbb-utilities" (OuterVolumeSpecName: "utilities") pod "c22bde1b-c849-4be9-9f2b-140d51234bbb" (UID: "c22bde1b-c849-4be9-9f2b-140d51234bbb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.390411 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c22bde1b-c849-4be9-9f2b-140d51234bbb-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.390742 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kt84\" (UniqueName: \"kubernetes.io/projected/c22bde1b-c849-4be9-9f2b-140d51234bbb-kube-api-access-4kt84\") on node \"crc\" DevicePath \"\"" Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.431244 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c22bde1b-c849-4be9-9f2b-140d51234bbb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c22bde1b-c849-4be9-9f2b-140d51234bbb" (UID: "c22bde1b-c849-4be9-9f2b-140d51234bbb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.497428 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c22bde1b-c849-4be9-9f2b-140d51234bbb-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.511435 4773 generic.go:334] "Generic (PLEG): container finished" podID="c22bde1b-c849-4be9-9f2b-140d51234bbb" containerID="d90ed35f35e937ff03019abe559818c7a33ed39d3f3befe63efeaf1fbbeaafe0" exitCode=0 Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.513105 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t9j8d" Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.513276 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t9j8d" event={"ID":"c22bde1b-c849-4be9-9f2b-140d51234bbb","Type":"ContainerDied","Data":"d90ed35f35e937ff03019abe559818c7a33ed39d3f3befe63efeaf1fbbeaafe0"} Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.513335 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t9j8d" event={"ID":"c22bde1b-c849-4be9-9f2b-140d51234bbb","Type":"ContainerDied","Data":"4c3e25779b690c6ce51692f291a891fd1d76a3f15af79f012228e3beca6e446c"} Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.513357 4773 scope.go:117] "RemoveContainer" containerID="d90ed35f35e937ff03019abe559818c7a33ed39d3f3befe63efeaf1fbbeaafe0" Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.555944 4773 scope.go:117] "RemoveContainer" containerID="795eb4f93fd7f24e3ac2fcfaae9109d109368e4898026b6b55c7513a69d52aa0" Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.573548 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t9j8d"] Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.583323 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-t9j8d"] Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.598430 4773 scope.go:117] "RemoveContainer" containerID="0a9d833c39fde9fbd83f7ec4876c084379abb0188f3078de5674ad411aaa54e5" Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.663893 4773 scope.go:117] "RemoveContainer" containerID="d90ed35f35e937ff03019abe559818c7a33ed39d3f3befe63efeaf1fbbeaafe0" Jan 22 14:47:06 crc kubenswrapper[4773]: E0122 14:47:06.671264 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d90ed35f35e937ff03019abe559818c7a33ed39d3f3befe63efeaf1fbbeaafe0\": container with ID starting with d90ed35f35e937ff03019abe559818c7a33ed39d3f3befe63efeaf1fbbeaafe0 not found: ID does not exist" containerID="d90ed35f35e937ff03019abe559818c7a33ed39d3f3befe63efeaf1fbbeaafe0" Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.671330 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d90ed35f35e937ff03019abe559818c7a33ed39d3f3befe63efeaf1fbbeaafe0"} err="failed to get container status \"d90ed35f35e937ff03019abe559818c7a33ed39d3f3befe63efeaf1fbbeaafe0\": rpc error: code = NotFound desc = could not find container \"d90ed35f35e937ff03019abe559818c7a33ed39d3f3befe63efeaf1fbbeaafe0\": container with ID starting with d90ed35f35e937ff03019abe559818c7a33ed39d3f3befe63efeaf1fbbeaafe0 not found: ID does not exist" Jan 22 14:47:06 crc 
kubenswrapper[4773]: I0122 14:47:06.671374 4773 scope.go:117] "RemoveContainer" containerID="795eb4f93fd7f24e3ac2fcfaae9109d109368e4898026b6b55c7513a69d52aa0" Jan 22 14:47:06 crc kubenswrapper[4773]: E0122 14:47:06.671769 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"795eb4f93fd7f24e3ac2fcfaae9109d109368e4898026b6b55c7513a69d52aa0\": container with ID starting with 795eb4f93fd7f24e3ac2fcfaae9109d109368e4898026b6b55c7513a69d52aa0 not found: ID does not exist" containerID="795eb4f93fd7f24e3ac2fcfaae9109d109368e4898026b6b55c7513a69d52aa0" Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.671824 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"795eb4f93fd7f24e3ac2fcfaae9109d109368e4898026b6b55c7513a69d52aa0"} err="failed to get container status \"795eb4f93fd7f24e3ac2fcfaae9109d109368e4898026b6b55c7513a69d52aa0\": rpc error: code = NotFound desc = could not find container \"795eb4f93fd7f24e3ac2fcfaae9109d109368e4898026b6b55c7513a69d52aa0\": container with ID starting with 795eb4f93fd7f24e3ac2fcfaae9109d109368e4898026b6b55c7513a69d52aa0 not found: ID does not exist" Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.671859 4773 scope.go:117] "RemoveContainer" containerID="0a9d833c39fde9fbd83f7ec4876c084379abb0188f3078de5674ad411aaa54e5" Jan 22 14:47:06 crc kubenswrapper[4773]: E0122 14:47:06.672394 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a9d833c39fde9fbd83f7ec4876c084379abb0188f3078de5674ad411aaa54e5\": container with ID starting with 0a9d833c39fde9fbd83f7ec4876c084379abb0188f3078de5674ad411aaa54e5 not found: ID does not exist" containerID="0a9d833c39fde9fbd83f7ec4876c084379abb0188f3078de5674ad411aaa54e5" Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.672426 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a9d833c39fde9fbd83f7ec4876c084379abb0188f3078de5674ad411aaa54e5"} err="failed to get container status \"0a9d833c39fde9fbd83f7ec4876c084379abb0188f3078de5674ad411aaa54e5\": rpc error: code = NotFound desc = could not find container \"0a9d833c39fde9fbd83f7ec4876c084379abb0188f3078de5674ad411aaa54e5\": container with ID starting with 0a9d833c39fde9fbd83f7ec4876c084379abb0188f3078de5674ad411aaa54e5 not found: ID does not exist" Jan 22 14:47:06 crc kubenswrapper[4773]: I0122 14:47:06.679667 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c22bde1b-c849-4be9-9f2b-140d51234bbb" path="/var/lib/kubelet/pods/c22bde1b-c849-4be9-9f2b-140d51234bbb/volumes" Jan 22 14:47:40 crc kubenswrapper[4773]: I0122 14:47:40.078053 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bl5d8"] Jan 22 14:47:40 crc kubenswrapper[4773]: E0122 14:47:40.079328 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c22bde1b-c849-4be9-9f2b-140d51234bbb" containerName="extract-utilities" Jan 22 14:47:40 crc kubenswrapper[4773]: I0122 14:47:40.079364 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c22bde1b-c849-4be9-9f2b-140d51234bbb" containerName="extract-utilities" Jan 22 14:47:40 crc kubenswrapper[4773]: E0122 14:47:40.079393 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c22bde1b-c849-4be9-9f2b-140d51234bbb" containerName="registry-server" Jan 22 14:47:40 crc kubenswrapper[4773]: I0122 14:47:40.079402 4773 
state_mem.go:107] "Deleted CPUSet assignment" podUID="c22bde1b-c849-4be9-9f2b-140d51234bbb" containerName="registry-server" Jan 22 14:47:40 crc kubenswrapper[4773]: E0122 14:47:40.079418 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c22bde1b-c849-4be9-9f2b-140d51234bbb" containerName="extract-content" Jan 22 14:47:40 crc kubenswrapper[4773]: I0122 14:47:40.079426 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c22bde1b-c849-4be9-9f2b-140d51234bbb" containerName="extract-content" Jan 22 14:47:40 crc kubenswrapper[4773]: I0122 14:47:40.079799 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="c22bde1b-c849-4be9-9f2b-140d51234bbb" containerName="registry-server" Jan 22 14:47:40 crc kubenswrapper[4773]: I0122 14:47:40.082420 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bl5d8" Jan 22 14:47:40 crc kubenswrapper[4773]: I0122 14:47:40.106940 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bl5d8"] Jan 22 14:47:40 crc kubenswrapper[4773]: I0122 14:47:40.244676 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bd6nx\" (UniqueName: \"kubernetes.io/projected/aaf00f8f-9483-4ba1-9a93-ca57fa2f4608-kube-api-access-bd6nx\") pod \"certified-operators-bl5d8\" (UID: \"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608\") " pod="openshift-marketplace/certified-operators-bl5d8" Jan 22 14:47:40 crc kubenswrapper[4773]: I0122 14:47:40.245335 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aaf00f8f-9483-4ba1-9a93-ca57fa2f4608-utilities\") pod \"certified-operators-bl5d8\" (UID: \"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608\") " pod="openshift-marketplace/certified-operators-bl5d8" Jan 22 14:47:40 crc kubenswrapper[4773]: I0122 14:47:40.245754 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaf00f8f-9483-4ba1-9a93-ca57fa2f4608-catalog-content\") pod \"certified-operators-bl5d8\" (UID: \"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608\") " pod="openshift-marketplace/certified-operators-bl5d8" Jan 22 14:47:40 crc kubenswrapper[4773]: I0122 14:47:40.348460 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaf00f8f-9483-4ba1-9a93-ca57fa2f4608-catalog-content\") pod \"certified-operators-bl5d8\" (UID: \"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608\") " pod="openshift-marketplace/certified-operators-bl5d8" Jan 22 14:47:40 crc kubenswrapper[4773]: I0122 14:47:40.348817 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bd6nx\" (UniqueName: \"kubernetes.io/projected/aaf00f8f-9483-4ba1-9a93-ca57fa2f4608-kube-api-access-bd6nx\") pod \"certified-operators-bl5d8\" (UID: \"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608\") " pod="openshift-marketplace/certified-operators-bl5d8" Jan 22 14:47:40 crc kubenswrapper[4773]: I0122 14:47:40.348967 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aaf00f8f-9483-4ba1-9a93-ca57fa2f4608-utilities\") pod \"certified-operators-bl5d8\" (UID: \"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608\") " pod="openshift-marketplace/certified-operators-bl5d8" Jan 22 14:47:40 crc 
kubenswrapper[4773]: I0122 14:47:40.349089 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaf00f8f-9483-4ba1-9a93-ca57fa2f4608-catalog-content\") pod \"certified-operators-bl5d8\" (UID: \"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608\") " pod="openshift-marketplace/certified-operators-bl5d8" Jan 22 14:47:40 crc kubenswrapper[4773]: I0122 14:47:40.349315 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aaf00f8f-9483-4ba1-9a93-ca57fa2f4608-utilities\") pod \"certified-operators-bl5d8\" (UID: \"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608\") " pod="openshift-marketplace/certified-operators-bl5d8" Jan 22 14:47:40 crc kubenswrapper[4773]: I0122 14:47:40.599929 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bd6nx\" (UniqueName: \"kubernetes.io/projected/aaf00f8f-9483-4ba1-9a93-ca57fa2f4608-kube-api-access-bd6nx\") pod \"certified-operators-bl5d8\" (UID: \"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608\") " pod="openshift-marketplace/certified-operators-bl5d8" Jan 22 14:47:40 crc kubenswrapper[4773]: I0122 14:47:40.752501 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bl5d8" Jan 22 14:47:41 crc kubenswrapper[4773]: I0122 14:47:41.352395 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bl5d8"] Jan 22 14:47:42 crc kubenswrapper[4773]: I0122 14:47:42.335074 4773 generic.go:334] "Generic (PLEG): container finished" podID="aaf00f8f-9483-4ba1-9a93-ca57fa2f4608" containerID="fdb30b2ff866150b63a42973f6d997a60b5068a9f979b6f8edfab87baffbbfbe" exitCode=0 Jan 22 14:47:42 crc kubenswrapper[4773]: I0122 14:47:42.335532 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bl5d8" event={"ID":"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608","Type":"ContainerDied","Data":"fdb30b2ff866150b63a42973f6d997a60b5068a9f979b6f8edfab87baffbbfbe"} Jan 22 14:47:42 crc kubenswrapper[4773]: I0122 14:47:42.335595 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bl5d8" event={"ID":"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608","Type":"ContainerStarted","Data":"04c2d530140a6ed517c113197d6312b1d2960f4c14683ed6e263d10dfd49e22b"} Jan 22 14:47:44 crc kubenswrapper[4773]: I0122 14:47:44.358585 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bl5d8" event={"ID":"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608","Type":"ContainerStarted","Data":"3e9c3db109b487a0ed71111008e72bd30d2e417c64d1308075b0079c7980a8a0"} Jan 22 14:47:45 crc kubenswrapper[4773]: I0122 14:47:45.371500 4773 generic.go:334] "Generic (PLEG): container finished" podID="aaf00f8f-9483-4ba1-9a93-ca57fa2f4608" containerID="3e9c3db109b487a0ed71111008e72bd30d2e417c64d1308075b0079c7980a8a0" exitCode=0 Jan 22 14:47:45 crc kubenswrapper[4773]: I0122 14:47:45.371582 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bl5d8" event={"ID":"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608","Type":"ContainerDied","Data":"3e9c3db109b487a0ed71111008e72bd30d2e417c64d1308075b0079c7980a8a0"} Jan 22 14:47:46 crc kubenswrapper[4773]: I0122 14:47:46.393908 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bl5d8" 
event={"ID":"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608","Type":"ContainerStarted","Data":"4024364e8330a516cfadcd95793d3a10003a2c14e23e5fb0ecc0b0e523ae650a"} Jan 22 14:47:46 crc kubenswrapper[4773]: I0122 14:47:46.428730 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bl5d8" podStartSLOduration=2.642679695 podStartE2EDuration="6.428688589s" podCreationTimestamp="2026-01-22 14:47:40 +0000 UTC" firstStartedPulling="2026-01-22 14:47:42.339002626 +0000 UTC m=+10369.917118491" lastFinishedPulling="2026-01-22 14:47:46.12501156 +0000 UTC m=+10373.703127385" observedRunningTime="2026-01-22 14:47:46.417983646 +0000 UTC m=+10373.996099471" watchObservedRunningTime="2026-01-22 14:47:46.428688589 +0000 UTC m=+10374.006804414" Jan 22 14:47:50 crc kubenswrapper[4773]: I0122 14:47:50.753110 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bl5d8" Jan 22 14:47:50 crc kubenswrapper[4773]: I0122 14:47:50.754139 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bl5d8" Jan 22 14:47:50 crc kubenswrapper[4773]: I0122 14:47:50.850204 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bl5d8" Jan 22 14:47:51 crc kubenswrapper[4773]: I0122 14:47:51.539080 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bl5d8" Jan 22 14:47:52 crc kubenswrapper[4773]: I0122 14:47:52.673909 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bl5d8"] Jan 22 14:47:53 crc kubenswrapper[4773]: I0122 14:47:53.487021 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bl5d8" podUID="aaf00f8f-9483-4ba1-9a93-ca57fa2f4608" containerName="registry-server" containerID="cri-o://4024364e8330a516cfadcd95793d3a10003a2c14e23e5fb0ecc0b0e523ae650a" gracePeriod=2 Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.528494 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bl5d8" Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.548415 4773 generic.go:334] "Generic (PLEG): container finished" podID="aaf00f8f-9483-4ba1-9a93-ca57fa2f4608" containerID="4024364e8330a516cfadcd95793d3a10003a2c14e23e5fb0ecc0b0e523ae650a" exitCode=0 Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.548481 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bl5d8" event={"ID":"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608","Type":"ContainerDied","Data":"4024364e8330a516cfadcd95793d3a10003a2c14e23e5fb0ecc0b0e523ae650a"} Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.548517 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bl5d8" event={"ID":"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608","Type":"ContainerDied","Data":"04c2d530140a6ed517c113197d6312b1d2960f4c14683ed6e263d10dfd49e22b"} Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.548541 4773 scope.go:117] "RemoveContainer" containerID="4024364e8330a516cfadcd95793d3a10003a2c14e23e5fb0ecc0b0e523ae650a" Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.549342 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bl5d8" Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.598658 4773 scope.go:117] "RemoveContainer" containerID="3e9c3db109b487a0ed71111008e72bd30d2e417c64d1308075b0079c7980a8a0" Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.616980 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bd6nx\" (UniqueName: \"kubernetes.io/projected/aaf00f8f-9483-4ba1-9a93-ca57fa2f4608-kube-api-access-bd6nx\") pod \"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608\" (UID: \"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608\") " Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.617489 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aaf00f8f-9483-4ba1-9a93-ca57fa2f4608-utilities\") pod \"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608\" (UID: \"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608\") " Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.617680 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaf00f8f-9483-4ba1-9a93-ca57fa2f4608-catalog-content\") pod \"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608\" (UID: \"aaf00f8f-9483-4ba1-9a93-ca57fa2f4608\") " Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.618329 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aaf00f8f-9483-4ba1-9a93-ca57fa2f4608-utilities" (OuterVolumeSpecName: "utilities") pod "aaf00f8f-9483-4ba1-9a93-ca57fa2f4608" (UID: "aaf00f8f-9483-4ba1-9a93-ca57fa2f4608"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.618943 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aaf00f8f-9483-4ba1-9a93-ca57fa2f4608-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.626243 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aaf00f8f-9483-4ba1-9a93-ca57fa2f4608-kube-api-access-bd6nx" (OuterVolumeSpecName: "kube-api-access-bd6nx") pod "aaf00f8f-9483-4ba1-9a93-ca57fa2f4608" (UID: "aaf00f8f-9483-4ba1-9a93-ca57fa2f4608"). InnerVolumeSpecName "kube-api-access-bd6nx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.626919 4773 scope.go:117] "RemoveContainer" containerID="fdb30b2ff866150b63a42973f6d997a60b5068a9f979b6f8edfab87baffbbfbe" Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.669537 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aaf00f8f-9483-4ba1-9a93-ca57fa2f4608-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aaf00f8f-9483-4ba1-9a93-ca57fa2f4608" (UID: "aaf00f8f-9483-4ba1-9a93-ca57fa2f4608"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.709866 4773 scope.go:117] "RemoveContainer" containerID="4024364e8330a516cfadcd95793d3a10003a2c14e23e5fb0ecc0b0e523ae650a" Jan 22 14:47:55 crc kubenswrapper[4773]: E0122 14:47:55.710620 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4024364e8330a516cfadcd95793d3a10003a2c14e23e5fb0ecc0b0e523ae650a\": container with ID starting with 4024364e8330a516cfadcd95793d3a10003a2c14e23e5fb0ecc0b0e523ae650a not found: ID does not exist" containerID="4024364e8330a516cfadcd95793d3a10003a2c14e23e5fb0ecc0b0e523ae650a" Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.710682 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4024364e8330a516cfadcd95793d3a10003a2c14e23e5fb0ecc0b0e523ae650a"} err="failed to get container status \"4024364e8330a516cfadcd95793d3a10003a2c14e23e5fb0ecc0b0e523ae650a\": rpc error: code = NotFound desc = could not find container \"4024364e8330a516cfadcd95793d3a10003a2c14e23e5fb0ecc0b0e523ae650a\": container with ID starting with 4024364e8330a516cfadcd95793d3a10003a2c14e23e5fb0ecc0b0e523ae650a not found: ID does not exist" Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.711057 4773 scope.go:117] "RemoveContainer" containerID="3e9c3db109b487a0ed71111008e72bd30d2e417c64d1308075b0079c7980a8a0" Jan 22 14:47:55 crc kubenswrapper[4773]: E0122 14:47:55.711453 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e9c3db109b487a0ed71111008e72bd30d2e417c64d1308075b0079c7980a8a0\": container with ID starting with 3e9c3db109b487a0ed71111008e72bd30d2e417c64d1308075b0079c7980a8a0 not found: ID does not exist" containerID="3e9c3db109b487a0ed71111008e72bd30d2e417c64d1308075b0079c7980a8a0" Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.711500 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e9c3db109b487a0ed71111008e72bd30d2e417c64d1308075b0079c7980a8a0"} err="failed to get container status \"3e9c3db109b487a0ed71111008e72bd30d2e417c64d1308075b0079c7980a8a0\": rpc error: code = NotFound desc = could not find container \"3e9c3db109b487a0ed71111008e72bd30d2e417c64d1308075b0079c7980a8a0\": container with ID starting with 3e9c3db109b487a0ed71111008e72bd30d2e417c64d1308075b0079c7980a8a0 not found: ID does not exist" Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.711535 4773 scope.go:117] "RemoveContainer" containerID="fdb30b2ff866150b63a42973f6d997a60b5068a9f979b6f8edfab87baffbbfbe" Jan 22 14:47:55 crc kubenswrapper[4773]: E0122 14:47:55.712243 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdb30b2ff866150b63a42973f6d997a60b5068a9f979b6f8edfab87baffbbfbe\": container with ID starting with fdb30b2ff866150b63a42973f6d997a60b5068a9f979b6f8edfab87baffbbfbe not found: ID does not exist" containerID="fdb30b2ff866150b63a42973f6d997a60b5068a9f979b6f8edfab87baffbbfbe" Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.712270 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdb30b2ff866150b63a42973f6d997a60b5068a9f979b6f8edfab87baffbbfbe"} err="failed to get container status \"fdb30b2ff866150b63a42973f6d997a60b5068a9f979b6f8edfab87baffbbfbe\": rpc error: code = NotFound desc = could not 
find container \"fdb30b2ff866150b63a42973f6d997a60b5068a9f979b6f8edfab87baffbbfbe\": container with ID starting with fdb30b2ff866150b63a42973f6d997a60b5068a9f979b6f8edfab87baffbbfbe not found: ID does not exist" Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.721049 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aaf00f8f-9483-4ba1-9a93-ca57fa2f4608-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.721081 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bd6nx\" (UniqueName: \"kubernetes.io/projected/aaf00f8f-9483-4ba1-9a93-ca57fa2f4608-kube-api-access-bd6nx\") on node \"crc\" DevicePath \"\"" Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.920345 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bl5d8"] Jan 22 14:47:55 crc kubenswrapper[4773]: I0122 14:47:55.947389 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bl5d8"] Jan 22 14:47:56 crc kubenswrapper[4773]: I0122 14:47:56.675421 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aaf00f8f-9483-4ba1-9a93-ca57fa2f4608" path="/var/lib/kubelet/pods/aaf00f8f-9483-4ba1-9a93-ca57fa2f4608/volumes" Jan 22 14:49:04 crc kubenswrapper[4773]: I0122 14:49:04.074028 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:49:04 crc kubenswrapper[4773]: I0122 14:49:04.074702 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:49:34 crc kubenswrapper[4773]: I0122 14:49:34.074267 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:49:34 crc kubenswrapper[4773]: I0122 14:49:34.075127 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:50:04 crc kubenswrapper[4773]: I0122 14:50:04.074217 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:50:04 crc kubenswrapper[4773]: I0122 14:50:04.074697 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial 
Jan 22 14:50:04 crc kubenswrapper[4773]: I0122 14:50:04.074697 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 14:50:04 crc kubenswrapper[4773]: I0122 14:50:04.074752 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5"
Jan 22 14:50:04 crc kubenswrapper[4773]: I0122 14:50:04.075783 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 22 14:50:04 crc kubenswrapper[4773]: I0122 14:50:04.075850 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" gracePeriod=600
Jan 22 14:50:05 crc kubenswrapper[4773]: E0122 14:50:05.000740 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 14:50:05 crc kubenswrapper[4773]: I0122 14:50:05.289614 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" exitCode=0
Jan 22 14:50:05 crc kubenswrapper[4773]: I0122 14:50:05.289761 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5"}
Jan 22 14:50:05 crc kubenswrapper[4773]: I0122 14:50:05.290509 4773 scope.go:117] "RemoveContainer" containerID="1e1def94c3cc40653286bfda7cb9eb46166973f52128d43b13de1f5af22fd912"
Jan 22 14:50:05 crc kubenswrapper[4773]: I0122 14:50:05.291570 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5"
Jan 22 14:50:05 crc kubenswrapper[4773]: E0122 14:50:05.291989 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 14:50:19 crc kubenswrapper[4773]: I0122 14:50:19.666424 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5"
Jan 22 14:50:19 crc kubenswrapper[4773]: E0122 14:50:19.667324 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:50:31 crc kubenswrapper[4773]: I0122 14:50:31.658572 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" Jan 22 14:50:31 crc kubenswrapper[4773]: E0122 14:50:31.659659 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:50:42 crc kubenswrapper[4773]: I0122 14:50:42.690440 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" Jan 22 14:50:42 crc kubenswrapper[4773]: E0122 14:50:42.691086 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:50:54 crc kubenswrapper[4773]: I0122 14:50:54.658453 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" Jan 22 14:50:54 crc kubenswrapper[4773]: E0122 14:50:54.659398 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:51:06 crc kubenswrapper[4773]: I0122 14:51:06.658236 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" Jan 22 14:51:06 crc kubenswrapper[4773]: E0122 14:51:06.659166 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:51:12 crc kubenswrapper[4773]: I0122 14:51:12.192001 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-p452n"] Jan 22 14:51:12 crc kubenswrapper[4773]: E0122 14:51:12.193571 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaf00f8f-9483-4ba1-9a93-ca57fa2f4608" containerName="extract-content" Jan 22 14:51:12 crc kubenswrapper[4773]: I0122 14:51:12.193604 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaf00f8f-9483-4ba1-9a93-ca57fa2f4608" containerName="extract-content" Jan 22 14:51:12 crc kubenswrapper[4773]: E0122 14:51:12.193706 4773 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaf00f8f-9483-4ba1-9a93-ca57fa2f4608" containerName="registry-server" Jan 22 14:51:12 crc kubenswrapper[4773]: I0122 14:51:12.193720 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaf00f8f-9483-4ba1-9a93-ca57fa2f4608" containerName="registry-server" Jan 22 14:51:12 crc kubenswrapper[4773]: E0122 14:51:12.193744 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaf00f8f-9483-4ba1-9a93-ca57fa2f4608" containerName="extract-utilities" Jan 22 14:51:12 crc kubenswrapper[4773]: I0122 14:51:12.193758 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaf00f8f-9483-4ba1-9a93-ca57fa2f4608" containerName="extract-utilities" Jan 22 14:51:12 crc kubenswrapper[4773]: I0122 14:51:12.194119 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="aaf00f8f-9483-4ba1-9a93-ca57fa2f4608" containerName="registry-server" Jan 22 14:51:12 crc kubenswrapper[4773]: I0122 14:51:12.197181 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p452n" Jan 22 14:51:12 crc kubenswrapper[4773]: I0122 14:51:12.212032 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p452n"] Jan 22 14:51:12 crc kubenswrapper[4773]: I0122 14:51:12.300856 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfrbs\" (UniqueName: \"kubernetes.io/projected/ee4c934e-10c7-4325-b9de-700317955f6e-kube-api-access-lfrbs\") pod \"redhat-marketplace-p452n\" (UID: \"ee4c934e-10c7-4325-b9de-700317955f6e\") " pod="openshift-marketplace/redhat-marketplace-p452n" Jan 22 14:51:12 crc kubenswrapper[4773]: I0122 14:51:12.300992 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee4c934e-10c7-4325-b9de-700317955f6e-utilities\") pod \"redhat-marketplace-p452n\" (UID: \"ee4c934e-10c7-4325-b9de-700317955f6e\") " pod="openshift-marketplace/redhat-marketplace-p452n" Jan 22 14:51:12 crc kubenswrapper[4773]: I0122 14:51:12.301180 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee4c934e-10c7-4325-b9de-700317955f6e-catalog-content\") pod \"redhat-marketplace-p452n\" (UID: \"ee4c934e-10c7-4325-b9de-700317955f6e\") " pod="openshift-marketplace/redhat-marketplace-p452n" Jan 22 14:51:12 crc kubenswrapper[4773]: I0122 14:51:12.404337 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfrbs\" (UniqueName: \"kubernetes.io/projected/ee4c934e-10c7-4325-b9de-700317955f6e-kube-api-access-lfrbs\") pod \"redhat-marketplace-p452n\" (UID: \"ee4c934e-10c7-4325-b9de-700317955f6e\") " pod="openshift-marketplace/redhat-marketplace-p452n" Jan 22 14:51:12 crc kubenswrapper[4773]: I0122 14:51:12.404451 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee4c934e-10c7-4325-b9de-700317955f6e-utilities\") pod \"redhat-marketplace-p452n\" (UID: \"ee4c934e-10c7-4325-b9de-700317955f6e\") " pod="openshift-marketplace/redhat-marketplace-p452n" Jan 22 14:51:12 crc kubenswrapper[4773]: I0122 14:51:12.404619 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/ee4c934e-10c7-4325-b9de-700317955f6e-catalog-content\") pod \"redhat-marketplace-p452n\" (UID: \"ee4c934e-10c7-4325-b9de-700317955f6e\") " pod="openshift-marketplace/redhat-marketplace-p452n" Jan 22 14:51:12 crc kubenswrapper[4773]: I0122 14:51:12.407097 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee4c934e-10c7-4325-b9de-700317955f6e-utilities\") pod \"redhat-marketplace-p452n\" (UID: \"ee4c934e-10c7-4325-b9de-700317955f6e\") " pod="openshift-marketplace/redhat-marketplace-p452n" Jan 22 14:51:12 crc kubenswrapper[4773]: I0122 14:51:12.408094 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee4c934e-10c7-4325-b9de-700317955f6e-catalog-content\") pod \"redhat-marketplace-p452n\" (UID: \"ee4c934e-10c7-4325-b9de-700317955f6e\") " pod="openshift-marketplace/redhat-marketplace-p452n" Jan 22 14:51:12 crc kubenswrapper[4773]: I0122 14:51:12.432276 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfrbs\" (UniqueName: \"kubernetes.io/projected/ee4c934e-10c7-4325-b9de-700317955f6e-kube-api-access-lfrbs\") pod \"redhat-marketplace-p452n\" (UID: \"ee4c934e-10c7-4325-b9de-700317955f6e\") " pod="openshift-marketplace/redhat-marketplace-p452n" Jan 22 14:51:12 crc kubenswrapper[4773]: I0122 14:51:12.541130 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p452n" Jan 22 14:51:13 crc kubenswrapper[4773]: I0122 14:51:13.234031 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p452n"] Jan 22 14:51:13 crc kubenswrapper[4773]: I0122 14:51:13.359396 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p452n" event={"ID":"ee4c934e-10c7-4325-b9de-700317955f6e","Type":"ContainerStarted","Data":"1578f95dac25823f4444bbfa853611404c89a1976c55e623c976f73d9973e740"} Jan 22 14:51:14 crc kubenswrapper[4773]: I0122 14:51:14.379330 4773 generic.go:334] "Generic (PLEG): container finished" podID="ee4c934e-10c7-4325-b9de-700317955f6e" containerID="4881310c1df3f0b60851b79221b0d0e645e63fa594fdef9adf049ee9170b00f1" exitCode=0 Jan 22 14:51:14 crc kubenswrapper[4773]: I0122 14:51:14.379422 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p452n" event={"ID":"ee4c934e-10c7-4325-b9de-700317955f6e","Type":"ContainerDied","Data":"4881310c1df3f0b60851b79221b0d0e645e63fa594fdef9adf049ee9170b00f1"} Jan 22 14:51:18 crc kubenswrapper[4773]: I0122 14:51:18.658507 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" Jan 22 14:51:18 crc kubenswrapper[4773]: E0122 14:51:18.661388 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:51:19 crc kubenswrapper[4773]: I0122 14:51:19.465134 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p452n" 
event={"ID":"ee4c934e-10c7-4325-b9de-700317955f6e","Type":"ContainerStarted","Data":"4ac62a5fe50666101e02d27174ed3965762f449859b66af03941428f47ef7db0"} Jan 22 14:51:20 crc kubenswrapper[4773]: I0122 14:51:20.481568 4773 generic.go:334] "Generic (PLEG): container finished" podID="ee4c934e-10c7-4325-b9de-700317955f6e" containerID="4ac62a5fe50666101e02d27174ed3965762f449859b66af03941428f47ef7db0" exitCode=0 Jan 22 14:51:20 crc kubenswrapper[4773]: I0122 14:51:20.481623 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p452n" event={"ID":"ee4c934e-10c7-4325-b9de-700317955f6e","Type":"ContainerDied","Data":"4ac62a5fe50666101e02d27174ed3965762f449859b66af03941428f47ef7db0"} Jan 22 14:51:22 crc kubenswrapper[4773]: I0122 14:51:22.502994 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p452n" event={"ID":"ee4c934e-10c7-4325-b9de-700317955f6e","Type":"ContainerStarted","Data":"e03a0b9e0de5edc441e338e966ecd58465a5144f149350b8d6037871e502ca49"} Jan 22 14:51:22 crc kubenswrapper[4773]: I0122 14:51:22.540330 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-p452n" podStartSLOduration=3.747526628 podStartE2EDuration="10.540266394s" podCreationTimestamp="2026-01-22 14:51:12 +0000 UTC" firstStartedPulling="2026-01-22 14:51:14.384034633 +0000 UTC m=+10581.962150458" lastFinishedPulling="2026-01-22 14:51:21.176774359 +0000 UTC m=+10588.754890224" observedRunningTime="2026-01-22 14:51:22.537884257 +0000 UTC m=+10590.116000082" watchObservedRunningTime="2026-01-22 14:51:22.540266394 +0000 UTC m=+10590.118382239" Jan 22 14:51:22 crc kubenswrapper[4773]: I0122 14:51:22.542914 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-p452n" Jan 22 14:51:22 crc kubenswrapper[4773]: I0122 14:51:22.542958 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-p452n" Jan 22 14:51:23 crc kubenswrapper[4773]: I0122 14:51:23.595091 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-p452n" podUID="ee4c934e-10c7-4325-b9de-700317955f6e" containerName="registry-server" probeResult="failure" output=< Jan 22 14:51:23 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 14:51:23 crc kubenswrapper[4773]: > Jan 22 14:51:32 crc kubenswrapper[4773]: I0122 14:51:32.728379 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-p452n" Jan 22 14:51:32 crc kubenswrapper[4773]: I0122 14:51:32.815606 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-p452n" Jan 22 14:51:33 crc kubenswrapper[4773]: I0122 14:51:33.005588 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p452n"] Jan 22 14:51:33 crc kubenswrapper[4773]: I0122 14:51:33.658829 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" Jan 22 14:51:33 crc kubenswrapper[4773]: E0122 14:51:33.659339 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Jan 22 14:51:33 crc kubenswrapper[4773]: E0122 14:51:33.659339 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 14:51:34 crc kubenswrapper[4773]: I0122 14:51:34.718304 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-p452n" podUID="ee4c934e-10c7-4325-b9de-700317955f6e" containerName="registry-server" containerID="cri-o://e03a0b9e0de5edc441e338e966ecd58465a5144f149350b8d6037871e502ca49" gracePeriod=2
Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.414547 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p452n"
Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.458461 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee4c934e-10c7-4325-b9de-700317955f6e-catalog-content\") pod \"ee4c934e-10c7-4325-b9de-700317955f6e\" (UID: \"ee4c934e-10c7-4325-b9de-700317955f6e\") "
Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.458647 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfrbs\" (UniqueName: \"kubernetes.io/projected/ee4c934e-10c7-4325-b9de-700317955f6e-kube-api-access-lfrbs\") pod \"ee4c934e-10c7-4325-b9de-700317955f6e\" (UID: \"ee4c934e-10c7-4325-b9de-700317955f6e\") "
Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.458701 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee4c934e-10c7-4325-b9de-700317955f6e-utilities\") pod \"ee4c934e-10c7-4325-b9de-700317955f6e\" (UID: \"ee4c934e-10c7-4325-b9de-700317955f6e\") "
Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.459886 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee4c934e-10c7-4325-b9de-700317955f6e-utilities" (OuterVolumeSpecName: "utilities") pod "ee4c934e-10c7-4325-b9de-700317955f6e" (UID: "ee4c934e-10c7-4325-b9de-700317955f6e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.489978 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee4c934e-10c7-4325-b9de-700317955f6e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ee4c934e-10c7-4325-b9de-700317955f6e" (UID: "ee4c934e-10c7-4325-b9de-700317955f6e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.497491 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee4c934e-10c7-4325-b9de-700317955f6e-kube-api-access-lfrbs" (OuterVolumeSpecName: "kube-api-access-lfrbs") pod "ee4c934e-10c7-4325-b9de-700317955f6e" (UID: "ee4c934e-10c7-4325-b9de-700317955f6e"). InnerVolumeSpecName "kube-api-access-lfrbs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.561698 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee4c934e-10c7-4325-b9de-700317955f6e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.561740 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfrbs\" (UniqueName: \"kubernetes.io/projected/ee4c934e-10c7-4325-b9de-700317955f6e-kube-api-access-lfrbs\") on node \"crc\" DevicePath \"\"" Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.561756 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee4c934e-10c7-4325-b9de-700317955f6e-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.748201 4773 generic.go:334] "Generic (PLEG): container finished" podID="ee4c934e-10c7-4325-b9de-700317955f6e" containerID="e03a0b9e0de5edc441e338e966ecd58465a5144f149350b8d6037871e502ca49" exitCode=0 Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.748549 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p452n" Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.748581 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p452n" event={"ID":"ee4c934e-10c7-4325-b9de-700317955f6e","Type":"ContainerDied","Data":"e03a0b9e0de5edc441e338e966ecd58465a5144f149350b8d6037871e502ca49"} Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.750240 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p452n" event={"ID":"ee4c934e-10c7-4325-b9de-700317955f6e","Type":"ContainerDied","Data":"1578f95dac25823f4444bbfa853611404c89a1976c55e623c976f73d9973e740"} Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.750319 4773 scope.go:117] "RemoveContainer" containerID="e03a0b9e0de5edc441e338e966ecd58465a5144f149350b8d6037871e502ca49" Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.811559 4773 scope.go:117] "RemoveContainer" containerID="4ac62a5fe50666101e02d27174ed3965762f449859b66af03941428f47ef7db0" Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.820885 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p452n"] Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.831694 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-p452n"] Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.840964 4773 scope.go:117] "RemoveContainer" containerID="4881310c1df3f0b60851b79221b0d0e645e63fa594fdef9adf049ee9170b00f1" Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.928767 4773 scope.go:117] "RemoveContainer" containerID="e03a0b9e0de5edc441e338e966ecd58465a5144f149350b8d6037871e502ca49" Jan 22 14:51:35 crc kubenswrapper[4773]: E0122 14:51:35.929424 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e03a0b9e0de5edc441e338e966ecd58465a5144f149350b8d6037871e502ca49\": container with ID starting with e03a0b9e0de5edc441e338e966ecd58465a5144f149350b8d6037871e502ca49 not found: ID does not exist" containerID="e03a0b9e0de5edc441e338e966ecd58465a5144f149350b8d6037871e502ca49" Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.929475 4773 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e03a0b9e0de5edc441e338e966ecd58465a5144f149350b8d6037871e502ca49"} err="failed to get container status \"e03a0b9e0de5edc441e338e966ecd58465a5144f149350b8d6037871e502ca49\": rpc error: code = NotFound desc = could not find container \"e03a0b9e0de5edc441e338e966ecd58465a5144f149350b8d6037871e502ca49\": container with ID starting with e03a0b9e0de5edc441e338e966ecd58465a5144f149350b8d6037871e502ca49 not found: ID does not exist" Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.929502 4773 scope.go:117] "RemoveContainer" containerID="4ac62a5fe50666101e02d27174ed3965762f449859b66af03941428f47ef7db0" Jan 22 14:51:35 crc kubenswrapper[4773]: E0122 14:51:35.933834 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ac62a5fe50666101e02d27174ed3965762f449859b66af03941428f47ef7db0\": container with ID starting with 4ac62a5fe50666101e02d27174ed3965762f449859b66af03941428f47ef7db0 not found: ID does not exist" containerID="4ac62a5fe50666101e02d27174ed3965762f449859b66af03941428f47ef7db0" Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.933874 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ac62a5fe50666101e02d27174ed3965762f449859b66af03941428f47ef7db0"} err="failed to get container status \"4ac62a5fe50666101e02d27174ed3965762f449859b66af03941428f47ef7db0\": rpc error: code = NotFound desc = could not find container \"4ac62a5fe50666101e02d27174ed3965762f449859b66af03941428f47ef7db0\": container with ID starting with 4ac62a5fe50666101e02d27174ed3965762f449859b66af03941428f47ef7db0 not found: ID does not exist" Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.933905 4773 scope.go:117] "RemoveContainer" containerID="4881310c1df3f0b60851b79221b0d0e645e63fa594fdef9adf049ee9170b00f1" Jan 22 14:51:35 crc kubenswrapper[4773]: E0122 14:51:35.934333 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4881310c1df3f0b60851b79221b0d0e645e63fa594fdef9adf049ee9170b00f1\": container with ID starting with 4881310c1df3f0b60851b79221b0d0e645e63fa594fdef9adf049ee9170b00f1 not found: ID does not exist" containerID="4881310c1df3f0b60851b79221b0d0e645e63fa594fdef9adf049ee9170b00f1" Jan 22 14:51:35 crc kubenswrapper[4773]: I0122 14:51:35.934396 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4881310c1df3f0b60851b79221b0d0e645e63fa594fdef9adf049ee9170b00f1"} err="failed to get container status \"4881310c1df3f0b60851b79221b0d0e645e63fa594fdef9adf049ee9170b00f1\": rpc error: code = NotFound desc = could not find container \"4881310c1df3f0b60851b79221b0d0e645e63fa594fdef9adf049ee9170b00f1\": container with ID starting with 4881310c1df3f0b60851b79221b0d0e645e63fa594fdef9adf049ee9170b00f1 not found: ID does not exist" Jan 22 14:51:36 crc kubenswrapper[4773]: I0122 14:51:36.674697 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee4c934e-10c7-4325-b9de-700317955f6e" path="/var/lib/kubelet/pods/ee4c934e-10c7-4325-b9de-700317955f6e/volumes" Jan 22 14:51:44 crc kubenswrapper[4773]: I0122 14:51:44.658949 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" Jan 22 14:51:44 crc kubenswrapper[4773]: E0122 14:51:44.660034 4773 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:51:58 crc kubenswrapper[4773]: I0122 14:51:58.658370 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" Jan 22 14:51:58 crc kubenswrapper[4773]: E0122 14:51:58.659238 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:52:09 crc kubenswrapper[4773]: I0122 14:52:09.659082 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" Jan 22 14:52:09 crc kubenswrapper[4773]: E0122 14:52:09.660430 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:52:24 crc kubenswrapper[4773]: I0122 14:52:24.659071 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" Jan 22 14:52:24 crc kubenswrapper[4773]: E0122 14:52:24.659934 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:52:38 crc kubenswrapper[4773]: I0122 14:52:38.658007 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" Jan 22 14:52:38 crc kubenswrapper[4773]: E0122 14:52:38.658913 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:52:52 crc kubenswrapper[4773]: I0122 14:52:52.665392 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" Jan 22 14:52:52 crc kubenswrapper[4773]: E0122 14:52:52.666190 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 14:53:07 crc kubenswrapper[4773]: I0122 14:53:07.659166 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5"
Jan 22 14:53:07 crc kubenswrapper[4773]: E0122 14:53:07.660068 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 14:53:20 crc kubenswrapper[4773]: I0122 14:53:20.670417 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5"
Jan 22 14:53:20 crc kubenswrapper[4773]: E0122 14:53:20.672478 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 14:53:35 crc kubenswrapper[4773]: I0122 14:53:35.658817 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5"
Jan 22 14:53:35 crc kubenswrapper[4773]: E0122 14:53:35.659839 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 14:53:49 crc kubenswrapper[4773]: I0122 14:53:49.658812 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5"
Jan 22 14:53:49 crc kubenswrapper[4773]: E0122 14:53:49.661946 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 14:54:03 crc kubenswrapper[4773]: I0122 14:54:03.659597 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5"
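The rhythm of these records shows how CrashLoopBackOff behaves: the sync loop retries every ten to fifteen seconds, but each StartContainer is refused because the restart backoff has not expired; with the backoff at its "back-off 5m0s" cap, the container is only started again at 14:55:07, roughly five minutes after the 14:50:04 kill. A sketch of the doubling-with-cap schedule (the 10s initial delay and 2x factor are assumed defaults here, not values read from this log):

```go
package main

import (
	"fmt"
	"time"
)

// backoffSchedule models a doubling restart backoff with a cap, the shape
// behind the "back-off 5m0s" messages above.
func backoffSchedule(initial, max time.Duration, n int) []time.Duration {
	out := make([]time.Duration, 0, n)
	d := initial
	for i := 0; i < n; i++ {
		out = append(out, d)
		if d *= 2; d > max {
			d = max
		}
	}
	return out
}

func main() {
	fmt.Println(backoffSchedule(10*time.Second, 5*time.Minute, 7))
	// [10s 20s 40s 1m20s 2m40s 5m0s 5m0s]: once capped, the next real
	// restart lands ~5m after the last exit (14:50:05 -> 14:55:07 above).
}
```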
podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:54:14 crc kubenswrapper[4773]: I0122 14:54:14.658744 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" Jan 22 14:54:14 crc kubenswrapper[4773]: E0122 14:54:14.659458 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:54:26 crc kubenswrapper[4773]: I0122 14:54:26.660641 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" Jan 22 14:54:26 crc kubenswrapper[4773]: E0122 14:54:26.662110 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:54:40 crc kubenswrapper[4773]: I0122 14:54:40.658862 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" Jan 22 14:54:40 crc kubenswrapper[4773]: E0122 14:54:40.659703 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:54:52 crc kubenswrapper[4773]: I0122 14:54:52.671699 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" Jan 22 14:54:52 crc kubenswrapper[4773]: E0122 14:54:52.673951 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 14:55:07 crc kubenswrapper[4773]: I0122 14:55:07.658735 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" Jan 22 14:55:08 crc kubenswrapper[4773]: I0122 14:55:08.717899 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"99cb1843fbb7f938fc4868c933a2003b999605e434e479325c259424ac7ace6f"} Jan 22 14:56:19 crc kubenswrapper[4773]: I0122 14:56:18.999769 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-d2v7n"] Jan 22 14:56:19 crc kubenswrapper[4773]: E0122 14:56:19.000745 4773 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="ee4c934e-10c7-4325-b9de-700317955f6e" containerName="extract-utilities" Jan 22 14:56:19 crc kubenswrapper[4773]: I0122 14:56:19.000759 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee4c934e-10c7-4325-b9de-700317955f6e" containerName="extract-utilities" Jan 22 14:56:19 crc kubenswrapper[4773]: E0122 14:56:19.000780 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee4c934e-10c7-4325-b9de-700317955f6e" containerName="extract-content" Jan 22 14:56:19 crc kubenswrapper[4773]: I0122 14:56:19.000786 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee4c934e-10c7-4325-b9de-700317955f6e" containerName="extract-content" Jan 22 14:56:19 crc kubenswrapper[4773]: E0122 14:56:19.000807 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee4c934e-10c7-4325-b9de-700317955f6e" containerName="registry-server" Jan 22 14:56:19 crc kubenswrapper[4773]: I0122 14:56:19.000815 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee4c934e-10c7-4325-b9de-700317955f6e" containerName="registry-server" Jan 22 14:56:19 crc kubenswrapper[4773]: I0122 14:56:19.001028 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee4c934e-10c7-4325-b9de-700317955f6e" containerName="registry-server" Jan 22 14:56:19 crc kubenswrapper[4773]: I0122 14:56:19.002661 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d2v7n" Jan 22 14:56:19 crc kubenswrapper[4773]: I0122 14:56:19.018333 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d2v7n"] Jan 22 14:56:19 crc kubenswrapper[4773]: I0122 14:56:19.026524 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee20aecc-4a3f-41dc-aa18-f716ea13171c-catalog-content\") pod \"community-operators-d2v7n\" (UID: \"ee20aecc-4a3f-41dc-aa18-f716ea13171c\") " pod="openshift-marketplace/community-operators-d2v7n" Jan 22 14:56:19 crc kubenswrapper[4773]: I0122 14:56:19.026976 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee20aecc-4a3f-41dc-aa18-f716ea13171c-utilities\") pod \"community-operators-d2v7n\" (UID: \"ee20aecc-4a3f-41dc-aa18-f716ea13171c\") " pod="openshift-marketplace/community-operators-d2v7n" Jan 22 14:56:19 crc kubenswrapper[4773]: I0122 14:56:19.027320 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79t68\" (UniqueName: \"kubernetes.io/projected/ee20aecc-4a3f-41dc-aa18-f716ea13171c-kube-api-access-79t68\") pod \"community-operators-d2v7n\" (UID: \"ee20aecc-4a3f-41dc-aa18-f716ea13171c\") " pod="openshift-marketplace/community-operators-d2v7n" Jan 22 14:56:19 crc kubenswrapper[4773]: I0122 14:56:19.130760 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee20aecc-4a3f-41dc-aa18-f716ea13171c-utilities\") pod \"community-operators-d2v7n\" (UID: \"ee20aecc-4a3f-41dc-aa18-f716ea13171c\") " pod="openshift-marketplace/community-operators-d2v7n" Jan 22 14:56:19 crc kubenswrapper[4773]: I0122 14:56:19.131041 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79t68\" (UniqueName: \"kubernetes.io/projected/ee20aecc-4a3f-41dc-aa18-f716ea13171c-kube-api-access-79t68\") pod 
\"community-operators-d2v7n\" (UID: \"ee20aecc-4a3f-41dc-aa18-f716ea13171c\") " pod="openshift-marketplace/community-operators-d2v7n" Jan 22 14:56:19 crc kubenswrapper[4773]: I0122 14:56:19.131205 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee20aecc-4a3f-41dc-aa18-f716ea13171c-catalog-content\") pod \"community-operators-d2v7n\" (UID: \"ee20aecc-4a3f-41dc-aa18-f716ea13171c\") " pod="openshift-marketplace/community-operators-d2v7n" Jan 22 14:56:19 crc kubenswrapper[4773]: I0122 14:56:19.131464 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee20aecc-4a3f-41dc-aa18-f716ea13171c-utilities\") pod \"community-operators-d2v7n\" (UID: \"ee20aecc-4a3f-41dc-aa18-f716ea13171c\") " pod="openshift-marketplace/community-operators-d2v7n" Jan 22 14:56:19 crc kubenswrapper[4773]: I0122 14:56:19.131698 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee20aecc-4a3f-41dc-aa18-f716ea13171c-catalog-content\") pod \"community-operators-d2v7n\" (UID: \"ee20aecc-4a3f-41dc-aa18-f716ea13171c\") " pod="openshift-marketplace/community-operators-d2v7n" Jan 22 14:56:19 crc kubenswrapper[4773]: I0122 14:56:19.155260 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79t68\" (UniqueName: \"kubernetes.io/projected/ee20aecc-4a3f-41dc-aa18-f716ea13171c-kube-api-access-79t68\") pod \"community-operators-d2v7n\" (UID: \"ee20aecc-4a3f-41dc-aa18-f716ea13171c\") " pod="openshift-marketplace/community-operators-d2v7n" Jan 22 14:56:19 crc kubenswrapper[4773]: I0122 14:56:19.347472 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-d2v7n" Jan 22 14:56:19 crc kubenswrapper[4773]: I0122 14:56:19.945514 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d2v7n"] Jan 22 14:56:20 crc kubenswrapper[4773]: I0122 14:56:20.702832 4773 generic.go:334] "Generic (PLEG): container finished" podID="ee20aecc-4a3f-41dc-aa18-f716ea13171c" containerID="e6b86c466c5c0d7a3a3a8c4f1719bf8314365faf350a9075d96eb98fbf0eacb5" exitCode=0 Jan 22 14:56:20 crc kubenswrapper[4773]: I0122 14:56:20.703215 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d2v7n" event={"ID":"ee20aecc-4a3f-41dc-aa18-f716ea13171c","Type":"ContainerDied","Data":"e6b86c466c5c0d7a3a3a8c4f1719bf8314365faf350a9075d96eb98fbf0eacb5"} Jan 22 14:56:20 crc kubenswrapper[4773]: I0122 14:56:20.703262 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d2v7n" event={"ID":"ee20aecc-4a3f-41dc-aa18-f716ea13171c","Type":"ContainerStarted","Data":"92af1a43034c900fef612df32380d23ff125dcf4e758df5f0a3c4e15005fb906"} Jan 22 14:56:20 crc kubenswrapper[4773]: I0122 14:56:20.706890 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 14:56:21 crc kubenswrapper[4773]: I0122 14:56:21.717547 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d2v7n" event={"ID":"ee20aecc-4a3f-41dc-aa18-f716ea13171c","Type":"ContainerStarted","Data":"4c3c27dc1b852829c7e9cf14de25bb282e13774132b5440964380647843d33a9"} Jan 22 14:56:22 crc kubenswrapper[4773]: I0122 14:56:22.741547 4773 generic.go:334] "Generic (PLEG): container finished" podID="ee20aecc-4a3f-41dc-aa18-f716ea13171c" containerID="4c3c27dc1b852829c7e9cf14de25bb282e13774132b5440964380647843d33a9" exitCode=0 Jan 22 14:56:22 crc kubenswrapper[4773]: I0122 14:56:22.741871 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d2v7n" event={"ID":"ee20aecc-4a3f-41dc-aa18-f716ea13171c","Type":"ContainerDied","Data":"4c3c27dc1b852829c7e9cf14de25bb282e13774132b5440964380647843d33a9"} Jan 22 14:56:24 crc kubenswrapper[4773]: I0122 14:56:24.772870 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d2v7n" event={"ID":"ee20aecc-4a3f-41dc-aa18-f716ea13171c","Type":"ContainerStarted","Data":"6c71798614e2fbc55a4f9c3342be637e60acd662ece80483456984a9f443cb02"} Jan 22 14:56:24 crc kubenswrapper[4773]: I0122 14:56:24.801497 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-d2v7n" podStartSLOduration=3.042241774 podStartE2EDuration="6.8014662s" podCreationTimestamp="2026-01-22 14:56:18 +0000 UTC" firstStartedPulling="2026-01-22 14:56:20.706214835 +0000 UTC m=+10888.284330700" lastFinishedPulling="2026-01-22 14:56:24.465439311 +0000 UTC m=+10892.043555126" observedRunningTime="2026-01-22 14:56:24.792887798 +0000 UTC m=+10892.371003663" watchObservedRunningTime="2026-01-22 14:56:24.8014662 +0000 UTC m=+10892.379582025" Jan 22 14:56:29 crc kubenswrapper[4773]: I0122 14:56:29.348101 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-d2v7n" Jan 22 14:56:29 crc kubenswrapper[4773]: I0122 14:56:29.348585 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/community-operators-d2v7n" Jan 22 14:56:29 crc kubenswrapper[4773]: I0122 14:56:29.435734 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-d2v7n" Jan 22 14:56:29 crc kubenswrapper[4773]: I0122 14:56:29.919868 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-d2v7n" Jan 22 14:56:29 crc kubenswrapper[4773]: I0122 14:56:29.987416 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d2v7n"] Jan 22 14:56:31 crc kubenswrapper[4773]: I0122 14:56:31.874643 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-d2v7n" podUID="ee20aecc-4a3f-41dc-aa18-f716ea13171c" containerName="registry-server" containerID="cri-o://6c71798614e2fbc55a4f9c3342be637e60acd662ece80483456984a9f443cb02" gracePeriod=2 Jan 22 14:56:32 crc kubenswrapper[4773]: I0122 14:56:32.396861 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d2v7n" Jan 22 14:56:32 crc kubenswrapper[4773]: I0122 14:56:32.468895 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79t68\" (UniqueName: \"kubernetes.io/projected/ee20aecc-4a3f-41dc-aa18-f716ea13171c-kube-api-access-79t68\") pod \"ee20aecc-4a3f-41dc-aa18-f716ea13171c\" (UID: \"ee20aecc-4a3f-41dc-aa18-f716ea13171c\") " Jan 22 14:56:32 crc kubenswrapper[4773]: I0122 14:56:32.468975 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee20aecc-4a3f-41dc-aa18-f716ea13171c-catalog-content\") pod \"ee20aecc-4a3f-41dc-aa18-f716ea13171c\" (UID: \"ee20aecc-4a3f-41dc-aa18-f716ea13171c\") " Jan 22 14:56:32 crc kubenswrapper[4773]: I0122 14:56:32.469027 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee20aecc-4a3f-41dc-aa18-f716ea13171c-utilities\") pod \"ee20aecc-4a3f-41dc-aa18-f716ea13171c\" (UID: \"ee20aecc-4a3f-41dc-aa18-f716ea13171c\") " Jan 22 14:56:32 crc kubenswrapper[4773]: I0122 14:56:32.470621 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee20aecc-4a3f-41dc-aa18-f716ea13171c-utilities" (OuterVolumeSpecName: "utilities") pod "ee20aecc-4a3f-41dc-aa18-f716ea13171c" (UID: "ee20aecc-4a3f-41dc-aa18-f716ea13171c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:56:32 crc kubenswrapper[4773]: I0122 14:56:32.479866 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee20aecc-4a3f-41dc-aa18-f716ea13171c-kube-api-access-79t68" (OuterVolumeSpecName: "kube-api-access-79t68") pod "ee20aecc-4a3f-41dc-aa18-f716ea13171c" (UID: "ee20aecc-4a3f-41dc-aa18-f716ea13171c"). InnerVolumeSpecName "kube-api-access-79t68". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:56:32 crc kubenswrapper[4773]: I0122 14:56:32.571642 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee20aecc-4a3f-41dc-aa18-f716ea13171c-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:56:32 crc kubenswrapper[4773]: I0122 14:56:32.571681 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79t68\" (UniqueName: \"kubernetes.io/projected/ee20aecc-4a3f-41dc-aa18-f716ea13171c-kube-api-access-79t68\") on node \"crc\" DevicePath \"\"" Jan 22 14:56:32 crc kubenswrapper[4773]: I0122 14:56:32.599393 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee20aecc-4a3f-41dc-aa18-f716ea13171c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ee20aecc-4a3f-41dc-aa18-f716ea13171c" (UID: "ee20aecc-4a3f-41dc-aa18-f716ea13171c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:56:32 crc kubenswrapper[4773]: I0122 14:56:32.672929 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee20aecc-4a3f-41dc-aa18-f716ea13171c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:56:32 crc kubenswrapper[4773]: I0122 14:56:32.891525 4773 generic.go:334] "Generic (PLEG): container finished" podID="ee20aecc-4a3f-41dc-aa18-f716ea13171c" containerID="6c71798614e2fbc55a4f9c3342be637e60acd662ece80483456984a9f443cb02" exitCode=0 Jan 22 14:56:32 crc kubenswrapper[4773]: I0122 14:56:32.891601 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d2v7n" event={"ID":"ee20aecc-4a3f-41dc-aa18-f716ea13171c","Type":"ContainerDied","Data":"6c71798614e2fbc55a4f9c3342be637e60acd662ece80483456984a9f443cb02"} Jan 22 14:56:32 crc kubenswrapper[4773]: I0122 14:56:32.891832 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d2v7n" event={"ID":"ee20aecc-4a3f-41dc-aa18-f716ea13171c","Type":"ContainerDied","Data":"92af1a43034c900fef612df32380d23ff125dcf4e758df5f0a3c4e15005fb906"} Jan 22 14:56:32 crc kubenswrapper[4773]: I0122 14:56:32.891843 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-d2v7n" Jan 22 14:56:32 crc kubenswrapper[4773]: I0122 14:56:32.891860 4773 scope.go:117] "RemoveContainer" containerID="6c71798614e2fbc55a4f9c3342be637e60acd662ece80483456984a9f443cb02" Jan 22 14:56:32 crc kubenswrapper[4773]: I0122 14:56:32.942267 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d2v7n"] Jan 22 14:56:32 crc kubenswrapper[4773]: I0122 14:56:32.957823 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-d2v7n"] Jan 22 14:56:32 crc kubenswrapper[4773]: I0122 14:56:32.974060 4773 scope.go:117] "RemoveContainer" containerID="4c3c27dc1b852829c7e9cf14de25bb282e13774132b5440964380647843d33a9" Jan 22 14:56:32 crc kubenswrapper[4773]: I0122 14:56:32.997737 4773 scope.go:117] "RemoveContainer" containerID="e6b86c466c5c0d7a3a3a8c4f1719bf8314365faf350a9075d96eb98fbf0eacb5" Jan 22 14:56:33 crc kubenswrapper[4773]: I0122 14:56:33.063979 4773 scope.go:117] "RemoveContainer" containerID="6c71798614e2fbc55a4f9c3342be637e60acd662ece80483456984a9f443cb02" Jan 22 14:56:33 crc kubenswrapper[4773]: E0122 14:56:33.064586 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c71798614e2fbc55a4f9c3342be637e60acd662ece80483456984a9f443cb02\": container with ID starting with 6c71798614e2fbc55a4f9c3342be637e60acd662ece80483456984a9f443cb02 not found: ID does not exist" containerID="6c71798614e2fbc55a4f9c3342be637e60acd662ece80483456984a9f443cb02" Jan 22 14:56:33 crc kubenswrapper[4773]: I0122 14:56:33.064634 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c71798614e2fbc55a4f9c3342be637e60acd662ece80483456984a9f443cb02"} err="failed to get container status \"6c71798614e2fbc55a4f9c3342be637e60acd662ece80483456984a9f443cb02\": rpc error: code = NotFound desc = could not find container \"6c71798614e2fbc55a4f9c3342be637e60acd662ece80483456984a9f443cb02\": container with ID starting with 6c71798614e2fbc55a4f9c3342be637e60acd662ece80483456984a9f443cb02 not found: ID does not exist" Jan 22 14:56:33 crc kubenswrapper[4773]: I0122 14:56:33.064667 4773 scope.go:117] "RemoveContainer" containerID="4c3c27dc1b852829c7e9cf14de25bb282e13774132b5440964380647843d33a9" Jan 22 14:56:33 crc kubenswrapper[4773]: E0122 14:56:33.065033 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c3c27dc1b852829c7e9cf14de25bb282e13774132b5440964380647843d33a9\": container with ID starting with 4c3c27dc1b852829c7e9cf14de25bb282e13774132b5440964380647843d33a9 not found: ID does not exist" containerID="4c3c27dc1b852829c7e9cf14de25bb282e13774132b5440964380647843d33a9" Jan 22 14:56:33 crc kubenswrapper[4773]: I0122 14:56:33.065092 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c3c27dc1b852829c7e9cf14de25bb282e13774132b5440964380647843d33a9"} err="failed to get container status \"4c3c27dc1b852829c7e9cf14de25bb282e13774132b5440964380647843d33a9\": rpc error: code = NotFound desc = could not find container \"4c3c27dc1b852829c7e9cf14de25bb282e13774132b5440964380647843d33a9\": container with ID starting with 4c3c27dc1b852829c7e9cf14de25bb282e13774132b5440964380647843d33a9 not found: ID does not exist" Jan 22 14:56:33 crc kubenswrapper[4773]: I0122 14:56:33.065127 4773 scope.go:117] "RemoveContainer" 
containerID="e6b86c466c5c0d7a3a3a8c4f1719bf8314365faf350a9075d96eb98fbf0eacb5" Jan 22 14:56:33 crc kubenswrapper[4773]: E0122 14:56:33.065496 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6b86c466c5c0d7a3a3a8c4f1719bf8314365faf350a9075d96eb98fbf0eacb5\": container with ID starting with e6b86c466c5c0d7a3a3a8c4f1719bf8314365faf350a9075d96eb98fbf0eacb5 not found: ID does not exist" containerID="e6b86c466c5c0d7a3a3a8c4f1719bf8314365faf350a9075d96eb98fbf0eacb5" Jan 22 14:56:33 crc kubenswrapper[4773]: I0122 14:56:33.065534 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6b86c466c5c0d7a3a3a8c4f1719bf8314365faf350a9075d96eb98fbf0eacb5"} err="failed to get container status \"e6b86c466c5c0d7a3a3a8c4f1719bf8314365faf350a9075d96eb98fbf0eacb5\": rpc error: code = NotFound desc = could not find container \"e6b86c466c5c0d7a3a3a8c4f1719bf8314365faf350a9075d96eb98fbf0eacb5\": container with ID starting with e6b86c466c5c0d7a3a3a8c4f1719bf8314365faf350a9075d96eb98fbf0eacb5 not found: ID does not exist" Jan 22 14:56:34 crc kubenswrapper[4773]: I0122 14:56:34.684237 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee20aecc-4a3f-41dc-aa18-f716ea13171c" path="/var/lib/kubelet/pods/ee20aecc-4a3f-41dc-aa18-f716ea13171c/volumes" Jan 22 14:57:32 crc kubenswrapper[4773]: I0122 14:57:32.204680 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-h5rn7"] Jan 22 14:57:32 crc kubenswrapper[4773]: E0122 14:57:32.205643 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee20aecc-4a3f-41dc-aa18-f716ea13171c" containerName="registry-server" Jan 22 14:57:32 crc kubenswrapper[4773]: I0122 14:57:32.205662 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee20aecc-4a3f-41dc-aa18-f716ea13171c" containerName="registry-server" Jan 22 14:57:32 crc kubenswrapper[4773]: E0122 14:57:32.205676 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee20aecc-4a3f-41dc-aa18-f716ea13171c" containerName="extract-content" Jan 22 14:57:32 crc kubenswrapper[4773]: I0122 14:57:32.205683 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee20aecc-4a3f-41dc-aa18-f716ea13171c" containerName="extract-content" Jan 22 14:57:32 crc kubenswrapper[4773]: E0122 14:57:32.205724 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee20aecc-4a3f-41dc-aa18-f716ea13171c" containerName="extract-utilities" Jan 22 14:57:32 crc kubenswrapper[4773]: I0122 14:57:32.205732 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee20aecc-4a3f-41dc-aa18-f716ea13171c" containerName="extract-utilities" Jan 22 14:57:32 crc kubenswrapper[4773]: I0122 14:57:32.206021 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee20aecc-4a3f-41dc-aa18-f716ea13171c" containerName="registry-server" Jan 22 14:57:32 crc kubenswrapper[4773]: I0122 14:57:32.208065 4773 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-h5rn7" Jan 22 14:57:32 crc kubenswrapper[4773]: I0122 14:57:32.217873 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h5rn7"] Jan 22 14:57:32 crc kubenswrapper[4773]: I0122 14:57:32.365348 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f0394de-77b2-4669-99e8-18906e894874-catalog-content\") pod \"redhat-operators-h5rn7\" (UID: \"5f0394de-77b2-4669-99e8-18906e894874\") " pod="openshift-marketplace/redhat-operators-h5rn7" Jan 22 14:57:32 crc kubenswrapper[4773]: I0122 14:57:32.365571 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2qxz\" (UniqueName: \"kubernetes.io/projected/5f0394de-77b2-4669-99e8-18906e894874-kube-api-access-j2qxz\") pod \"redhat-operators-h5rn7\" (UID: \"5f0394de-77b2-4669-99e8-18906e894874\") " pod="openshift-marketplace/redhat-operators-h5rn7" Jan 22 14:57:32 crc kubenswrapper[4773]: I0122 14:57:32.365981 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f0394de-77b2-4669-99e8-18906e894874-utilities\") pod \"redhat-operators-h5rn7\" (UID: \"5f0394de-77b2-4669-99e8-18906e894874\") " pod="openshift-marketplace/redhat-operators-h5rn7" Jan 22 14:57:32 crc kubenswrapper[4773]: I0122 14:57:32.468367 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2qxz\" (UniqueName: \"kubernetes.io/projected/5f0394de-77b2-4669-99e8-18906e894874-kube-api-access-j2qxz\") pod \"redhat-operators-h5rn7\" (UID: \"5f0394de-77b2-4669-99e8-18906e894874\") " pod="openshift-marketplace/redhat-operators-h5rn7" Jan 22 14:57:32 crc kubenswrapper[4773]: I0122 14:57:32.468528 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f0394de-77b2-4669-99e8-18906e894874-utilities\") pod \"redhat-operators-h5rn7\" (UID: \"5f0394de-77b2-4669-99e8-18906e894874\") " pod="openshift-marketplace/redhat-operators-h5rn7" Jan 22 14:57:32 crc kubenswrapper[4773]: I0122 14:57:32.468737 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f0394de-77b2-4669-99e8-18906e894874-catalog-content\") pod \"redhat-operators-h5rn7\" (UID: \"5f0394de-77b2-4669-99e8-18906e894874\") " pod="openshift-marketplace/redhat-operators-h5rn7" Jan 22 14:57:32 crc kubenswrapper[4773]: I0122 14:57:32.469430 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f0394de-77b2-4669-99e8-18906e894874-catalog-content\") pod \"redhat-operators-h5rn7\" (UID: \"5f0394de-77b2-4669-99e8-18906e894874\") " pod="openshift-marketplace/redhat-operators-h5rn7" Jan 22 14:57:32 crc kubenswrapper[4773]: I0122 14:57:32.470188 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f0394de-77b2-4669-99e8-18906e894874-utilities\") pod \"redhat-operators-h5rn7\" (UID: \"5f0394de-77b2-4669-99e8-18906e894874\") " pod="openshift-marketplace/redhat-operators-h5rn7" Jan 22 14:57:32 crc kubenswrapper[4773]: I0122 14:57:32.503421 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-j2qxz\" (UniqueName: \"kubernetes.io/projected/5f0394de-77b2-4669-99e8-18906e894874-kube-api-access-j2qxz\") pod \"redhat-operators-h5rn7\" (UID: \"5f0394de-77b2-4669-99e8-18906e894874\") " pod="openshift-marketplace/redhat-operators-h5rn7" Jan 22 14:57:32 crc kubenswrapper[4773]: I0122 14:57:32.531905 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h5rn7" Jan 22 14:57:32 crc kubenswrapper[4773]: I0122 14:57:32.907261 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h5rn7"] Jan 22 14:57:33 crc kubenswrapper[4773]: I0122 14:57:33.622101 4773 generic.go:334] "Generic (PLEG): container finished" podID="5f0394de-77b2-4669-99e8-18906e894874" containerID="a17e048d5e8c9f385e8b921e7b038663466d4f91e2fe6a6fd6243e5074549ca6" exitCode=0 Jan 22 14:57:33 crc kubenswrapper[4773]: I0122 14:57:33.622214 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h5rn7" event={"ID":"5f0394de-77b2-4669-99e8-18906e894874","Type":"ContainerDied","Data":"a17e048d5e8c9f385e8b921e7b038663466d4f91e2fe6a6fd6243e5074549ca6"} Jan 22 14:57:33 crc kubenswrapper[4773]: I0122 14:57:33.622445 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h5rn7" event={"ID":"5f0394de-77b2-4669-99e8-18906e894874","Type":"ContainerStarted","Data":"56f5ddbdc2b9e7a1a7675946e191e1206b36c1e8ad718808cc398a2398fba856"} Jan 22 14:57:34 crc kubenswrapper[4773]: I0122 14:57:34.074953 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:57:34 crc kubenswrapper[4773]: I0122 14:57:34.075030 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:57:35 crc kubenswrapper[4773]: I0122 14:57:35.645800 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h5rn7" event={"ID":"5f0394de-77b2-4669-99e8-18906e894874","Type":"ContainerStarted","Data":"2cb537a60875a3c702f4115e67b736b166be2a6d75626b329d460a03b875bab7"} Jan 22 14:57:37 crc kubenswrapper[4773]: I0122 14:57:37.669168 4773 generic.go:334] "Generic (PLEG): container finished" podID="5f0394de-77b2-4669-99e8-18906e894874" containerID="2cb537a60875a3c702f4115e67b736b166be2a6d75626b329d460a03b875bab7" exitCode=0 Jan 22 14:57:37 crc kubenswrapper[4773]: I0122 14:57:37.669247 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h5rn7" event={"ID":"5f0394de-77b2-4669-99e8-18906e894874","Type":"ContainerDied","Data":"2cb537a60875a3c702f4115e67b736b166be2a6d75626b329d460a03b875bab7"} Jan 22 14:57:38 crc kubenswrapper[4773]: I0122 14:57:38.679838 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h5rn7" event={"ID":"5f0394de-77b2-4669-99e8-18906e894874","Type":"ContainerStarted","Data":"b21fe309d97644ad8e41b9af6d398023ce08ee91d11ab9392c2658aa1e62209d"} Jan 22 14:57:38 crc kubenswrapper[4773]: I0122 14:57:38.699131 4773 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-h5rn7" podStartSLOduration=2.201219921 podStartE2EDuration="6.699111376s" podCreationTimestamp="2026-01-22 14:57:32 +0000 UTC" firstStartedPulling="2026-01-22 14:57:33.624500506 +0000 UTC m=+10961.202616331" lastFinishedPulling="2026-01-22 14:57:38.122391961 +0000 UTC m=+10965.700507786" observedRunningTime="2026-01-22 14:57:38.696808591 +0000 UTC m=+10966.274924436" watchObservedRunningTime="2026-01-22 14:57:38.699111376 +0000 UTC m=+10966.277227201" Jan 22 14:57:42 crc kubenswrapper[4773]: I0122 14:57:42.532581 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-h5rn7" Jan 22 14:57:42 crc kubenswrapper[4773]: I0122 14:57:42.533023 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-h5rn7" Jan 22 14:57:43 crc kubenswrapper[4773]: I0122 14:57:43.585553 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-h5rn7" podUID="5f0394de-77b2-4669-99e8-18906e894874" containerName="registry-server" probeResult="failure" output=< Jan 22 14:57:43 crc kubenswrapper[4773]: timeout: failed to connect service ":50051" within 1s Jan 22 14:57:43 crc kubenswrapper[4773]: > Jan 22 14:57:52 crc kubenswrapper[4773]: I0122 14:57:52.635752 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-h5rn7" Jan 22 14:57:52 crc kubenswrapper[4773]: I0122 14:57:52.740331 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-h5rn7" Jan 22 14:57:52 crc kubenswrapper[4773]: I0122 14:57:52.910564 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h5rn7"] Jan 22 14:57:53 crc kubenswrapper[4773]: I0122 14:57:53.898150 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-h5rn7" podUID="5f0394de-77b2-4669-99e8-18906e894874" containerName="registry-server" containerID="cri-o://b21fe309d97644ad8e41b9af6d398023ce08ee91d11ab9392c2658aa1e62209d" gracePeriod=2 Jan 22 14:57:54 crc kubenswrapper[4773]: I0122 14:57:54.913328 4773 generic.go:334] "Generic (PLEG): container finished" podID="5f0394de-77b2-4669-99e8-18906e894874" containerID="b21fe309d97644ad8e41b9af6d398023ce08ee91d11ab9392c2658aa1e62209d" exitCode=0 Jan 22 14:57:54 crc kubenswrapper[4773]: I0122 14:57:54.913434 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h5rn7" event={"ID":"5f0394de-77b2-4669-99e8-18906e894874","Type":"ContainerDied","Data":"b21fe309d97644ad8e41b9af6d398023ce08ee91d11ab9392c2658aa1e62209d"} Jan 22 14:57:54 crc kubenswrapper[4773]: I0122 14:57:54.913770 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h5rn7" event={"ID":"5f0394de-77b2-4669-99e8-18906e894874","Type":"ContainerDied","Data":"56f5ddbdc2b9e7a1a7675946e191e1206b36c1e8ad718808cc398a2398fba856"} Jan 22 14:57:54 crc kubenswrapper[4773]: I0122 14:57:54.913792 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="56f5ddbdc2b9e7a1a7675946e191e1206b36c1e8ad718808cc398a2398fba856" Jan 22 14:57:55 crc kubenswrapper[4773]: I0122 14:57:55.018858 4773 util.go:48] "No ready sandbox for pod can be found. 
Jan 22 14:57:55 crc kubenswrapper[4773]: I0122 14:57:55.209209 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f0394de-77b2-4669-99e8-18906e894874-utilities\") pod \"5f0394de-77b2-4669-99e8-18906e894874\" (UID: \"5f0394de-77b2-4669-99e8-18906e894874\") "
Jan 22 14:57:55 crc kubenswrapper[4773]: I0122 14:57:55.209347 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2qxz\" (UniqueName: \"kubernetes.io/projected/5f0394de-77b2-4669-99e8-18906e894874-kube-api-access-j2qxz\") pod \"5f0394de-77b2-4669-99e8-18906e894874\" (UID: \"5f0394de-77b2-4669-99e8-18906e894874\") "
Jan 22 14:57:55 crc kubenswrapper[4773]: I0122 14:57:55.209422 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f0394de-77b2-4669-99e8-18906e894874-catalog-content\") pod \"5f0394de-77b2-4669-99e8-18906e894874\" (UID: \"5f0394de-77b2-4669-99e8-18906e894874\") "
Jan 22 14:57:55 crc kubenswrapper[4773]: I0122 14:57:55.210521 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f0394de-77b2-4669-99e8-18906e894874-utilities" (OuterVolumeSpecName: "utilities") pod "5f0394de-77b2-4669-99e8-18906e894874" (UID: "5f0394de-77b2-4669-99e8-18906e894874"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 14:57:55 crc kubenswrapper[4773]: I0122 14:57:55.214725 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f0394de-77b2-4669-99e8-18906e894874-kube-api-access-j2qxz" (OuterVolumeSpecName: "kube-api-access-j2qxz") pod "5f0394de-77b2-4669-99e8-18906e894874" (UID: "5f0394de-77b2-4669-99e8-18906e894874"). InnerVolumeSpecName "kube-api-access-j2qxz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 14:57:55 crc kubenswrapper[4773]: I0122 14:57:55.312096 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f0394de-77b2-4669-99e8-18906e894874-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 14:57:55 crc kubenswrapper[4773]: I0122 14:57:55.312138 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2qxz\" (UniqueName: \"kubernetes.io/projected/5f0394de-77b2-4669-99e8-18906e894874-kube-api-access-j2qxz\") on node \"crc\" DevicePath \"\""
Jan 22 14:57:55 crc kubenswrapper[4773]: I0122 14:57:55.332579 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f0394de-77b2-4669-99e8-18906e894874-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5f0394de-77b2-4669-99e8-18906e894874" (UID: "5f0394de-77b2-4669-99e8-18906e894874"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 14:57:55 crc kubenswrapper[4773]: I0122 14:57:55.414870 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f0394de-77b2-4669-99e8-18906e894874-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 14:57:55 crc kubenswrapper[4773]: I0122 14:57:55.929073 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h5rn7"
Jan 22 14:57:55 crc kubenswrapper[4773]: I0122 14:57:55.984263 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h5rn7"]
Jan 22 14:57:55 crc kubenswrapper[4773]: I0122 14:57:55.994885 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-h5rn7"]
Jan 22 14:57:56 crc kubenswrapper[4773]: I0122 14:57:56.685146 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f0394de-77b2-4669-99e8-18906e894874" path="/var/lib/kubelet/pods/5f0394de-77b2-4669-99e8-18906e894874/volumes"
Jan 22 14:58:04 crc kubenswrapper[4773]: I0122 14:58:04.073947 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 14:58:04 crc kubenswrapper[4773]: I0122 14:58:04.074459 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 14:58:30 crc kubenswrapper[4773]: I0122 14:58:30.508932 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-m2pnx"]
Jan 22 14:58:30 crc kubenswrapper[4773]: E0122 14:58:30.510027 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f0394de-77b2-4669-99e8-18906e894874" containerName="extract-content"
Jan 22 14:58:30 crc kubenswrapper[4773]: I0122 14:58:30.510043 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f0394de-77b2-4669-99e8-18906e894874" containerName="extract-content"
Jan 22 14:58:30 crc kubenswrapper[4773]: E0122 14:58:30.510065 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f0394de-77b2-4669-99e8-18906e894874" containerName="registry-server"
Jan 22 14:58:30 crc kubenswrapper[4773]: I0122 14:58:30.510072 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f0394de-77b2-4669-99e8-18906e894874" containerName="registry-server"
Jan 22 14:58:30 crc kubenswrapper[4773]: E0122 14:58:30.510116 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f0394de-77b2-4669-99e8-18906e894874" containerName="extract-utilities"
Jan 22 14:58:30 crc kubenswrapper[4773]: I0122 14:58:30.510125 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f0394de-77b2-4669-99e8-18906e894874" containerName="extract-utilities"
Jan 22 14:58:30 crc kubenswrapper[4773]: I0122 14:58:30.510438 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f0394de-77b2-4669-99e8-18906e894874" containerName="registry-server"
Jan 22 14:58:30 crc kubenswrapper[4773]: I0122 14:58:30.514616 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m2pnx"
Jan 22 14:58:30 crc kubenswrapper[4773]: I0122 14:58:30.537894 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m2pnx"]
Jan 22 14:58:30 crc kubenswrapper[4773]: I0122 14:58:30.607050 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78711ca6-0402-43e5-a61c-786b55a789b9-catalog-content\") pod \"certified-operators-m2pnx\" (UID: \"78711ca6-0402-43e5-a61c-786b55a789b9\") " pod="openshift-marketplace/certified-operators-m2pnx"
Jan 22 14:58:30 crc kubenswrapper[4773]: I0122 14:58:30.607095 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8s8pv\" (UniqueName: \"kubernetes.io/projected/78711ca6-0402-43e5-a61c-786b55a789b9-kube-api-access-8s8pv\") pod \"certified-operators-m2pnx\" (UID: \"78711ca6-0402-43e5-a61c-786b55a789b9\") " pod="openshift-marketplace/certified-operators-m2pnx"
Jan 22 14:58:30 crc kubenswrapper[4773]: I0122 14:58:30.607325 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78711ca6-0402-43e5-a61c-786b55a789b9-utilities\") pod \"certified-operators-m2pnx\" (UID: \"78711ca6-0402-43e5-a61c-786b55a789b9\") " pod="openshift-marketplace/certified-operators-m2pnx"
Jan 22 14:58:30 crc kubenswrapper[4773]: I0122 14:58:30.709620 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78711ca6-0402-43e5-a61c-786b55a789b9-catalog-content\") pod \"certified-operators-m2pnx\" (UID: \"78711ca6-0402-43e5-a61c-786b55a789b9\") " pod="openshift-marketplace/certified-operators-m2pnx"
Jan 22 14:58:30 crc kubenswrapper[4773]: I0122 14:58:30.709680 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8s8pv\" (UniqueName: \"kubernetes.io/projected/78711ca6-0402-43e5-a61c-786b55a789b9-kube-api-access-8s8pv\") pod \"certified-operators-m2pnx\" (UID: \"78711ca6-0402-43e5-a61c-786b55a789b9\") " pod="openshift-marketplace/certified-operators-m2pnx"
Jan 22 14:58:30 crc kubenswrapper[4773]: I0122 14:58:30.709748 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78711ca6-0402-43e5-a61c-786b55a789b9-utilities\") pod \"certified-operators-m2pnx\" (UID: \"78711ca6-0402-43e5-a61c-786b55a789b9\") " pod="openshift-marketplace/certified-operators-m2pnx"
Jan 22 14:58:30 crc kubenswrapper[4773]: I0122 14:58:30.710264 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78711ca6-0402-43e5-a61c-786b55a789b9-catalog-content\") pod \"certified-operators-m2pnx\" (UID: \"78711ca6-0402-43e5-a61c-786b55a789b9\") " pod="openshift-marketplace/certified-operators-m2pnx"
Jan 22 14:58:30 crc kubenswrapper[4773]: I0122 14:58:30.710356 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78711ca6-0402-43e5-a61c-786b55a789b9-utilities\") pod \"certified-operators-m2pnx\" (UID: \"78711ca6-0402-43e5-a61c-786b55a789b9\") " pod="openshift-marketplace/certified-operators-m2pnx"
Jan 22 14:58:30 crc kubenswrapper[4773]: I0122 14:58:30.736644 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8s8pv\" (UniqueName: \"kubernetes.io/projected/78711ca6-0402-43e5-a61c-786b55a789b9-kube-api-access-8s8pv\") pod \"certified-operators-m2pnx\" (UID: \"78711ca6-0402-43e5-a61c-786b55a789b9\") " pod="openshift-marketplace/certified-operators-m2pnx"
Jan 22 14:58:30 crc kubenswrapper[4773]: I0122 14:58:30.855388 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m2pnx"
Jan 22 14:58:31 crc kubenswrapper[4773]: I0122 14:58:31.432963 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m2pnx"]
Jan 22 14:58:31 crc kubenswrapper[4773]: I0122 14:58:31.472537 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2pnx" event={"ID":"78711ca6-0402-43e5-a61c-786b55a789b9","Type":"ContainerStarted","Data":"ecd843e113b31384196aa1898d5e3013527a69085504417a76303de053657ad6"}
Jan 22 14:58:32 crc kubenswrapper[4773]: I0122 14:58:32.489359 4773 generic.go:334] "Generic (PLEG): container finished" podID="78711ca6-0402-43e5-a61c-786b55a789b9" containerID="71e853b686b5e551d2af9a8d8abb0de9831a3cfbf259922692ca67c371385fe1" exitCode=0
Jan 22 14:58:32 crc kubenswrapper[4773]: I0122 14:58:32.489825 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2pnx" event={"ID":"78711ca6-0402-43e5-a61c-786b55a789b9","Type":"ContainerDied","Data":"71e853b686b5e551d2af9a8d8abb0de9831a3cfbf259922692ca67c371385fe1"}
Jan 22 14:58:34 crc kubenswrapper[4773]: I0122 14:58:34.079499 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 14:58:34 crc kubenswrapper[4773]: I0122 14:58:34.081468 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 14:58:34 crc kubenswrapper[4773]: I0122 14:58:34.081649 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5"
Jan 22 14:58:34 crc kubenswrapper[4773]: I0122 14:58:34.082667 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"99cb1843fbb7f938fc4868c933a2003b999605e434e479325c259424ac7ace6f"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 22 14:58:34 crc kubenswrapper[4773]: I0122 14:58:34.082819 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://99cb1843fbb7f938fc4868c933a2003b999605e434e479325c259424ac7ace6f" gracePeriod=600
Jan 22 14:58:34 crc kubenswrapper[4773]: I0122 14:58:34.520238 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="99cb1843fbb7f938fc4868c933a2003b999605e434e479325c259424ac7ace6f" exitCode=0
containerID="99cb1843fbb7f938fc4868c933a2003b999605e434e479325c259424ac7ace6f" exitCode=0 Jan 22 14:58:34 crc kubenswrapper[4773]: I0122 14:58:34.520404 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"99cb1843fbb7f938fc4868c933a2003b999605e434e479325c259424ac7ace6f"} Jan 22 14:58:34 crc kubenswrapper[4773]: I0122 14:58:34.520448 4773 scope.go:117] "RemoveContainer" containerID="be9075b344ddfe267a1a9eb779369ae2178461895d255a784d43c629b35fcdf5" Jan 22 14:58:34 crc kubenswrapper[4773]: I0122 14:58:34.526586 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2pnx" event={"ID":"78711ca6-0402-43e5-a61c-786b55a789b9","Type":"ContainerDied","Data":"5e4ff111731923ce31c4bafb6cbcbbd5c7c6509ec95540819c56b476e1cc6300"} Jan 22 14:58:34 crc kubenswrapper[4773]: I0122 14:58:34.526701 4773 generic.go:334] "Generic (PLEG): container finished" podID="78711ca6-0402-43e5-a61c-786b55a789b9" containerID="5e4ff111731923ce31c4bafb6cbcbbd5c7c6509ec95540819c56b476e1cc6300" exitCode=0 Jan 22 14:58:35 crc kubenswrapper[4773]: I0122 14:58:35.539738 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf"} Jan 22 14:58:36 crc kubenswrapper[4773]: I0122 14:58:36.551032 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2pnx" event={"ID":"78711ca6-0402-43e5-a61c-786b55a789b9","Type":"ContainerStarted","Data":"f65a40783ae6960b482a518a85fa2cadca44b5678a37115473a1194acb46e5e6"} Jan 22 14:58:36 crc kubenswrapper[4773]: I0122 14:58:36.583568 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-m2pnx" podStartSLOduration=3.694410534 podStartE2EDuration="6.58354388s" podCreationTimestamp="2026-01-22 14:58:30 +0000 UTC" firstStartedPulling="2026-01-22 14:58:32.492505414 +0000 UTC m=+11020.070621249" lastFinishedPulling="2026-01-22 14:58:35.38163877 +0000 UTC m=+11022.959754595" observedRunningTime="2026-01-22 14:58:36.574946797 +0000 UTC m=+11024.153062622" watchObservedRunningTime="2026-01-22 14:58:36.58354388 +0000 UTC m=+11024.161659705" Jan 22 14:58:40 crc kubenswrapper[4773]: I0122 14:58:40.856565 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-m2pnx" Jan 22 14:58:40 crc kubenswrapper[4773]: I0122 14:58:40.857495 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-m2pnx" Jan 22 14:58:40 crc kubenswrapper[4773]: I0122 14:58:40.960045 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-m2pnx" Jan 22 14:58:41 crc kubenswrapper[4773]: I0122 14:58:41.677825 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-m2pnx" Jan 22 14:58:41 crc kubenswrapper[4773]: I0122 14:58:41.736707 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m2pnx"] Jan 22 14:58:43 crc kubenswrapper[4773]: I0122 14:58:43.669257 4773 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/certified-operators-m2pnx" podUID="78711ca6-0402-43e5-a61c-786b55a789b9" containerName="registry-server" containerID="cri-o://f65a40783ae6960b482a518a85fa2cadca44b5678a37115473a1194acb46e5e6" gracePeriod=2 Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.222127 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m2pnx" Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.281791 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78711ca6-0402-43e5-a61c-786b55a789b9-catalog-content\") pod \"78711ca6-0402-43e5-a61c-786b55a789b9\" (UID: \"78711ca6-0402-43e5-a61c-786b55a789b9\") " Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.282130 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8s8pv\" (UniqueName: \"kubernetes.io/projected/78711ca6-0402-43e5-a61c-786b55a789b9-kube-api-access-8s8pv\") pod \"78711ca6-0402-43e5-a61c-786b55a789b9\" (UID: \"78711ca6-0402-43e5-a61c-786b55a789b9\") " Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.282179 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78711ca6-0402-43e5-a61c-786b55a789b9-utilities\") pod \"78711ca6-0402-43e5-a61c-786b55a789b9\" (UID: \"78711ca6-0402-43e5-a61c-786b55a789b9\") " Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.283789 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78711ca6-0402-43e5-a61c-786b55a789b9-utilities" (OuterVolumeSpecName: "utilities") pod "78711ca6-0402-43e5-a61c-786b55a789b9" (UID: "78711ca6-0402-43e5-a61c-786b55a789b9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.289347 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78711ca6-0402-43e5-a61c-786b55a789b9-kube-api-access-8s8pv" (OuterVolumeSpecName: "kube-api-access-8s8pv") pod "78711ca6-0402-43e5-a61c-786b55a789b9" (UID: "78711ca6-0402-43e5-a61c-786b55a789b9"). InnerVolumeSpecName "kube-api-access-8s8pv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.360772 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78711ca6-0402-43e5-a61c-786b55a789b9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "78711ca6-0402-43e5-a61c-786b55a789b9" (UID: "78711ca6-0402-43e5-a61c-786b55a789b9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.386199 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78711ca6-0402-43e5-a61c-786b55a789b9-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.386265 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8s8pv\" (UniqueName: \"kubernetes.io/projected/78711ca6-0402-43e5-a61c-786b55a789b9-kube-api-access-8s8pv\") on node \"crc\" DevicePath \"\"" Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.386323 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78711ca6-0402-43e5-a61c-786b55a789b9-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.681917 4773 generic.go:334] "Generic (PLEG): container finished" podID="78711ca6-0402-43e5-a61c-786b55a789b9" containerID="f65a40783ae6960b482a518a85fa2cadca44b5678a37115473a1194acb46e5e6" exitCode=0 Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.681967 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2pnx" event={"ID":"78711ca6-0402-43e5-a61c-786b55a789b9","Type":"ContainerDied","Data":"f65a40783ae6960b482a518a85fa2cadca44b5678a37115473a1194acb46e5e6"} Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.681984 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m2pnx" Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.682002 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m2pnx" event={"ID":"78711ca6-0402-43e5-a61c-786b55a789b9","Type":"ContainerDied","Data":"ecd843e113b31384196aa1898d5e3013527a69085504417a76303de053657ad6"} Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.682025 4773 scope.go:117] "RemoveContainer" containerID="f65a40783ae6960b482a518a85fa2cadca44b5678a37115473a1194acb46e5e6" Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.717091 4773 scope.go:117] "RemoveContainer" containerID="5e4ff111731923ce31c4bafb6cbcbbd5c7c6509ec95540819c56b476e1cc6300" Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.750323 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m2pnx"] Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.758918 4773 scope.go:117] "RemoveContainer" containerID="71e853b686b5e551d2af9a8d8abb0de9831a3cfbf259922692ca67c371385fe1" Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.768175 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-m2pnx"] Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.804393 4773 scope.go:117] "RemoveContainer" containerID="f65a40783ae6960b482a518a85fa2cadca44b5678a37115473a1194acb46e5e6" Jan 22 14:58:44 crc kubenswrapper[4773]: E0122 14:58:44.804893 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f65a40783ae6960b482a518a85fa2cadca44b5678a37115473a1194acb46e5e6\": container with ID starting with f65a40783ae6960b482a518a85fa2cadca44b5678a37115473a1194acb46e5e6 not found: ID does not exist" containerID="f65a40783ae6960b482a518a85fa2cadca44b5678a37115473a1194acb46e5e6" Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.804921 
4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f65a40783ae6960b482a518a85fa2cadca44b5678a37115473a1194acb46e5e6"} err="failed to get container status \"f65a40783ae6960b482a518a85fa2cadca44b5678a37115473a1194acb46e5e6\": rpc error: code = NotFound desc = could not find container \"f65a40783ae6960b482a518a85fa2cadca44b5678a37115473a1194acb46e5e6\": container with ID starting with f65a40783ae6960b482a518a85fa2cadca44b5678a37115473a1194acb46e5e6 not found: ID does not exist" Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.804938 4773 scope.go:117] "RemoveContainer" containerID="5e4ff111731923ce31c4bafb6cbcbbd5c7c6509ec95540819c56b476e1cc6300" Jan 22 14:58:44 crc kubenswrapper[4773]: E0122 14:58:44.805203 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e4ff111731923ce31c4bafb6cbcbbd5c7c6509ec95540819c56b476e1cc6300\": container with ID starting with 5e4ff111731923ce31c4bafb6cbcbbd5c7c6509ec95540819c56b476e1cc6300 not found: ID does not exist" containerID="5e4ff111731923ce31c4bafb6cbcbbd5c7c6509ec95540819c56b476e1cc6300" Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.805221 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e4ff111731923ce31c4bafb6cbcbbd5c7c6509ec95540819c56b476e1cc6300"} err="failed to get container status \"5e4ff111731923ce31c4bafb6cbcbbd5c7c6509ec95540819c56b476e1cc6300\": rpc error: code = NotFound desc = could not find container \"5e4ff111731923ce31c4bafb6cbcbbd5c7c6509ec95540819c56b476e1cc6300\": container with ID starting with 5e4ff111731923ce31c4bafb6cbcbbd5c7c6509ec95540819c56b476e1cc6300 not found: ID does not exist" Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.805236 4773 scope.go:117] "RemoveContainer" containerID="71e853b686b5e551d2af9a8d8abb0de9831a3cfbf259922692ca67c371385fe1" Jan 22 14:58:44 crc kubenswrapper[4773]: E0122 14:58:44.805557 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71e853b686b5e551d2af9a8d8abb0de9831a3cfbf259922692ca67c371385fe1\": container with ID starting with 71e853b686b5e551d2af9a8d8abb0de9831a3cfbf259922692ca67c371385fe1 not found: ID does not exist" containerID="71e853b686b5e551d2af9a8d8abb0de9831a3cfbf259922692ca67c371385fe1" Jan 22 14:58:44 crc kubenswrapper[4773]: I0122 14:58:44.805576 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71e853b686b5e551d2af9a8d8abb0de9831a3cfbf259922692ca67c371385fe1"} err="failed to get container status \"71e853b686b5e551d2af9a8d8abb0de9831a3cfbf259922692ca67c371385fe1\": rpc error: code = NotFound desc = could not find container \"71e853b686b5e551d2af9a8d8abb0de9831a3cfbf259922692ca67c371385fe1\": container with ID starting with 71e853b686b5e551d2af9a8d8abb0de9831a3cfbf259922692ca67c371385fe1 not found: ID does not exist" Jan 22 14:58:46 crc kubenswrapper[4773]: I0122 14:58:46.674767 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78711ca6-0402-43e5-a61c-786b55a789b9" path="/var/lib/kubelet/pods/78711ca6-0402-43e5-a61c-786b55a789b9/volumes" Jan 22 15:00:00 crc kubenswrapper[4773]: I0122 15:00:00.168695 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484900-vv578"] Jan 22 15:00:00 crc kubenswrapper[4773]: E0122 15:00:00.169685 4773 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="78711ca6-0402-43e5-a61c-786b55a789b9" containerName="extract-content" Jan 22 15:00:00 crc kubenswrapper[4773]: I0122 15:00:00.169700 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="78711ca6-0402-43e5-a61c-786b55a789b9" containerName="extract-content" Jan 22 15:00:00 crc kubenswrapper[4773]: E0122 15:00:00.169719 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78711ca6-0402-43e5-a61c-786b55a789b9" containerName="registry-server" Jan 22 15:00:00 crc kubenswrapper[4773]: I0122 15:00:00.169725 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="78711ca6-0402-43e5-a61c-786b55a789b9" containerName="registry-server" Jan 22 15:00:00 crc kubenswrapper[4773]: E0122 15:00:00.169741 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78711ca6-0402-43e5-a61c-786b55a789b9" containerName="extract-utilities" Jan 22 15:00:00 crc kubenswrapper[4773]: I0122 15:00:00.169748 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="78711ca6-0402-43e5-a61c-786b55a789b9" containerName="extract-utilities" Jan 22 15:00:00 crc kubenswrapper[4773]: I0122 15:00:00.169962 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="78711ca6-0402-43e5-a61c-786b55a789b9" containerName="registry-server" Jan 22 15:00:00 crc kubenswrapper[4773]: I0122 15:00:00.170867 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484900-vv578" Jan 22 15:00:00 crc kubenswrapper[4773]: I0122 15:00:00.173130 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 15:00:00 crc kubenswrapper[4773]: I0122 15:00:00.173694 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 15:00:00 crc kubenswrapper[4773]: I0122 15:00:00.194336 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484900-vv578"] Jan 22 15:00:00 crc kubenswrapper[4773]: I0122 15:00:00.212670 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d013539e-176a-4ac4-aaef-512328b2794d-config-volume\") pod \"collect-profiles-29484900-vv578\" (UID: \"d013539e-176a-4ac4-aaef-512328b2794d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484900-vv578" Jan 22 15:00:00 crc kubenswrapper[4773]: I0122 15:00:00.212814 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d013539e-176a-4ac4-aaef-512328b2794d-secret-volume\") pod \"collect-profiles-29484900-vv578\" (UID: \"d013539e-176a-4ac4-aaef-512328b2794d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484900-vv578" Jan 22 15:00:00 crc kubenswrapper[4773]: I0122 15:00:00.212852 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-978c9\" (UniqueName: \"kubernetes.io/projected/d013539e-176a-4ac4-aaef-512328b2794d-kube-api-access-978c9\") pod \"collect-profiles-29484900-vv578\" (UID: \"d013539e-176a-4ac4-aaef-512328b2794d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484900-vv578" Jan 22 15:00:00 crc kubenswrapper[4773]: I0122 15:00:00.315584 4773 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d013539e-176a-4ac4-aaef-512328b2794d-config-volume\") pod \"collect-profiles-29484900-vv578\" (UID: \"d013539e-176a-4ac4-aaef-512328b2794d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484900-vv578" Jan 22 15:00:00 crc kubenswrapper[4773]: I0122 15:00:00.315696 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d013539e-176a-4ac4-aaef-512328b2794d-secret-volume\") pod \"collect-profiles-29484900-vv578\" (UID: \"d013539e-176a-4ac4-aaef-512328b2794d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484900-vv578" Jan 22 15:00:00 crc kubenswrapper[4773]: I0122 15:00:00.315728 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-978c9\" (UniqueName: \"kubernetes.io/projected/d013539e-176a-4ac4-aaef-512328b2794d-kube-api-access-978c9\") pod \"collect-profiles-29484900-vv578\" (UID: \"d013539e-176a-4ac4-aaef-512328b2794d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484900-vv578" Jan 22 15:00:00 crc kubenswrapper[4773]: I0122 15:00:00.316815 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d013539e-176a-4ac4-aaef-512328b2794d-config-volume\") pod \"collect-profiles-29484900-vv578\" (UID: \"d013539e-176a-4ac4-aaef-512328b2794d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484900-vv578" Jan 22 15:00:00 crc kubenswrapper[4773]: I0122 15:00:00.322520 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d013539e-176a-4ac4-aaef-512328b2794d-secret-volume\") pod \"collect-profiles-29484900-vv578\" (UID: \"d013539e-176a-4ac4-aaef-512328b2794d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484900-vv578" Jan 22 15:00:00 crc kubenswrapper[4773]: I0122 15:00:00.334496 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-978c9\" (UniqueName: \"kubernetes.io/projected/d013539e-176a-4ac4-aaef-512328b2794d-kube-api-access-978c9\") pod \"collect-profiles-29484900-vv578\" (UID: \"d013539e-176a-4ac4-aaef-512328b2794d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484900-vv578" Jan 22 15:00:00 crc kubenswrapper[4773]: I0122 15:00:00.511014 4773 util.go:30] "No sandbox for pod can be found. 
Jan 22 15:00:01 crc kubenswrapper[4773]: I0122 15:00:01.025002 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484900-vv578"]
Jan 22 15:00:01 crc kubenswrapper[4773]: W0122 15:00:01.033628 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd013539e_176a_4ac4_aaef_512328b2794d.slice/crio-dcfec25132801d23d4cef42ab8e8235248851abe5c3850352c31cb63b78a7bb9 WatchSource:0}: Error finding container dcfec25132801d23d4cef42ab8e8235248851abe5c3850352c31cb63b78a7bb9: Status 404 returned error can't find the container with id dcfec25132801d23d4cef42ab8e8235248851abe5c3850352c31cb63b78a7bb9
Jan 22 15:00:01 crc kubenswrapper[4773]: I0122 15:00:01.974034 4773 generic.go:334] "Generic (PLEG): container finished" podID="d013539e-176a-4ac4-aaef-512328b2794d" containerID="f2ed98c56bdf99b34d1131b1bdea0785cc304f65a1df9861e8834613cc22852c" exitCode=0
Jan 22 15:00:01 crc kubenswrapper[4773]: I0122 15:00:01.974462 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484900-vv578" event={"ID":"d013539e-176a-4ac4-aaef-512328b2794d","Type":"ContainerDied","Data":"f2ed98c56bdf99b34d1131b1bdea0785cc304f65a1df9861e8834613cc22852c"}
Jan 22 15:00:01 crc kubenswrapper[4773]: I0122 15:00:01.974510 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484900-vv578" event={"ID":"d013539e-176a-4ac4-aaef-512328b2794d","Type":"ContainerStarted","Data":"dcfec25132801d23d4cef42ab8e8235248851abe5c3850352c31cb63b78a7bb9"}
Jan 22 15:00:03 crc kubenswrapper[4773]: I0122 15:00:03.411394 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484900-vv578"
Jan 22 15:00:03 crc kubenswrapper[4773]: I0122 15:00:03.515433 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-978c9\" (UniqueName: \"kubernetes.io/projected/d013539e-176a-4ac4-aaef-512328b2794d-kube-api-access-978c9\") pod \"d013539e-176a-4ac4-aaef-512328b2794d\" (UID: \"d013539e-176a-4ac4-aaef-512328b2794d\") "
Jan 22 15:00:03 crc kubenswrapper[4773]: I0122 15:00:03.515846 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d013539e-176a-4ac4-aaef-512328b2794d-config-volume\") pod \"d013539e-176a-4ac4-aaef-512328b2794d\" (UID: \"d013539e-176a-4ac4-aaef-512328b2794d\") "
Jan 22 15:00:03 crc kubenswrapper[4773]: I0122 15:00:03.515986 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d013539e-176a-4ac4-aaef-512328b2794d-secret-volume\") pod \"d013539e-176a-4ac4-aaef-512328b2794d\" (UID: \"d013539e-176a-4ac4-aaef-512328b2794d\") "
Jan 22 15:00:03 crc kubenswrapper[4773]: I0122 15:00:03.517480 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d013539e-176a-4ac4-aaef-512328b2794d-config-volume" (OuterVolumeSpecName: "config-volume") pod "d013539e-176a-4ac4-aaef-512328b2794d" (UID: "d013539e-176a-4ac4-aaef-512328b2794d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 22 15:00:03 crc kubenswrapper[4773]: I0122 15:00:03.522044 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d013539e-176a-4ac4-aaef-512328b2794d-kube-api-access-978c9" (OuterVolumeSpecName: "kube-api-access-978c9") pod "d013539e-176a-4ac4-aaef-512328b2794d" (UID: "d013539e-176a-4ac4-aaef-512328b2794d"). InnerVolumeSpecName "kube-api-access-978c9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 15:00:03 crc kubenswrapper[4773]: I0122 15:00:03.522324 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d013539e-176a-4ac4-aaef-512328b2794d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d013539e-176a-4ac4-aaef-512328b2794d" (UID: "d013539e-176a-4ac4-aaef-512328b2794d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 15:00:03 crc kubenswrapper[4773]: I0122 15:00:03.746941 4773 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d013539e-176a-4ac4-aaef-512328b2794d-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 22 15:00:03 crc kubenswrapper[4773]: I0122 15:00:03.746973 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-978c9\" (UniqueName: \"kubernetes.io/projected/d013539e-176a-4ac4-aaef-512328b2794d-kube-api-access-978c9\") on node \"crc\" DevicePath \"\""
Jan 22 15:00:03 crc kubenswrapper[4773]: I0122 15:00:03.746982 4773 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d013539e-176a-4ac4-aaef-512328b2794d-config-volume\") on node \"crc\" DevicePath \"\""
Jan 22 15:00:04 crc kubenswrapper[4773]: I0122 15:00:04.013901 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484900-vv578" event={"ID":"d013539e-176a-4ac4-aaef-512328b2794d","Type":"ContainerDied","Data":"dcfec25132801d23d4cef42ab8e8235248851abe5c3850352c31cb63b78a7bb9"}
Jan 22 15:00:04 crc kubenswrapper[4773]: I0122 15:00:04.013978 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dcfec25132801d23d4cef42ab8e8235248851abe5c3850352c31cb63b78a7bb9"
Jan 22 15:00:04 crc kubenswrapper[4773]: I0122 15:00:04.013995 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484900-vv578"
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484900-vv578" Jan 22 15:00:04 crc kubenswrapper[4773]: I0122 15:00:04.520225 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh"] Jan 22 15:00:04 crc kubenswrapper[4773]: I0122 15:00:04.541162 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484855-tcpsh"] Jan 22 15:00:04 crc kubenswrapper[4773]: I0122 15:00:04.673320 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdfaad31-846c-4442-8fdb-8abef2bed5df" path="/var/lib/kubelet/pods/cdfaad31-846c-4442-8fdb-8abef2bed5df/volumes" Jan 22 15:00:16 crc kubenswrapper[4773]: I0122 15:00:16.388560 4773 scope.go:117] "RemoveContainer" containerID="42799ccbd06f9c6b191863eabe94449398668c3f1816ee9998e95ded32776d60" Jan 22 15:01:00 crc kubenswrapper[4773]: I0122 15:01:00.157831 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29484901-swpn2"] Jan 22 15:01:00 crc kubenswrapper[4773]: E0122 15:01:00.159260 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d013539e-176a-4ac4-aaef-512328b2794d" containerName="collect-profiles" Jan 22 15:01:00 crc kubenswrapper[4773]: I0122 15:01:00.159319 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="d013539e-176a-4ac4-aaef-512328b2794d" containerName="collect-profiles" Jan 22 15:01:00 crc kubenswrapper[4773]: I0122 15:01:00.159679 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="d013539e-176a-4ac4-aaef-512328b2794d" containerName="collect-profiles" Jan 22 15:01:00 crc kubenswrapper[4773]: I0122 15:01:00.160749 4773 util.go:30] "No sandbox for pod can be found. 
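
Editor's note: the "SyncLoop ADD/UPDATE/DELETE/REMOVE" entries above are the kubelet reacting to pod objects arriving from the API server, while the "(PLEG)" entries relay container state changes back from the runtime. A minimal client-go sketch of the same API-side event stream, assuming in-cluster credentials and using the namespace from the log; this is an observer, not the kubelet's own sync loop:

```go
// Sketch: watch pod events corresponding to "SyncLoop ADD/UPDATE/DELETE".
package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
)

func main() {
	cfg, err := rest.InClusterConfig() // assumption: runs inside the cluster
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)
	w, err := client.CoreV1().Pods("openshift-operator-lifecycle-manager").
		Watch(context.Background(), metav1.ListOptions{})
	if err != nil {
		panic(err)
	}
	defer w.Stop()
	for ev := range w.ResultChan() {
		// ADDED/MODIFIED/DELETED map onto the SyncLoop ADD/UPDATE/DELETE lines.
		fmt.Println(ev.Type)
	}
}
```
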
Jan 22 15:01:00 crc kubenswrapper[4773]: I0122 15:01:00.171693 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29484901-swpn2"]
Jan 22 15:01:00 crc kubenswrapper[4773]: I0122 15:01:00.192167 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a16948c-f62e-40f0-aaed-8c306f8c6e32-combined-ca-bundle\") pod \"keystone-cron-29484901-swpn2\" (UID: \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\") " pod="openstack/keystone-cron-29484901-swpn2"
Jan 22 15:01:00 crc kubenswrapper[4773]: I0122 15:01:00.192469 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4t4s\" (UniqueName: \"kubernetes.io/projected/4a16948c-f62e-40f0-aaed-8c306f8c6e32-kube-api-access-q4t4s\") pod \"keystone-cron-29484901-swpn2\" (UID: \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\") " pod="openstack/keystone-cron-29484901-swpn2"
Jan 22 15:01:00 crc kubenswrapper[4773]: I0122 15:01:00.192611 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a16948c-f62e-40f0-aaed-8c306f8c6e32-config-data\") pod \"keystone-cron-29484901-swpn2\" (UID: \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\") " pod="openstack/keystone-cron-29484901-swpn2"
Jan 22 15:01:00 crc kubenswrapper[4773]: I0122 15:01:00.192644 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4a16948c-f62e-40f0-aaed-8c306f8c6e32-fernet-keys\") pod \"keystone-cron-29484901-swpn2\" (UID: \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\") " pod="openstack/keystone-cron-29484901-swpn2"
Jan 22 15:01:00 crc kubenswrapper[4773]: I0122 15:01:00.295781 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a16948c-f62e-40f0-aaed-8c306f8c6e32-combined-ca-bundle\") pod \"keystone-cron-29484901-swpn2\" (UID: \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\") " pod="openstack/keystone-cron-29484901-swpn2"
Jan 22 15:01:00 crc kubenswrapper[4773]: I0122 15:01:00.296051 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4t4s\" (UniqueName: \"kubernetes.io/projected/4a16948c-f62e-40f0-aaed-8c306f8c6e32-kube-api-access-q4t4s\") pod \"keystone-cron-29484901-swpn2\" (UID: \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\") " pod="openstack/keystone-cron-29484901-swpn2"
Jan 22 15:01:00 crc kubenswrapper[4773]: I0122 15:01:00.296173 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a16948c-f62e-40f0-aaed-8c306f8c6e32-config-data\") pod \"keystone-cron-29484901-swpn2\" (UID: \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\") " pod="openstack/keystone-cron-29484901-swpn2"
Jan 22 15:01:00 crc kubenswrapper[4773]: I0122 15:01:00.296273 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4a16948c-f62e-40f0-aaed-8c306f8c6e32-fernet-keys\") pod \"keystone-cron-29484901-swpn2\" (UID: \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\") " pod="openstack/keystone-cron-29484901-swpn2"
Jan 22 15:01:00 crc kubenswrapper[4773]: I0122 15:01:00.306387 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a16948c-f62e-40f0-aaed-8c306f8c6e32-config-data\") pod \"keystone-cron-29484901-swpn2\" (UID: \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\") " pod="openstack/keystone-cron-29484901-swpn2"
Jan 22 15:01:00 crc kubenswrapper[4773]: I0122 15:01:00.307185 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4a16948c-f62e-40f0-aaed-8c306f8c6e32-fernet-keys\") pod \"keystone-cron-29484901-swpn2\" (UID: \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\") " pod="openstack/keystone-cron-29484901-swpn2"
Jan 22 15:01:00 crc kubenswrapper[4773]: I0122 15:01:00.316018 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a16948c-f62e-40f0-aaed-8c306f8c6e32-combined-ca-bundle\") pod \"keystone-cron-29484901-swpn2\" (UID: \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\") " pod="openstack/keystone-cron-29484901-swpn2"
Jan 22 15:01:00 crc kubenswrapper[4773]: I0122 15:01:00.316130 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4t4s\" (UniqueName: \"kubernetes.io/projected/4a16948c-f62e-40f0-aaed-8c306f8c6e32-kube-api-access-q4t4s\") pod \"keystone-cron-29484901-swpn2\" (UID: \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\") " pod="openstack/keystone-cron-29484901-swpn2"
Jan 22 15:01:00 crc kubenswrapper[4773]: I0122 15:01:00.483862 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29484901-swpn2"
Jan 22 15:01:00 crc kubenswrapper[4773]: I0122 15:01:00.993693 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29484901-swpn2"]
Jan 22 15:01:01 crc kubenswrapper[4773]: I0122 15:01:01.477747 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29484901-swpn2" event={"ID":"4a16948c-f62e-40f0-aaed-8c306f8c6e32","Type":"ContainerStarted","Data":"7628e5e22672413dfc3241f868669efa14459c60ebe3a3e10ab4d38b51a02923"}
Jan 22 15:01:01 crc kubenswrapper[4773]: I0122 15:01:01.478080 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29484901-swpn2" event={"ID":"4a16948c-f62e-40f0-aaed-8c306f8c6e32","Type":"ContainerStarted","Data":"6963121b008b8cce52f6ae8a62af63579003c7064896c2facc789628c2d4ca7b"}
Jan 22 15:01:01 crc kubenswrapper[4773]: I0122 15:01:01.506688 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29484901-swpn2" podStartSLOduration=1.506628086 podStartE2EDuration="1.506628086s" podCreationTimestamp="2026-01-22 15:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 15:01:01.500866643 +0000 UTC m=+11169.078982468" watchObservedRunningTime="2026-01-22 15:01:01.506628086 +0000 UTC m=+11169.084743921"
Jan 22 15:01:03 crc kubenswrapper[4773]: I0122 15:01:03.515545 4773 generic.go:334] "Generic (PLEG): container finished" podID="4a16948c-f62e-40f0-aaed-8c306f8c6e32" containerID="7628e5e22672413dfc3241f868669efa14459c60ebe3a3e10ab4d38b51a02923" exitCode=0
Jan 22 15:01:03 crc kubenswrapper[4773]: I0122 15:01:03.515622 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29484901-swpn2" event={"ID":"4a16948c-f62e-40f0-aaed-8c306f8c6e32","Type":"ContainerDied","Data":"7628e5e22672413dfc3241f868669efa14459c60ebe3a3e10ab4d38b51a02923"}
Jan 22 15:01:04 crc kubenswrapper[4773]: I0122 15:01:04.074603 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 15:01:04 crc kubenswrapper[4773]: I0122 15:01:04.074767 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 15:01:04 crc kubenswrapper[4773]: I0122 15:01:04.984771 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29484901-swpn2"
Jan 22 15:01:05 crc kubenswrapper[4773]: I0122 15:01:05.133381 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4a16948c-f62e-40f0-aaed-8c306f8c6e32-fernet-keys\") pod \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\" (UID: \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\") "
Jan 22 15:01:05 crc kubenswrapper[4773]: I0122 15:01:05.133731 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a16948c-f62e-40f0-aaed-8c306f8c6e32-config-data\") pod \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\" (UID: \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\") "
Jan 22 15:01:05 crc kubenswrapper[4773]: I0122 15:01:05.133834 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a16948c-f62e-40f0-aaed-8c306f8c6e32-combined-ca-bundle\") pod \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\" (UID: \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\") "
Jan 22 15:01:05 crc kubenswrapper[4773]: I0122 15:01:05.133929 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4t4s\" (UniqueName: \"kubernetes.io/projected/4a16948c-f62e-40f0-aaed-8c306f8c6e32-kube-api-access-q4t4s\") pod \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\" (UID: \"4a16948c-f62e-40f0-aaed-8c306f8c6e32\") "
Jan 22 15:01:05 crc kubenswrapper[4773]: I0122 15:01:05.140731 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a16948c-f62e-40f0-aaed-8c306f8c6e32-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "4a16948c-f62e-40f0-aaed-8c306f8c6e32" (UID: "4a16948c-f62e-40f0-aaed-8c306f8c6e32"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 22 15:01:05 crc kubenswrapper[4773]: I0122 15:01:05.147848 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a16948c-f62e-40f0-aaed-8c306f8c6e32-kube-api-access-q4t4s" (OuterVolumeSpecName: "kube-api-access-q4t4s") pod "4a16948c-f62e-40f0-aaed-8c306f8c6e32" (UID: "4a16948c-f62e-40f0-aaed-8c306f8c6e32"). InnerVolumeSpecName "kube-api-access-q4t4s". PluginName "kubernetes.io/projected", VolumeGidValue ""
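
Editor's note: the failing liveness probe above targets http://127.0.0.1:8798/health. The machine-config-daemon's actual manifest is not part of this log, so the sketch below only reconstructs what such a probe looks like in Go types: the path and port come from the log, while the period and threshold are assumptions (the 30s spacing of the failures at 15:01:04, 15:01:34 and 15:02:04 is at least consistent with them):

```go
// Hedged reconstruction of the probe behind the failures above; values
// not visible in the log are marked as assumptions.
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

func main() {
	probe := corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{
				Path: "/health",            // from the log output
				Port: intstr.FromInt(8798), // from the log output
			},
		},
		PeriodSeconds:    30, // assumption, consistent with the 30s failure spacing
		FailureThreshold: 3,  // assumption, consistent with the restart at 15:02:04
	}
	fmt.Printf("%+v\n", probe)
}
```
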
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 15:01:05 crc kubenswrapper[4773]: I0122 15:01:05.166140 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a16948c-f62e-40f0-aaed-8c306f8c6e32-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4a16948c-f62e-40f0-aaed-8c306f8c6e32" (UID: "4a16948c-f62e-40f0-aaed-8c306f8c6e32"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 15:01:05 crc kubenswrapper[4773]: I0122 15:01:05.206041 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a16948c-f62e-40f0-aaed-8c306f8c6e32-config-data" (OuterVolumeSpecName: "config-data") pod "4a16948c-f62e-40f0-aaed-8c306f8c6e32" (UID: "4a16948c-f62e-40f0-aaed-8c306f8c6e32"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 15:01:05 crc kubenswrapper[4773]: I0122 15:01:05.236757 4773 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a16948c-f62e-40f0-aaed-8c306f8c6e32-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 15:01:05 crc kubenswrapper[4773]: I0122 15:01:05.236809 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4t4s\" (UniqueName: \"kubernetes.io/projected/4a16948c-f62e-40f0-aaed-8c306f8c6e32-kube-api-access-q4t4s\") on node \"crc\" DevicePath \"\"" Jan 22 15:01:05 crc kubenswrapper[4773]: I0122 15:01:05.236880 4773 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4a16948c-f62e-40f0-aaed-8c306f8c6e32-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 22 15:01:05 crc kubenswrapper[4773]: I0122 15:01:05.236904 4773 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a16948c-f62e-40f0-aaed-8c306f8c6e32-config-data\") on node \"crc\" DevicePath \"\"" Jan 22 15:01:05 crc kubenswrapper[4773]: I0122 15:01:05.549160 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29484901-swpn2" Jan 22 15:01:05 crc kubenswrapper[4773]: I0122 15:01:05.550099 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29484901-swpn2" event={"ID":"4a16948c-f62e-40f0-aaed-8c306f8c6e32","Type":"ContainerDied","Data":"6963121b008b8cce52f6ae8a62af63579003c7064896c2facc789628c2d4ca7b"} Jan 22 15:01:05 crc kubenswrapper[4773]: I0122 15:01:05.550166 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6963121b008b8cce52f6ae8a62af63579003c7064896c2facc789628c2d4ca7b" Jan 22 15:01:34 crc kubenswrapper[4773]: I0122 15:01:34.074747 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 15:01:34 crc kubenswrapper[4773]: I0122 15:01:34.075485 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 15:02:04 crc kubenswrapper[4773]: I0122 15:02:04.074621 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 15:02:04 crc kubenswrapper[4773]: I0122 15:02:04.075096 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 15:02:04 crc kubenswrapper[4773]: I0122 15:02:04.075156 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 15:02:04 crc kubenswrapper[4773]: I0122 15:02:04.076276 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 15:02:04 crc kubenswrapper[4773]: I0122 15:02:04.076385 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" gracePeriod=600 Jan 22 15:02:04 crc kubenswrapper[4773]: E0122 15:02:04.214649 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:02:04 crc kubenswrapper[4773]: I0122 15:02:04.241458 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" exitCode=0 Jan 22 15:02:04 crc kubenswrapper[4773]: I0122 15:02:04.241513 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf"} Jan 22 15:02:04 crc kubenswrapper[4773]: I0122 15:02:04.241585 4773 scope.go:117] "RemoveContainer" containerID="99cb1843fbb7f938fc4868c933a2003b999605e434e479325c259424ac7ace6f" Jan 22 15:02:04 crc kubenswrapper[4773]: I0122 15:02:04.242857 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:02:04 crc kubenswrapper[4773]: E0122 15:02:04.243331 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:02:16 crc kubenswrapper[4773]: I0122 15:02:16.658533 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:02:16 crc kubenswrapper[4773]: E0122 15:02:16.659577 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:02:27 crc kubenswrapper[4773]: I0122 15:02:27.661873 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:02:27 crc kubenswrapper[4773]: E0122 15:02:27.663388 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:02:35 crc kubenswrapper[4773]: I0122 15:02:35.244156 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vsqvl"] Jan 22 15:02:35 crc kubenswrapper[4773]: E0122 15:02:35.245692 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a16948c-f62e-40f0-aaed-8c306f8c6e32" containerName="keystone-cron" Jan 22 15:02:35 crc kubenswrapper[4773]: I0122 15:02:35.245718 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a16948c-f62e-40f0-aaed-8c306f8c6e32" containerName="keystone-cron" Jan 22 15:02:35 crc kubenswrapper[4773]: I0122 15:02:35.246103 4773 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="4a16948c-f62e-40f0-aaed-8c306f8c6e32" containerName="keystone-cron" Jan 22 15:02:35 crc kubenswrapper[4773]: I0122 15:02:35.248940 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vsqvl" Jan 22 15:02:35 crc kubenswrapper[4773]: I0122 15:02:35.269160 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vsqvl"] Jan 22 15:02:35 crc kubenswrapper[4773]: I0122 15:02:35.435655 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c581f645-e462-4c33-950b-826064550709-catalog-content\") pod \"redhat-marketplace-vsqvl\" (UID: \"c581f645-e462-4c33-950b-826064550709\") " pod="openshift-marketplace/redhat-marketplace-vsqvl" Jan 22 15:02:35 crc kubenswrapper[4773]: I0122 15:02:35.435806 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c581f645-e462-4c33-950b-826064550709-utilities\") pod \"redhat-marketplace-vsqvl\" (UID: \"c581f645-e462-4c33-950b-826064550709\") " pod="openshift-marketplace/redhat-marketplace-vsqvl" Jan 22 15:02:35 crc kubenswrapper[4773]: I0122 15:02:35.436203 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpbpq\" (UniqueName: \"kubernetes.io/projected/c581f645-e462-4c33-950b-826064550709-kube-api-access-fpbpq\") pod \"redhat-marketplace-vsqvl\" (UID: \"c581f645-e462-4c33-950b-826064550709\") " pod="openshift-marketplace/redhat-marketplace-vsqvl" Jan 22 15:02:35 crc kubenswrapper[4773]: I0122 15:02:35.537593 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c581f645-e462-4c33-950b-826064550709-catalog-content\") pod \"redhat-marketplace-vsqvl\" (UID: \"c581f645-e462-4c33-950b-826064550709\") " pod="openshift-marketplace/redhat-marketplace-vsqvl" Jan 22 15:02:35 crc kubenswrapper[4773]: I0122 15:02:35.537693 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c581f645-e462-4c33-950b-826064550709-utilities\") pod \"redhat-marketplace-vsqvl\" (UID: \"c581f645-e462-4c33-950b-826064550709\") " pod="openshift-marketplace/redhat-marketplace-vsqvl" Jan 22 15:02:35 crc kubenswrapper[4773]: I0122 15:02:35.537781 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpbpq\" (UniqueName: \"kubernetes.io/projected/c581f645-e462-4c33-950b-826064550709-kube-api-access-fpbpq\") pod \"redhat-marketplace-vsqvl\" (UID: \"c581f645-e462-4c33-950b-826064550709\") " pod="openshift-marketplace/redhat-marketplace-vsqvl" Jan 22 15:02:35 crc kubenswrapper[4773]: I0122 15:02:35.538479 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c581f645-e462-4c33-950b-826064550709-catalog-content\") pod \"redhat-marketplace-vsqvl\" (UID: \"c581f645-e462-4c33-950b-826064550709\") " pod="openshift-marketplace/redhat-marketplace-vsqvl" Jan 22 15:02:35 crc kubenswrapper[4773]: I0122 15:02:35.538743 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c581f645-e462-4c33-950b-826064550709-utilities\") pod 
\"redhat-marketplace-vsqvl\" (UID: \"c581f645-e462-4c33-950b-826064550709\") " pod="openshift-marketplace/redhat-marketplace-vsqvl" Jan 22 15:02:35 crc kubenswrapper[4773]: I0122 15:02:35.560758 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpbpq\" (UniqueName: \"kubernetes.io/projected/c581f645-e462-4c33-950b-826064550709-kube-api-access-fpbpq\") pod \"redhat-marketplace-vsqvl\" (UID: \"c581f645-e462-4c33-950b-826064550709\") " pod="openshift-marketplace/redhat-marketplace-vsqvl" Jan 22 15:02:35 crc kubenswrapper[4773]: I0122 15:02:35.599165 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vsqvl" Jan 22 15:02:36 crc kubenswrapper[4773]: I0122 15:02:36.131514 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vsqvl"] Jan 22 15:02:36 crc kubenswrapper[4773]: W0122 15:02:36.139915 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc581f645_e462_4c33_950b_826064550709.slice/crio-d762084e93f22d44c35c084199ddd83631b54d76503daf862bb79842952d8b23 WatchSource:0}: Error finding container d762084e93f22d44c35c084199ddd83631b54d76503daf862bb79842952d8b23: Status 404 returned error can't find the container with id d762084e93f22d44c35c084199ddd83631b54d76503daf862bb79842952d8b23 Jan 22 15:02:36 crc kubenswrapper[4773]: I0122 15:02:36.704995 4773 generic.go:334] "Generic (PLEG): container finished" podID="c581f645-e462-4c33-950b-826064550709" containerID="77d387055955cebf6cb86416e0a4c2e86cbd76003378939c321a731706ac9722" exitCode=0 Jan 22 15:02:36 crc kubenswrapper[4773]: I0122 15:02:36.705108 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vsqvl" event={"ID":"c581f645-e462-4c33-950b-826064550709","Type":"ContainerDied","Data":"77d387055955cebf6cb86416e0a4c2e86cbd76003378939c321a731706ac9722"} Jan 22 15:02:36 crc kubenswrapper[4773]: I0122 15:02:36.705334 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vsqvl" event={"ID":"c581f645-e462-4c33-950b-826064550709","Type":"ContainerStarted","Data":"d762084e93f22d44c35c084199ddd83631b54d76503daf862bb79842952d8b23"} Jan 22 15:02:36 crc kubenswrapper[4773]: I0122 15:02:36.709163 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 15:02:38 crc kubenswrapper[4773]: I0122 15:02:38.732297 4773 generic.go:334] "Generic (PLEG): container finished" podID="c581f645-e462-4c33-950b-826064550709" containerID="ad288796dbc19c2578b192cb191770a96eeeb62d0fd8dc867a0235004d4546b1" exitCode=0 Jan 22 15:02:38 crc kubenswrapper[4773]: I0122 15:02:38.732417 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vsqvl" event={"ID":"c581f645-e462-4c33-950b-826064550709","Type":"ContainerDied","Data":"ad288796dbc19c2578b192cb191770a96eeeb62d0fd8dc867a0235004d4546b1"} Jan 22 15:02:39 crc kubenswrapper[4773]: I0122 15:02:39.751785 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vsqvl" event={"ID":"c581f645-e462-4c33-950b-826064550709","Type":"ContainerStarted","Data":"aa9b8bc91dfe94d7f22b71a8736d71791e3df48c98233c793ed1f2256e7880e2"} Jan 22 15:02:39 crc kubenswrapper[4773]: I0122 15:02:39.781162 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-marketplace-vsqvl" podStartSLOduration=2.330124468 podStartE2EDuration="4.781134862s" podCreationTimestamp="2026-01-22 15:02:35 +0000 UTC" firstStartedPulling="2026-01-22 15:02:36.708880715 +0000 UTC m=+11264.286996540" lastFinishedPulling="2026-01-22 15:02:39.159891109 +0000 UTC m=+11266.738006934" observedRunningTime="2026-01-22 15:02:39.778046275 +0000 UTC m=+11267.356162120" watchObservedRunningTime="2026-01-22 15:02:39.781134862 +0000 UTC m=+11267.359250687" Jan 22 15:02:40 crc kubenswrapper[4773]: I0122 15:02:40.662976 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:02:40 crc kubenswrapper[4773]: E0122 15:02:40.663557 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:02:45 crc kubenswrapper[4773]: I0122 15:02:45.600006 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vsqvl" Jan 22 15:02:45 crc kubenswrapper[4773]: I0122 15:02:45.600394 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vsqvl" Jan 22 15:02:45 crc kubenswrapper[4773]: I0122 15:02:45.675652 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vsqvl" Jan 22 15:02:45 crc kubenswrapper[4773]: I0122 15:02:45.880930 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vsqvl" Jan 22 15:02:45 crc kubenswrapper[4773]: I0122 15:02:45.980863 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vsqvl"] Jan 22 15:02:47 crc kubenswrapper[4773]: I0122 15:02:47.861003 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vsqvl" podUID="c581f645-e462-4c33-950b-826064550709" containerName="registry-server" containerID="cri-o://aa9b8bc91dfe94d7f22b71a8736d71791e3df48c98233c793ed1f2256e7880e2" gracePeriod=2 Jan 22 15:02:48 crc kubenswrapper[4773]: I0122 15:02:48.510118 4773 util.go:48] "No ready sandbox for pod can be found. 
Jan 22 15:02:48 crc kubenswrapper[4773]: I0122 15:02:48.612052 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c581f645-e462-4c33-950b-826064550709-utilities\") pod \"c581f645-e462-4c33-950b-826064550709\" (UID: \"c581f645-e462-4c33-950b-826064550709\") "
Jan 22 15:02:48 crc kubenswrapper[4773]: I0122 15:02:48.612125 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c581f645-e462-4c33-950b-826064550709-catalog-content\") pod \"c581f645-e462-4c33-950b-826064550709\" (UID: \"c581f645-e462-4c33-950b-826064550709\") "
Jan 22 15:02:48 crc kubenswrapper[4773]: I0122 15:02:48.612153 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpbpq\" (UniqueName: \"kubernetes.io/projected/c581f645-e462-4c33-950b-826064550709-kube-api-access-fpbpq\") pod \"c581f645-e462-4c33-950b-826064550709\" (UID: \"c581f645-e462-4c33-950b-826064550709\") "
Jan 22 15:02:48 crc kubenswrapper[4773]: I0122 15:02:48.613168 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c581f645-e462-4c33-950b-826064550709-utilities" (OuterVolumeSpecName: "utilities") pod "c581f645-e462-4c33-950b-826064550709" (UID: "c581f645-e462-4c33-950b-826064550709"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 15:02:48 crc kubenswrapper[4773]: I0122 15:02:48.617661 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c581f645-e462-4c33-950b-826064550709-kube-api-access-fpbpq" (OuterVolumeSpecName: "kube-api-access-fpbpq") pod "c581f645-e462-4c33-950b-826064550709" (UID: "c581f645-e462-4c33-950b-826064550709"). InnerVolumeSpecName "kube-api-access-fpbpq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 15:02:48 crc kubenswrapper[4773]: I0122 15:02:48.642248 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c581f645-e462-4c33-950b-826064550709-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c581f645-e462-4c33-950b-826064550709" (UID: "c581f645-e462-4c33-950b-826064550709"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 15:02:48 crc kubenswrapper[4773]: I0122 15:02:48.715130 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c581f645-e462-4c33-950b-826064550709-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 15:02:48 crc kubenswrapper[4773]: I0122 15:02:48.715182 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c581f645-e462-4c33-950b-826064550709-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 15:02:48 crc kubenswrapper[4773]: I0122 15:02:48.715197 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpbpq\" (UniqueName: \"kubernetes.io/projected/c581f645-e462-4c33-950b-826064550709-kube-api-access-fpbpq\") on node \"crc\" DevicePath \"\""
Jan 22 15:02:48 crc kubenswrapper[4773]: I0122 15:02:48.874230 4773 generic.go:334] "Generic (PLEG): container finished" podID="c581f645-e462-4c33-950b-826064550709" containerID="aa9b8bc91dfe94d7f22b71a8736d71791e3df48c98233c793ed1f2256e7880e2" exitCode=0
Jan 22 15:02:48 crc kubenswrapper[4773]: I0122 15:02:48.874317 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vsqvl" event={"ID":"c581f645-e462-4c33-950b-826064550709","Type":"ContainerDied","Data":"aa9b8bc91dfe94d7f22b71a8736d71791e3df48c98233c793ed1f2256e7880e2"}
Jan 22 15:02:48 crc kubenswrapper[4773]: I0122 15:02:48.874666 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vsqvl" event={"ID":"c581f645-e462-4c33-950b-826064550709","Type":"ContainerDied","Data":"d762084e93f22d44c35c084199ddd83631b54d76503daf862bb79842952d8b23"}
Jan 22 15:02:48 crc kubenswrapper[4773]: I0122 15:02:48.874698 4773 scope.go:117] "RemoveContainer" containerID="aa9b8bc91dfe94d7f22b71a8736d71791e3df48c98233c793ed1f2256e7880e2"
Jan 22 15:02:48 crc kubenswrapper[4773]: I0122 15:02:48.874342 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vsqvl"
Jan 22 15:02:48 crc kubenswrapper[4773]: I0122 15:02:48.924908 4773 scope.go:117] "RemoveContainer" containerID="ad288796dbc19c2578b192cb191770a96eeeb62d0fd8dc867a0235004d4546b1"
Jan 22 15:02:48 crc kubenswrapper[4773]: I0122 15:02:48.929460 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vsqvl"]
Jan 22 15:02:48 crc kubenswrapper[4773]: I0122 15:02:48.948334 4773 scope.go:117] "RemoveContainer" containerID="77d387055955cebf6cb86416e0a4c2e86cbd76003378939c321a731706ac9722"
Jan 22 15:02:48 crc kubenswrapper[4773]: I0122 15:02:48.951090 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vsqvl"]
Jan 22 15:02:49 crc kubenswrapper[4773]: I0122 15:02:49.006557 4773 scope.go:117] "RemoveContainer" containerID="aa9b8bc91dfe94d7f22b71a8736d71791e3df48c98233c793ed1f2256e7880e2"
Jan 22 15:02:49 crc kubenswrapper[4773]: E0122 15:02:49.007214 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa9b8bc91dfe94d7f22b71a8736d71791e3df48c98233c793ed1f2256e7880e2\": container with ID starting with aa9b8bc91dfe94d7f22b71a8736d71791e3df48c98233c793ed1f2256e7880e2 not found: ID does not exist" containerID="aa9b8bc91dfe94d7f22b71a8736d71791e3df48c98233c793ed1f2256e7880e2"
Jan 22 15:02:49 crc kubenswrapper[4773]: I0122 15:02:49.007257 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa9b8bc91dfe94d7f22b71a8736d71791e3df48c98233c793ed1f2256e7880e2"} err="failed to get container status \"aa9b8bc91dfe94d7f22b71a8736d71791e3df48c98233c793ed1f2256e7880e2\": rpc error: code = NotFound desc = could not find container \"aa9b8bc91dfe94d7f22b71a8736d71791e3df48c98233c793ed1f2256e7880e2\": container with ID starting with aa9b8bc91dfe94d7f22b71a8736d71791e3df48c98233c793ed1f2256e7880e2 not found: ID does not exist"
Jan 22 15:02:49 crc kubenswrapper[4773]: I0122 15:02:49.007298 4773 scope.go:117] "RemoveContainer" containerID="ad288796dbc19c2578b192cb191770a96eeeb62d0fd8dc867a0235004d4546b1"
Jan 22 15:02:49 crc kubenswrapper[4773]: E0122 15:02:49.007712 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad288796dbc19c2578b192cb191770a96eeeb62d0fd8dc867a0235004d4546b1\": container with ID starting with ad288796dbc19c2578b192cb191770a96eeeb62d0fd8dc867a0235004d4546b1 not found: ID does not exist" containerID="ad288796dbc19c2578b192cb191770a96eeeb62d0fd8dc867a0235004d4546b1"
Jan 22 15:02:49 crc kubenswrapper[4773]: I0122 15:02:49.007737 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad288796dbc19c2578b192cb191770a96eeeb62d0fd8dc867a0235004d4546b1"} err="failed to get container status \"ad288796dbc19c2578b192cb191770a96eeeb62d0fd8dc867a0235004d4546b1\": rpc error: code = NotFound desc = could not find container \"ad288796dbc19c2578b192cb191770a96eeeb62d0fd8dc867a0235004d4546b1\": container with ID starting with ad288796dbc19c2578b192cb191770a96eeeb62d0fd8dc867a0235004d4546b1 not found: ID does not exist"
Jan 22 15:02:49 crc kubenswrapper[4773]: I0122 15:02:49.007752 4773 scope.go:117] "RemoveContainer" containerID="77d387055955cebf6cb86416e0a4c2e86cbd76003378939c321a731706ac9722"
Jan 22 15:02:49 crc kubenswrapper[4773]: E0122 15:02:49.008024 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77d387055955cebf6cb86416e0a4c2e86cbd76003378939c321a731706ac9722\": container with ID starting with 77d387055955cebf6cb86416e0a4c2e86cbd76003378939c321a731706ac9722 not found: ID does not exist" containerID="77d387055955cebf6cb86416e0a4c2e86cbd76003378939c321a731706ac9722"
Jan 22 15:02:49 crc kubenswrapper[4773]: I0122 15:02:49.008049 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77d387055955cebf6cb86416e0a4c2e86cbd76003378939c321a731706ac9722"} err="failed to get container status \"77d387055955cebf6cb86416e0a4c2e86cbd76003378939c321a731706ac9722\": rpc error: code = NotFound desc = could not find container \"77d387055955cebf6cb86416e0a4c2e86cbd76003378939c321a731706ac9722\": container with ID starting with 77d387055955cebf6cb86416e0a4c2e86cbd76003378939c321a731706ac9722 not found: ID does not exist"
Jan 22 15:02:50 crc kubenswrapper[4773]: I0122 15:02:50.675135 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c581f645-e462-4c33-950b-826064550709" path="/var/lib/kubelet/pods/c581f645-e462-4c33-950b-826064550709/volumes"
Jan 22 15:02:54 crc kubenswrapper[4773]: I0122 15:02:54.666138 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf"
Jan 22 15:02:54 crc kubenswrapper[4773]: E0122 15:02:54.667480 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 15:03:05 crc kubenswrapper[4773]: I0122 15:03:05.658587 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf"
Jan 22 15:03:05 crc kubenswrapper[4773]: E0122 15:03:05.659857 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 15:03:16 crc kubenswrapper[4773]: I0122 15:03:16.658437 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf"
Jan 22 15:03:16 crc kubenswrapper[4773]: E0122 15:03:16.659247 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
Jan 22 15:03:29 crc kubenswrapper[4773]: I0122 15:03:29.658510 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf"
Jan 22 15:03:29 crc kubenswrapper[4773]: E0122 15:03:29.659278 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d"
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:03:41 crc kubenswrapper[4773]: I0122 15:03:41.658337 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:03:41 crc kubenswrapper[4773]: E0122 15:03:41.659098 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:03:56 crc kubenswrapper[4773]: I0122 15:03:56.659493 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:03:56 crc kubenswrapper[4773]: E0122 15:03:56.660582 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:04:07 crc kubenswrapper[4773]: I0122 15:04:07.658968 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:04:07 crc kubenswrapper[4773]: E0122 15:04:07.659697 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:04:16 crc kubenswrapper[4773]: I0122 15:04:16.585222 4773 scope.go:117] "RemoveContainer" containerID="a17e048d5e8c9f385e8b921e7b038663466d4f91e2fe6a6fd6243e5074549ca6" Jan 22 15:04:16 crc kubenswrapper[4773]: I0122 15:04:16.631776 4773 scope.go:117] "RemoveContainer" containerID="2cb537a60875a3c702f4115e67b736b166be2a6d75626b329d460a03b875bab7" Jan 22 15:04:16 crc kubenswrapper[4773]: I0122 15:04:16.695207 4773 scope.go:117] "RemoveContainer" containerID="b21fe309d97644ad8e41b9af6d398023ce08ee91d11ab9392c2658aa1e62209d" Jan 22 15:04:18 crc kubenswrapper[4773]: I0122 15:04:18.658637 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:04:18 crc kubenswrapper[4773]: E0122 15:04:18.659234 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" 
podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:04:33 crc kubenswrapper[4773]: I0122 15:04:33.658811 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:04:33 crc kubenswrapper[4773]: E0122 15:04:33.659972 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:04:45 crc kubenswrapper[4773]: I0122 15:04:45.657811 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:04:45 crc kubenswrapper[4773]: E0122 15:04:45.658694 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:04:56 crc kubenswrapper[4773]: I0122 15:04:56.662992 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:04:56 crc kubenswrapper[4773]: E0122 15:04:56.663636 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:05:08 crc kubenswrapper[4773]: I0122 15:05:08.659061 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:05:08 crc kubenswrapper[4773]: E0122 15:05:08.659956 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:05:20 crc kubenswrapper[4773]: I0122 15:05:20.659611 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:05:20 crc kubenswrapper[4773]: E0122 15:05:20.660885 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:05:33 crc kubenswrapper[4773]: I0122 15:05:33.658247 4773 scope.go:117] "RemoveContainer" 
containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:05:33 crc kubenswrapper[4773]: E0122 15:05:33.659054 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:05:45 crc kubenswrapper[4773]: I0122 15:05:45.659007 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:05:45 crc kubenswrapper[4773]: E0122 15:05:45.659874 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:05:58 crc kubenswrapper[4773]: I0122 15:05:58.658334 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:05:58 crc kubenswrapper[4773]: E0122 15:05:58.658981 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:06:12 crc kubenswrapper[4773]: I0122 15:06:12.667650 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:06:12 crc kubenswrapper[4773]: E0122 15:06:12.668316 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:06:23 crc kubenswrapper[4773]: I0122 15:06:23.658231 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:06:23 crc kubenswrapper[4773]: E0122 15:06:23.658947 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:06:36 crc kubenswrapper[4773]: I0122 15:06:36.658434 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:06:36 crc kubenswrapper[4773]: E0122 15:06:36.659515 4773 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:06:47 crc kubenswrapper[4773]: I0122 15:06:47.660094 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:06:47 crc kubenswrapper[4773]: E0122 15:06:47.661201 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:07:02 crc kubenswrapper[4773]: I0122 15:07:02.669820 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:07:02 crc kubenswrapper[4773]: E0122 15:07:02.671056 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:07:16 crc kubenswrapper[4773]: I0122 15:07:16.659390 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf" Jan 22 15:07:17 crc kubenswrapper[4773]: I0122 15:07:17.504084 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"8775d63fdcde1d12deda78463190e6b3e682b644f26c578123126ce25d4c03f0"} Jan 22 15:07:27 crc kubenswrapper[4773]: I0122 15:07:27.761982 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-svl5m"] Jan 22 15:07:27 crc kubenswrapper[4773]: E0122 15:07:27.763973 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c581f645-e462-4c33-950b-826064550709" containerName="registry-server" Jan 22 15:07:27 crc kubenswrapper[4773]: I0122 15:07:27.764014 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c581f645-e462-4c33-950b-826064550709" containerName="registry-server" Jan 22 15:07:27 crc kubenswrapper[4773]: E0122 15:07:27.764065 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c581f645-e462-4c33-950b-826064550709" containerName="extract-utilities" Jan 22 15:07:27 crc kubenswrapper[4773]: I0122 15:07:27.764086 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="c581f645-e462-4c33-950b-826064550709" containerName="extract-utilities" Jan 22 15:07:27 crc kubenswrapper[4773]: E0122 15:07:27.764161 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c581f645-e462-4c33-950b-826064550709" containerName="extract-content" Jan 22 15:07:27 crc kubenswrapper[4773]: I0122 15:07:27.764180 4773 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="c581f645-e462-4c33-950b-826064550709" containerName="extract-content" Jan 22 15:07:27 crc kubenswrapper[4773]: I0122 15:07:27.764689 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="c581f645-e462-4c33-950b-826064550709" containerName="registry-server" Jan 22 15:07:27 crc kubenswrapper[4773]: I0122 15:07:27.768063 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-svl5m" Jan 22 15:07:27 crc kubenswrapper[4773]: I0122 15:07:27.800188 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-svl5m"] Jan 22 15:07:28 crc kubenswrapper[4773]: I0122 15:07:28.013081 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa0629fb-518b-4dd9-ac84-826d8458a483-utilities\") pod \"community-operators-svl5m\" (UID: \"aa0629fb-518b-4dd9-ac84-826d8458a483\") " pod="openshift-marketplace/community-operators-svl5m" Jan 22 15:07:28 crc kubenswrapper[4773]: I0122 15:07:28.013133 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t45kz\" (UniqueName: \"kubernetes.io/projected/aa0629fb-518b-4dd9-ac84-826d8458a483-kube-api-access-t45kz\") pod \"community-operators-svl5m\" (UID: \"aa0629fb-518b-4dd9-ac84-826d8458a483\") " pod="openshift-marketplace/community-operators-svl5m" Jan 22 15:07:28 crc kubenswrapper[4773]: I0122 15:07:28.013315 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa0629fb-518b-4dd9-ac84-826d8458a483-catalog-content\") pod \"community-operators-svl5m\" (UID: \"aa0629fb-518b-4dd9-ac84-826d8458a483\") " pod="openshift-marketplace/community-operators-svl5m" Jan 22 15:07:28 crc kubenswrapper[4773]: I0122 15:07:28.114899 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa0629fb-518b-4dd9-ac84-826d8458a483-catalog-content\") pod \"community-operators-svl5m\" (UID: \"aa0629fb-518b-4dd9-ac84-826d8458a483\") " pod="openshift-marketplace/community-operators-svl5m" Jan 22 15:07:28 crc kubenswrapper[4773]: I0122 15:07:28.115030 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa0629fb-518b-4dd9-ac84-826d8458a483-utilities\") pod \"community-operators-svl5m\" (UID: \"aa0629fb-518b-4dd9-ac84-826d8458a483\") " pod="openshift-marketplace/community-operators-svl5m" Jan 22 15:07:28 crc kubenswrapper[4773]: I0122 15:07:28.115063 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t45kz\" (UniqueName: \"kubernetes.io/projected/aa0629fb-518b-4dd9-ac84-826d8458a483-kube-api-access-t45kz\") pod \"community-operators-svl5m\" (UID: \"aa0629fb-518b-4dd9-ac84-826d8458a483\") " pod="openshift-marketplace/community-operators-svl5m" Jan 22 15:07:28 crc kubenswrapper[4773]: I0122 15:07:28.115716 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa0629fb-518b-4dd9-ac84-826d8458a483-catalog-content\") pod \"community-operators-svl5m\" (UID: \"aa0629fb-518b-4dd9-ac84-826d8458a483\") " pod="openshift-marketplace/community-operators-svl5m" Jan 22 15:07:28 crc kubenswrapper[4773]: I0122 15:07:28.115998 4773 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa0629fb-518b-4dd9-ac84-826d8458a483-utilities\") pod \"community-operators-svl5m\" (UID: \"aa0629fb-518b-4dd9-ac84-826d8458a483\") " pod="openshift-marketplace/community-operators-svl5m" Jan 22 15:07:28 crc kubenswrapper[4773]: I0122 15:07:28.139926 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t45kz\" (UniqueName: \"kubernetes.io/projected/aa0629fb-518b-4dd9-ac84-826d8458a483-kube-api-access-t45kz\") pod \"community-operators-svl5m\" (UID: \"aa0629fb-518b-4dd9-ac84-826d8458a483\") " pod="openshift-marketplace/community-operators-svl5m" Jan 22 15:07:28 crc kubenswrapper[4773]: I0122 15:07:28.397643 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-svl5m" Jan 22 15:07:28 crc kubenswrapper[4773]: I0122 15:07:28.932663 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-svl5m"] Jan 22 15:07:29 crc kubenswrapper[4773]: I0122 15:07:29.710371 4773 generic.go:334] "Generic (PLEG): container finished" podID="aa0629fb-518b-4dd9-ac84-826d8458a483" containerID="b590c5433241c8199ce774355861024a9900e50e7be4fb42f5a4e9c1c477966b" exitCode=0 Jan 22 15:07:29 crc kubenswrapper[4773]: I0122 15:07:29.710473 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-svl5m" event={"ID":"aa0629fb-518b-4dd9-ac84-826d8458a483","Type":"ContainerDied","Data":"b590c5433241c8199ce774355861024a9900e50e7be4fb42f5a4e9c1c477966b"} Jan 22 15:07:29 crc kubenswrapper[4773]: I0122 15:07:29.710779 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-svl5m" event={"ID":"aa0629fb-518b-4dd9-ac84-826d8458a483","Type":"ContainerStarted","Data":"86725a0f0e4d88273ba264e37c25e611a06e259115e97d8e2b7af46692b3e43b"} Jan 22 15:07:31 crc kubenswrapper[4773]: I0122 15:07:31.737402 4773 generic.go:334] "Generic (PLEG): container finished" podID="aa0629fb-518b-4dd9-ac84-826d8458a483" containerID="cb2ffa4fe675be7a5e14e27e4d2a5103dda12c472c375f1bcfab1cef41ea1435" exitCode=0 Jan 22 15:07:31 crc kubenswrapper[4773]: I0122 15:07:31.737519 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-svl5m" event={"ID":"aa0629fb-518b-4dd9-ac84-826d8458a483","Type":"ContainerDied","Data":"cb2ffa4fe675be7a5e14e27e4d2a5103dda12c472c375f1bcfab1cef41ea1435"} Jan 22 15:07:32 crc kubenswrapper[4773]: I0122 15:07:32.753492 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-svl5m" event={"ID":"aa0629fb-518b-4dd9-ac84-826d8458a483","Type":"ContainerStarted","Data":"cd9785ae7ad595b8a7e6712b320ae230600d4f893bfb60f9131cc367cbb80c0d"} Jan 22 15:07:32 crc kubenswrapper[4773]: I0122 15:07:32.782719 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-svl5m" podStartSLOduration=3.280234559 podStartE2EDuration="5.782686476s" podCreationTimestamp="2026-01-22 15:07:27 +0000 UTC" firstStartedPulling="2026-01-22 15:07:29.714685339 +0000 UTC m=+11557.292801204" lastFinishedPulling="2026-01-22 15:07:32.217137286 +0000 UTC m=+11559.795253121" observedRunningTime="2026-01-22 15:07:32.77574127 +0000 UTC m=+11560.353857125" watchObservedRunningTime="2026-01-22 15:07:32.782686476 +0000 UTC m=+11560.360802331" Jan 22 15:07:38 crc 
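[editor's note] The "Observed pod startup duration" entry above carries enough fields to reconstruct its arithmetic: podStartE2EDuration is the creation-to-running time, and podStartSLOduration is that E2E time minus the image-pull window (lastFinishedPulling - firstStartedPulling). A small Go check of those numbers follows; it is an illustration of the relationship shown in the log, not the tracker's actual implementation, and the ~30ns mismatch comes from the tracker using the monotonic m=+ offsets rather than wall-clock stamps.

package main

import (
	"fmt"
	"time"
)

const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func mustParse(s string) time.Time {
	t, err := time.Parse(layout, s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	// Values copied from the log entry above.
	firstPull := mustParse("2026-01-22 15:07:29.714685339 +0000 UTC")
	lastPull := mustParse("2026-01-22 15:07:32.217137286 +0000 UTC")
	e2e, err := time.ParseDuration("5.782686476s")
	if err != nil {
		panic(err)
	}
	pull := lastPull.Sub(firstPull) // image-pull window
	slo := e2e - pull               // ~3.280234529s vs logged 3.280234559 (monotonic skew)
	fmt.Printf("pull=%v sloDuration=%v\n", pull, slo)
}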
Jan 22 15:07:38 crc kubenswrapper[4773]: I0122 15:07:38.400542 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-svl5m"
Jan 22 15:07:38 crc kubenswrapper[4773]: I0122 15:07:38.400968 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-svl5m"
Jan 22 15:07:38 crc kubenswrapper[4773]: I0122 15:07:38.481947 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-svl5m"
Jan 22 15:07:38 crc kubenswrapper[4773]: I0122 15:07:38.876495 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-svl5m"
Jan 22 15:07:38 crc kubenswrapper[4773]: I0122 15:07:38.934531 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-svl5m"]
Jan 22 15:07:40 crc kubenswrapper[4773]: I0122 15:07:40.867430 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-svl5m" podUID="aa0629fb-518b-4dd9-ac84-826d8458a483" containerName="registry-server" containerID="cri-o://cd9785ae7ad595b8a7e6712b320ae230600d4f893bfb60f9131cc367cbb80c0d" gracePeriod=2
Jan 22 15:07:41 crc kubenswrapper[4773]: I0122 15:07:41.372076 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-svl5m"
Jan 22 15:07:41 crc kubenswrapper[4773]: I0122 15:07:41.475066 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa0629fb-518b-4dd9-ac84-826d8458a483-utilities\") pod \"aa0629fb-518b-4dd9-ac84-826d8458a483\" (UID: \"aa0629fb-518b-4dd9-ac84-826d8458a483\") "
Jan 22 15:07:41 crc kubenswrapper[4773]: I0122 15:07:41.475145 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa0629fb-518b-4dd9-ac84-826d8458a483-catalog-content\") pod \"aa0629fb-518b-4dd9-ac84-826d8458a483\" (UID: \"aa0629fb-518b-4dd9-ac84-826d8458a483\") "
Jan 22 15:07:41 crc kubenswrapper[4773]: I0122 15:07:41.475200 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t45kz\" (UniqueName: \"kubernetes.io/projected/aa0629fb-518b-4dd9-ac84-826d8458a483-kube-api-access-t45kz\") pod \"aa0629fb-518b-4dd9-ac84-826d8458a483\" (UID: \"aa0629fb-518b-4dd9-ac84-826d8458a483\") "
Jan 22 15:07:41 crc kubenswrapper[4773]: I0122 15:07:41.476936 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa0629fb-518b-4dd9-ac84-826d8458a483-utilities" (OuterVolumeSpecName: "utilities") pod "aa0629fb-518b-4dd9-ac84-826d8458a483" (UID: "aa0629fb-518b-4dd9-ac84-826d8458a483"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 15:07:41 crc kubenswrapper[4773]: I0122 15:07:41.486888 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa0629fb-518b-4dd9-ac84-826d8458a483-kube-api-access-t45kz" (OuterVolumeSpecName: "kube-api-access-t45kz") pod "aa0629fb-518b-4dd9-ac84-826d8458a483" (UID: "aa0629fb-518b-4dd9-ac84-826d8458a483"). InnerVolumeSpecName "kube-api-access-t45kz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 15:07:41 crc kubenswrapper[4773]: I0122 15:07:41.526928 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa0629fb-518b-4dd9-ac84-826d8458a483-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aa0629fb-518b-4dd9-ac84-826d8458a483" (UID: "aa0629fb-518b-4dd9-ac84-826d8458a483"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 15:07:41 crc kubenswrapper[4773]: I0122 15:07:41.577571 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa0629fb-518b-4dd9-ac84-826d8458a483-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 15:07:41 crc kubenswrapper[4773]: I0122 15:07:41.577614 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa0629fb-518b-4dd9-ac84-826d8458a483-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 15:07:41 crc kubenswrapper[4773]: I0122 15:07:41.577630 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t45kz\" (UniqueName: \"kubernetes.io/projected/aa0629fb-518b-4dd9-ac84-826d8458a483-kube-api-access-t45kz\") on node \"crc\" DevicePath \"\""
Jan 22 15:07:41 crc kubenswrapper[4773]: I0122 15:07:41.884408 4773 generic.go:334] "Generic (PLEG): container finished" podID="aa0629fb-518b-4dd9-ac84-826d8458a483" containerID="cd9785ae7ad595b8a7e6712b320ae230600d4f893bfb60f9131cc367cbb80c0d" exitCode=0
Jan 22 15:07:41 crc kubenswrapper[4773]: I0122 15:07:41.884476 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-svl5m" event={"ID":"aa0629fb-518b-4dd9-ac84-826d8458a483","Type":"ContainerDied","Data":"cd9785ae7ad595b8a7e6712b320ae230600d4f893bfb60f9131cc367cbb80c0d"}
Jan 22 15:07:41 crc kubenswrapper[4773]: I0122 15:07:41.884535 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-svl5m"
Jan 22 15:07:41 crc kubenswrapper[4773]: I0122 15:07:41.884633 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-svl5m" event={"ID":"aa0629fb-518b-4dd9-ac84-826d8458a483","Type":"ContainerDied","Data":"86725a0f0e4d88273ba264e37c25e611a06e259115e97d8e2b7af46692b3e43b"}
Jan 22 15:07:41 crc kubenswrapper[4773]: I0122 15:07:41.884682 4773 scope.go:117] "RemoveContainer" containerID="cd9785ae7ad595b8a7e6712b320ae230600d4f893bfb60f9131cc367cbb80c0d"
Jan 22 15:07:41 crc kubenswrapper[4773]: I0122 15:07:41.930785 4773 scope.go:117] "RemoveContainer" containerID="cb2ffa4fe675be7a5e14e27e4d2a5103dda12c472c375f1bcfab1cef41ea1435"
Jan 22 15:07:41 crc kubenswrapper[4773]: I0122 15:07:41.958367 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-svl5m"]
Jan 22 15:07:41 crc kubenswrapper[4773]: I0122 15:07:41.969812 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-svl5m"]
Jan 22 15:07:41 crc kubenswrapper[4773]: I0122 15:07:41.987880 4773 scope.go:117] "RemoveContainer" containerID="b590c5433241c8199ce774355861024a9900e50e7be4fb42f5a4e9c1c477966b"
Jan 22 15:07:42 crc kubenswrapper[4773]: I0122 15:07:42.032574 4773 scope.go:117] "RemoveContainer" containerID="cd9785ae7ad595b8a7e6712b320ae230600d4f893bfb60f9131cc367cbb80c0d"
Jan 22 15:07:42 crc kubenswrapper[4773]: E0122 15:07:42.033491 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd9785ae7ad595b8a7e6712b320ae230600d4f893bfb60f9131cc367cbb80c0d\": container with ID starting with cd9785ae7ad595b8a7e6712b320ae230600d4f893bfb60f9131cc367cbb80c0d not found: ID does not exist" containerID="cd9785ae7ad595b8a7e6712b320ae230600d4f893bfb60f9131cc367cbb80c0d"
Jan 22 15:07:42 crc kubenswrapper[4773]: I0122 15:07:42.033540 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd9785ae7ad595b8a7e6712b320ae230600d4f893bfb60f9131cc367cbb80c0d"} err="failed to get container status \"cd9785ae7ad595b8a7e6712b320ae230600d4f893bfb60f9131cc367cbb80c0d\": rpc error: code = NotFound desc = could not find container \"cd9785ae7ad595b8a7e6712b320ae230600d4f893bfb60f9131cc367cbb80c0d\": container with ID starting with cd9785ae7ad595b8a7e6712b320ae230600d4f893bfb60f9131cc367cbb80c0d not found: ID does not exist"
Jan 22 15:07:42 crc kubenswrapper[4773]: I0122 15:07:42.033573 4773 scope.go:117] "RemoveContainer" containerID="cb2ffa4fe675be7a5e14e27e4d2a5103dda12c472c375f1bcfab1cef41ea1435"
Jan 22 15:07:42 crc kubenswrapper[4773]: E0122 15:07:42.034351 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb2ffa4fe675be7a5e14e27e4d2a5103dda12c472c375f1bcfab1cef41ea1435\": container with ID starting with cb2ffa4fe675be7a5e14e27e4d2a5103dda12c472c375f1bcfab1cef41ea1435 not found: ID does not exist" containerID="cb2ffa4fe675be7a5e14e27e4d2a5103dda12c472c375f1bcfab1cef41ea1435"
Jan 22 15:07:42 crc kubenswrapper[4773]: I0122 15:07:42.034402 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb2ffa4fe675be7a5e14e27e4d2a5103dda12c472c375f1bcfab1cef41ea1435"} err="failed to get container status \"cb2ffa4fe675be7a5e14e27e4d2a5103dda12c472c375f1bcfab1cef41ea1435\": rpc error: code = NotFound desc = could not find container \"cb2ffa4fe675be7a5e14e27e4d2a5103dda12c472c375f1bcfab1cef41ea1435\": container with ID starting with cb2ffa4fe675be7a5e14e27e4d2a5103dda12c472c375f1bcfab1cef41ea1435 not found: ID does not exist"
Jan 22 15:07:42 crc kubenswrapper[4773]: I0122 15:07:42.034424 4773 scope.go:117] "RemoveContainer" containerID="b590c5433241c8199ce774355861024a9900e50e7be4fb42f5a4e9c1c477966b"
Jan 22 15:07:42 crc kubenswrapper[4773]: E0122 15:07:42.034846 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b590c5433241c8199ce774355861024a9900e50e7be4fb42f5a4e9c1c477966b\": container with ID starting with b590c5433241c8199ce774355861024a9900e50e7be4fb42f5a4e9c1c477966b not found: ID does not exist" containerID="b590c5433241c8199ce774355861024a9900e50e7be4fb42f5a4e9c1c477966b"
Jan 22 15:07:42 crc kubenswrapper[4773]: I0122 15:07:42.034907 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b590c5433241c8199ce774355861024a9900e50e7be4fb42f5a4e9c1c477966b"} err="failed to get container status \"b590c5433241c8199ce774355861024a9900e50e7be4fb42f5a4e9c1c477966b\": rpc error: code = NotFound desc = could not find container \"b590c5433241c8199ce774355861024a9900e50e7be4fb42f5a4e9c1c477966b\": container with ID starting with b590c5433241c8199ce774355861024a9900e50e7be4fb42f5a4e9c1c477966b not found: ID does not exist"
Jan 22 15:07:42 crc kubenswrapper[4773]: I0122 15:07:42.670075 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa0629fb-518b-4dd9-ac84-826d8458a483" path="/var/lib/kubelet/pods/aa0629fb-518b-4dd9-ac84-826d8458a483/volumes"
Need to start a new one" pod="openshift-marketplace/redhat-operators-c9l2t" Jan 22 15:08:04 crc kubenswrapper[4773]: I0122 15:08:04.033047 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c9l2t"] Jan 22 15:08:04 crc kubenswrapper[4773]: I0122 15:08:04.180660 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cd7eeb7-eb8c-4798-a606-ad8290374843-utilities\") pod \"redhat-operators-c9l2t\" (UID: \"5cd7eeb7-eb8c-4798-a606-ad8290374843\") " pod="openshift-marketplace/redhat-operators-c9l2t" Jan 22 15:08:04 crc kubenswrapper[4773]: I0122 15:08:04.180759 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24pck\" (UniqueName: \"kubernetes.io/projected/5cd7eeb7-eb8c-4798-a606-ad8290374843-kube-api-access-24pck\") pod \"redhat-operators-c9l2t\" (UID: \"5cd7eeb7-eb8c-4798-a606-ad8290374843\") " pod="openshift-marketplace/redhat-operators-c9l2t" Jan 22 15:08:04 crc kubenswrapper[4773]: I0122 15:08:04.180797 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cd7eeb7-eb8c-4798-a606-ad8290374843-catalog-content\") pod \"redhat-operators-c9l2t\" (UID: \"5cd7eeb7-eb8c-4798-a606-ad8290374843\") " pod="openshift-marketplace/redhat-operators-c9l2t" Jan 22 15:08:04 crc kubenswrapper[4773]: I0122 15:08:04.283552 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24pck\" (UniqueName: \"kubernetes.io/projected/5cd7eeb7-eb8c-4798-a606-ad8290374843-kube-api-access-24pck\") pod \"redhat-operators-c9l2t\" (UID: \"5cd7eeb7-eb8c-4798-a606-ad8290374843\") " pod="openshift-marketplace/redhat-operators-c9l2t" Jan 22 15:08:04 crc kubenswrapper[4773]: I0122 15:08:04.283644 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cd7eeb7-eb8c-4798-a606-ad8290374843-catalog-content\") pod \"redhat-operators-c9l2t\" (UID: \"5cd7eeb7-eb8c-4798-a606-ad8290374843\") " pod="openshift-marketplace/redhat-operators-c9l2t" Jan 22 15:08:04 crc kubenswrapper[4773]: I0122 15:08:04.283792 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cd7eeb7-eb8c-4798-a606-ad8290374843-utilities\") pod \"redhat-operators-c9l2t\" (UID: \"5cd7eeb7-eb8c-4798-a606-ad8290374843\") " pod="openshift-marketplace/redhat-operators-c9l2t" Jan 22 15:08:04 crc kubenswrapper[4773]: I0122 15:08:04.284319 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cd7eeb7-eb8c-4798-a606-ad8290374843-utilities\") pod \"redhat-operators-c9l2t\" (UID: \"5cd7eeb7-eb8c-4798-a606-ad8290374843\") " pod="openshift-marketplace/redhat-operators-c9l2t" Jan 22 15:08:04 crc kubenswrapper[4773]: I0122 15:08:04.284680 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cd7eeb7-eb8c-4798-a606-ad8290374843-catalog-content\") pod \"redhat-operators-c9l2t\" (UID: \"5cd7eeb7-eb8c-4798-a606-ad8290374843\") " pod="openshift-marketplace/redhat-operators-c9l2t" Jan 22 15:08:04 crc kubenswrapper[4773]: I0122 15:08:04.305276 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-24pck\" (UniqueName: \"kubernetes.io/projected/5cd7eeb7-eb8c-4798-a606-ad8290374843-kube-api-access-24pck\") pod \"redhat-operators-c9l2t\" (UID: \"5cd7eeb7-eb8c-4798-a606-ad8290374843\") " pod="openshift-marketplace/redhat-operators-c9l2t" Jan 22 15:08:04 crc kubenswrapper[4773]: I0122 15:08:04.364936 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c9l2t" Jan 22 15:08:04 crc kubenswrapper[4773]: I0122 15:08:04.911893 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-c9l2t"] Jan 22 15:08:05 crc kubenswrapper[4773]: I0122 15:08:05.148103 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9l2t" event={"ID":"5cd7eeb7-eb8c-4798-a606-ad8290374843","Type":"ContainerStarted","Data":"ffb1e4f3a5959e9dbf3394e15c7ca585218f9393049a614d86c0d2f7c8439c4f"} Jan 22 15:08:06 crc kubenswrapper[4773]: I0122 15:08:06.160672 4773 generic.go:334] "Generic (PLEG): container finished" podID="5cd7eeb7-eb8c-4798-a606-ad8290374843" containerID="20b59fe56dc766037f3ea18979d96a7c345df22c3fe950fdac1c4d240bf2f626" exitCode=0 Jan 22 15:08:06 crc kubenswrapper[4773]: I0122 15:08:06.160748 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9l2t" event={"ID":"5cd7eeb7-eb8c-4798-a606-ad8290374843","Type":"ContainerDied","Data":"20b59fe56dc766037f3ea18979d96a7c345df22c3fe950fdac1c4d240bf2f626"} Jan 22 15:08:06 crc kubenswrapper[4773]: I0122 15:08:06.165720 4773 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 15:08:08 crc kubenswrapper[4773]: I0122 15:08:08.184936 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9l2t" event={"ID":"5cd7eeb7-eb8c-4798-a606-ad8290374843","Type":"ContainerStarted","Data":"3973e322404dec28a286ba519c870b92612d037ecee774b0f4aa8734aaa02f3e"} Jan 22 15:08:10 crc kubenswrapper[4773]: I0122 15:08:10.205846 4773 generic.go:334] "Generic (PLEG): container finished" podID="5cd7eeb7-eb8c-4798-a606-ad8290374843" containerID="3973e322404dec28a286ba519c870b92612d037ecee774b0f4aa8734aaa02f3e" exitCode=0 Jan 22 15:08:10 crc kubenswrapper[4773]: I0122 15:08:10.206043 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9l2t" event={"ID":"5cd7eeb7-eb8c-4798-a606-ad8290374843","Type":"ContainerDied","Data":"3973e322404dec28a286ba519c870b92612d037ecee774b0f4aa8734aaa02f3e"} Jan 22 15:08:20 crc kubenswrapper[4773]: I0122 15:08:20.328110 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9l2t" event={"ID":"5cd7eeb7-eb8c-4798-a606-ad8290374843","Type":"ContainerStarted","Data":"2cecddfbca3f291b30e8833f732b34dc39d94274cfad33ae4a884d783f761649"} Jan 22 15:08:20 crc kubenswrapper[4773]: I0122 15:08:20.357837 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-c9l2t" podStartSLOduration=3.712576894 podStartE2EDuration="17.357813568s" podCreationTimestamp="2026-01-22 15:08:03 +0000 UTC" firstStartedPulling="2026-01-22 15:08:06.165425083 +0000 UTC m=+11593.743540908" lastFinishedPulling="2026-01-22 15:08:19.810661757 +0000 UTC m=+11607.388777582" observedRunningTime="2026-01-22 15:08:20.352629082 +0000 UTC m=+11607.930744927" watchObservedRunningTime="2026-01-22 15:08:20.357813568 +0000 UTC m=+11607.935929403" Jan 22 15:08:24 crc 
Jan 22 15:08:24 crc kubenswrapper[4773]: I0122 15:08:24.365925 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-c9l2t"
Jan 22 15:08:24 crc kubenswrapper[4773]: I0122 15:08:24.369180 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-c9l2t"
Jan 22 15:08:25 crc kubenswrapper[4773]: I0122 15:08:25.434005 4773 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-c9l2t" podUID="5cd7eeb7-eb8c-4798-a606-ad8290374843" containerName="registry-server" probeResult="failure" output=<
Jan 22 15:08:25 crc kubenswrapper[4773]: 	timeout: failed to connect service ":50051" within 1s
Jan 22 15:08:25 crc kubenswrapper[4773]: >
Jan 22 15:08:34 crc kubenswrapper[4773]: I0122 15:08:34.413847 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-c9l2t"
Jan 22 15:08:34 crc kubenswrapper[4773]: I0122 15:08:34.498970 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-c9l2t"
Jan 22 15:08:35 crc kubenswrapper[4773]: I0122 15:08:35.198187 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c9l2t"]
Jan 22 15:08:35 crc kubenswrapper[4773]: I0122 15:08:35.519945 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-c9l2t" podUID="5cd7eeb7-eb8c-4798-a606-ad8290374843" containerName="registry-server" containerID="cri-o://2cecddfbca3f291b30e8833f732b34dc39d94274cfad33ae4a884d783f761649" gracePeriod=2
Jan 22 15:08:38 crc kubenswrapper[4773]: I0122 15:08:36.533667 4773 generic.go:334] "Generic (PLEG): container finished" podID="5cd7eeb7-eb8c-4798-a606-ad8290374843" containerID="2cecddfbca3f291b30e8833f732b34dc39d94274cfad33ae4a884d783f761649" exitCode=0
Jan 22 15:08:38 crc kubenswrapper[4773]: I0122 15:08:36.533985 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9l2t" event={"ID":"5cd7eeb7-eb8c-4798-a606-ad8290374843","Type":"ContainerDied","Data":"2cecddfbca3f291b30e8833f732b34dc39d94274cfad33ae4a884d783f761649"}
Jan 22 15:08:38 crc kubenswrapper[4773]: I0122 15:08:36.534012 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-c9l2t" event={"ID":"5cd7eeb7-eb8c-4798-a606-ad8290374843","Type":"ContainerDied","Data":"ffb1e4f3a5959e9dbf3394e15c7ca585218f9393049a614d86c0d2f7c8439c4f"}
Jan 22 15:08:38 crc kubenswrapper[4773]: I0122 15:08:36.534025 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ffb1e4f3a5959e9dbf3394e15c7ca585218f9393049a614d86c0d2f7c8439c4f"
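[editor's note] The startup-probe failure above, `timeout: failed to connect service ":50051" within 1s`, is the output of a gRPC health probe against the registry-server port. The Go sketch below models only the connect-with-deadline part of such a probe (a plain TCP dial with a 1s budget); the real probe performs a gRPC health check on top of the connection, which this sketch deliberately omits.

package main

import (
	"fmt"
	"net"
	"time"
)

// probe dials the target with a deadline and reports failure the way the
// log output above does. An empty host in the address means localhost.
func probe(addr string, timeout time.Duration) error {
	conn, err := net.DialTimeout("tcp", addr, timeout)
	if err != nil {
		return fmt.Errorf("timeout: failed to connect service %q within %v", addr, timeout)
	}
	conn.Close()
	return nil
}

func main() {
	if err := probe(":50051", time.Second); err != nil {
		fmt.Println(err) // what the kubelet records as probeResult="failure"
	}
}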
Jan 22 15:08:38 crc kubenswrapper[4773]: I0122 15:08:36.564507 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c9l2t"
Jan 22 15:08:38 crc kubenswrapper[4773]: I0122 15:08:36.632762 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-24pck\" (UniqueName: \"kubernetes.io/projected/5cd7eeb7-eb8c-4798-a606-ad8290374843-kube-api-access-24pck\") pod \"5cd7eeb7-eb8c-4798-a606-ad8290374843\" (UID: \"5cd7eeb7-eb8c-4798-a606-ad8290374843\") "
Jan 22 15:08:38 crc kubenswrapper[4773]: I0122 15:08:36.632799 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cd7eeb7-eb8c-4798-a606-ad8290374843-utilities\") pod \"5cd7eeb7-eb8c-4798-a606-ad8290374843\" (UID: \"5cd7eeb7-eb8c-4798-a606-ad8290374843\") "
Jan 22 15:08:38 crc kubenswrapper[4773]: I0122 15:08:36.632863 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cd7eeb7-eb8c-4798-a606-ad8290374843-catalog-content\") pod \"5cd7eeb7-eb8c-4798-a606-ad8290374843\" (UID: \"5cd7eeb7-eb8c-4798-a606-ad8290374843\") "
Jan 22 15:08:38 crc kubenswrapper[4773]: I0122 15:08:36.634374 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cd7eeb7-eb8c-4798-a606-ad8290374843-utilities" (OuterVolumeSpecName: "utilities") pod "5cd7eeb7-eb8c-4798-a606-ad8290374843" (UID: "5cd7eeb7-eb8c-4798-a606-ad8290374843"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 15:08:38 crc kubenswrapper[4773]: I0122 15:08:36.639083 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cd7eeb7-eb8c-4798-a606-ad8290374843-kube-api-access-24pck" (OuterVolumeSpecName: "kube-api-access-24pck") pod "5cd7eeb7-eb8c-4798-a606-ad8290374843" (UID: "5cd7eeb7-eb8c-4798-a606-ad8290374843"). InnerVolumeSpecName "kube-api-access-24pck". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 22 15:08:38 crc kubenswrapper[4773]: I0122 15:08:36.735400 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-24pck\" (UniqueName: \"kubernetes.io/projected/5cd7eeb7-eb8c-4798-a606-ad8290374843-kube-api-access-24pck\") on node \"crc\" DevicePath \"\""
Jan 22 15:08:38 crc kubenswrapper[4773]: I0122 15:08:36.735447 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cd7eeb7-eb8c-4798-a606-ad8290374843-utilities\") on node \"crc\" DevicePath \"\""
Jan 22 15:08:38 crc kubenswrapper[4773]: I0122 15:08:36.767995 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cd7eeb7-eb8c-4798-a606-ad8290374843-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5cd7eeb7-eb8c-4798-a606-ad8290374843" (UID: "5cd7eeb7-eb8c-4798-a606-ad8290374843"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 22 15:08:38 crc kubenswrapper[4773]: I0122 15:08:36.837313 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cd7eeb7-eb8c-4798-a606-ad8290374843-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 22 15:08:38 crc kubenswrapper[4773]: I0122 15:08:37.546748 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-c9l2t"
Jan 22 15:08:38 crc kubenswrapper[4773]: I0122 15:08:37.604541 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-c9l2t"]
Jan 22 15:08:38 crc kubenswrapper[4773]: I0122 15:08:37.617106 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-c9l2t"]
Jan 22 15:08:38 crc kubenswrapper[4773]: I0122 15:08:38.669864 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cd7eeb7-eb8c-4798-a606-ad8290374843" path="/var/lib/kubelet/pods/5cd7eeb7-eb8c-4798-a606-ad8290374843/volumes"
Jan 22 15:09:34 crc kubenswrapper[4773]: I0122 15:09:34.074706 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 15:09:34 crc kubenswrapper[4773]: I0122 15:09:34.075376 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 15:09:49 crc kubenswrapper[4773]: I0122 15:09:49.205711 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dh6jt"]
Jan 22 15:09:49 crc kubenswrapper[4773]: E0122 15:09:49.207024 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cd7eeb7-eb8c-4798-a606-ad8290374843" containerName="extract-utilities"
Jan 22 15:09:49 crc kubenswrapper[4773]: I0122 15:09:49.207055 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cd7eeb7-eb8c-4798-a606-ad8290374843" containerName="extract-utilities"
Jan 22 15:09:49 crc kubenswrapper[4773]: E0122 15:09:49.207083 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cd7eeb7-eb8c-4798-a606-ad8290374843" containerName="extract-content"
Jan 22 15:09:49 crc kubenswrapper[4773]: I0122 15:09:49.207095 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cd7eeb7-eb8c-4798-a606-ad8290374843" containerName="extract-content"
Jan 22 15:09:49 crc kubenswrapper[4773]: E0122 15:09:49.207110 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cd7eeb7-eb8c-4798-a606-ad8290374843" containerName="registry-server"
Jan 22 15:09:49 crc kubenswrapper[4773]: I0122 15:09:49.207123 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cd7eeb7-eb8c-4798-a606-ad8290374843" containerName="registry-server"
Jan 22 15:09:49 crc kubenswrapper[4773]: I0122 15:09:49.207596 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cd7eeb7-eb8c-4798-a606-ad8290374843" containerName="registry-server"
Jan 22 15:09:49 crc kubenswrapper[4773]: I0122 15:09:49.210187 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dh6jt"
Jan 22 15:09:49 crc kubenswrapper[4773]: I0122 15:09:49.224495 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dh6jt"]
Jan 22 15:09:49 crc kubenswrapper[4773]: I0122 15:09:49.244521 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10ef6b01-da4c-427e-8a49-29767df69cd5-utilities\") pod \"certified-operators-dh6jt\" (UID: \"10ef6b01-da4c-427e-8a49-29767df69cd5\") " pod="openshift-marketplace/certified-operators-dh6jt"
Jan 22 15:09:49 crc kubenswrapper[4773]: I0122 15:09:49.245112 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plwbs\" (UniqueName: \"kubernetes.io/projected/10ef6b01-da4c-427e-8a49-29767df69cd5-kube-api-access-plwbs\") pod \"certified-operators-dh6jt\" (UID: \"10ef6b01-da4c-427e-8a49-29767df69cd5\") " pod="openshift-marketplace/certified-operators-dh6jt"
Jan 22 15:09:49 crc kubenswrapper[4773]: I0122 15:09:49.245441 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10ef6b01-da4c-427e-8a49-29767df69cd5-catalog-content\") pod \"certified-operators-dh6jt\" (UID: \"10ef6b01-da4c-427e-8a49-29767df69cd5\") " pod="openshift-marketplace/certified-operators-dh6jt"
Jan 22 15:09:49 crc kubenswrapper[4773]: I0122 15:09:49.347656 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10ef6b01-da4c-427e-8a49-29767df69cd5-utilities\") pod \"certified-operators-dh6jt\" (UID: \"10ef6b01-da4c-427e-8a49-29767df69cd5\") " pod="openshift-marketplace/certified-operators-dh6jt"
Jan 22 15:09:49 crc kubenswrapper[4773]: I0122 15:09:49.347998 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plwbs\" (UniqueName: \"kubernetes.io/projected/10ef6b01-da4c-427e-8a49-29767df69cd5-kube-api-access-plwbs\") pod \"certified-operators-dh6jt\" (UID: \"10ef6b01-da4c-427e-8a49-29767df69cd5\") " pod="openshift-marketplace/certified-operators-dh6jt"
Jan 22 15:09:49 crc kubenswrapper[4773]: I0122 15:09:49.348167 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10ef6b01-da4c-427e-8a49-29767df69cd5-utilities\") pod \"certified-operators-dh6jt\" (UID: \"10ef6b01-da4c-427e-8a49-29767df69cd5\") " pod="openshift-marketplace/certified-operators-dh6jt"
Jan 22 15:09:49 crc kubenswrapper[4773]: I0122 15:09:49.348272 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10ef6b01-da4c-427e-8a49-29767df69cd5-catalog-content\") pod \"certified-operators-dh6jt\" (UID: \"10ef6b01-da4c-427e-8a49-29767df69cd5\") " pod="openshift-marketplace/certified-operators-dh6jt"
Jan 22 15:09:49 crc kubenswrapper[4773]: I0122 15:09:49.348723 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10ef6b01-da4c-427e-8a49-29767df69cd5-catalog-content\") pod \"certified-operators-dh6jt\" (UID: \"10ef6b01-da4c-427e-8a49-29767df69cd5\") " pod="openshift-marketplace/certified-operators-dh6jt"
Jan 22 15:09:49 crc kubenswrapper[4773]: I0122 15:09:49.367202 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plwbs\" (UniqueName: \"kubernetes.io/projected/10ef6b01-da4c-427e-8a49-29767df69cd5-kube-api-access-plwbs\") pod \"certified-operators-dh6jt\" (UID: \"10ef6b01-da4c-427e-8a49-29767df69cd5\") " pod="openshift-marketplace/certified-operators-dh6jt"
Jan 22 15:09:49 crc kubenswrapper[4773]: I0122 15:09:49.536585 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dh6jt"
Jan 22 15:09:50 crc kubenswrapper[4773]: I0122 15:09:50.071638 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dh6jt"]
Jan 22 15:09:50 crc kubenswrapper[4773]: W0122 15:09:50.090807 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod10ef6b01_da4c_427e_8a49_29767df69cd5.slice/crio-61b996ef77e5f9e6489845b0bc0325107814c9392c534cd57fe1dfbfed4963bf WatchSource:0}: Error finding container 61b996ef77e5f9e6489845b0bc0325107814c9392c534cd57fe1dfbfed4963bf: Status 404 returned error can't find the container with id 61b996ef77e5f9e6489845b0bc0325107814c9392c534cd57fe1dfbfed4963bf
Jan 22 15:09:50 crc kubenswrapper[4773]: I0122 15:09:50.392480 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dh6jt" event={"ID":"10ef6b01-da4c-427e-8a49-29767df69cd5","Type":"ContainerStarted","Data":"61b996ef77e5f9e6489845b0bc0325107814c9392c534cd57fe1dfbfed4963bf"}
Jan 22 15:09:51 crc kubenswrapper[4773]: I0122 15:09:51.427539 4773 generic.go:334] "Generic (PLEG): container finished" podID="10ef6b01-da4c-427e-8a49-29767df69cd5" containerID="65ec19b46e122bd4b46d3bcbaa94522566149051cb2a11cb86a8f62f832af133" exitCode=0
Jan 22 15:09:51 crc kubenswrapper[4773]: I0122 15:09:51.427620 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dh6jt" event={"ID":"10ef6b01-da4c-427e-8a49-29767df69cd5","Type":"ContainerDied","Data":"65ec19b46e122bd4b46d3bcbaa94522566149051cb2a11cb86a8f62f832af133"}
Jan 22 15:09:54 crc kubenswrapper[4773]: I0122 15:09:54.510430 4773 generic.go:334] "Generic (PLEG): container finished" podID="10ef6b01-da4c-427e-8a49-29767df69cd5" containerID="c4dc2d82db395550fec90e17397c46b9a64b7655ffe424ef11341919e7d65771" exitCode=0
Jan 22 15:09:54 crc kubenswrapper[4773]: I0122 15:09:54.510509 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dh6jt" event={"ID":"10ef6b01-da4c-427e-8a49-29767df69cd5","Type":"ContainerDied","Data":"c4dc2d82db395550fec90e17397c46b9a64b7655ffe424ef11341919e7d65771"}
Jan 22 15:09:56 crc kubenswrapper[4773]: I0122 15:09:56.531706 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dh6jt" event={"ID":"10ef6b01-da4c-427e-8a49-29767df69cd5","Type":"ContainerStarted","Data":"c5baa6370d9e85c3075030457e2c55a73be9d4f203bc53e93437780901d9c80b"}
m=+11704.124881147" watchObservedRunningTime="2026-01-22 15:09:56.552275247 +0000 UTC m=+11704.130391092" Jan 22 15:09:59 crc kubenswrapper[4773]: I0122 15:09:59.537432 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dh6jt" Jan 22 15:09:59 crc kubenswrapper[4773]: I0122 15:09:59.538319 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dh6jt" Jan 22 15:09:59 crc kubenswrapper[4773]: I0122 15:09:59.592104 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dh6jt" Jan 22 15:10:04 crc kubenswrapper[4773]: I0122 15:10:04.074797 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 15:10:04 crc kubenswrapper[4773]: I0122 15:10:04.075394 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 15:10:09 crc kubenswrapper[4773]: I0122 15:10:09.588844 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dh6jt" Jan 22 15:10:09 crc kubenswrapper[4773]: I0122 15:10:09.646618 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dh6jt"] Jan 22 15:10:09 crc kubenswrapper[4773]: I0122 15:10:09.669035 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dh6jt" podUID="10ef6b01-da4c-427e-8a49-29767df69cd5" containerName="registry-server" containerID="cri-o://c5baa6370d9e85c3075030457e2c55a73be9d4f203bc53e93437780901d9c80b" gracePeriod=2 Jan 22 15:10:10 crc kubenswrapper[4773]: I0122 15:10:10.706020 4773 generic.go:334] "Generic (PLEG): container finished" podID="10ef6b01-da4c-427e-8a49-29767df69cd5" containerID="c5baa6370d9e85c3075030457e2c55a73be9d4f203bc53e93437780901d9c80b" exitCode=0 Jan 22 15:10:10 crc kubenswrapper[4773]: I0122 15:10:10.706122 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dh6jt" event={"ID":"10ef6b01-da4c-427e-8a49-29767df69cd5","Type":"ContainerDied","Data":"c5baa6370d9e85c3075030457e2c55a73be9d4f203bc53e93437780901d9c80b"} Jan 22 15:10:10 crc kubenswrapper[4773]: I0122 15:10:10.951546 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dh6jt" Jan 22 15:10:11 crc kubenswrapper[4773]: I0122 15:10:11.120034 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-plwbs\" (UniqueName: \"kubernetes.io/projected/10ef6b01-da4c-427e-8a49-29767df69cd5-kube-api-access-plwbs\") pod \"10ef6b01-da4c-427e-8a49-29767df69cd5\" (UID: \"10ef6b01-da4c-427e-8a49-29767df69cd5\") " Jan 22 15:10:11 crc kubenswrapper[4773]: I0122 15:10:11.120200 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10ef6b01-da4c-427e-8a49-29767df69cd5-catalog-content\") pod \"10ef6b01-da4c-427e-8a49-29767df69cd5\" (UID: \"10ef6b01-da4c-427e-8a49-29767df69cd5\") " Jan 22 15:10:11 crc kubenswrapper[4773]: I0122 15:10:11.120336 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10ef6b01-da4c-427e-8a49-29767df69cd5-utilities\") pod \"10ef6b01-da4c-427e-8a49-29767df69cd5\" (UID: \"10ef6b01-da4c-427e-8a49-29767df69cd5\") " Jan 22 15:10:11 crc kubenswrapper[4773]: I0122 15:10:11.121802 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10ef6b01-da4c-427e-8a49-29767df69cd5-utilities" (OuterVolumeSpecName: "utilities") pod "10ef6b01-da4c-427e-8a49-29767df69cd5" (UID: "10ef6b01-da4c-427e-8a49-29767df69cd5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 15:10:11 crc kubenswrapper[4773]: I0122 15:10:11.128296 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10ef6b01-da4c-427e-8a49-29767df69cd5-kube-api-access-plwbs" (OuterVolumeSpecName: "kube-api-access-plwbs") pod "10ef6b01-da4c-427e-8a49-29767df69cd5" (UID: "10ef6b01-da4c-427e-8a49-29767df69cd5"). InnerVolumeSpecName "kube-api-access-plwbs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 15:10:11 crc kubenswrapper[4773]: I0122 15:10:11.168225 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10ef6b01-da4c-427e-8a49-29767df69cd5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "10ef6b01-da4c-427e-8a49-29767df69cd5" (UID: "10ef6b01-da4c-427e-8a49-29767df69cd5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 15:10:11 crc kubenswrapper[4773]: I0122 15:10:11.222792 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10ef6b01-da4c-427e-8a49-29767df69cd5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 15:10:11 crc kubenswrapper[4773]: I0122 15:10:11.222836 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10ef6b01-da4c-427e-8a49-29767df69cd5-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 15:10:11 crc kubenswrapper[4773]: I0122 15:10:11.222848 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-plwbs\" (UniqueName: \"kubernetes.io/projected/10ef6b01-da4c-427e-8a49-29767df69cd5-kube-api-access-plwbs\") on node \"crc\" DevicePath \"\"" Jan 22 15:10:11 crc kubenswrapper[4773]: I0122 15:10:11.718089 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dh6jt" event={"ID":"10ef6b01-da4c-427e-8a49-29767df69cd5","Type":"ContainerDied","Data":"61b996ef77e5f9e6489845b0bc0325107814c9392c534cd57fe1dfbfed4963bf"} Jan 22 15:10:11 crc kubenswrapper[4773]: I0122 15:10:11.718416 4773 scope.go:117] "RemoveContainer" containerID="c5baa6370d9e85c3075030457e2c55a73be9d4f203bc53e93437780901d9c80b" Jan 22 15:10:11 crc kubenswrapper[4773]: I0122 15:10:11.718218 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dh6jt" Jan 22 15:10:11 crc kubenswrapper[4773]: I0122 15:10:11.752806 4773 scope.go:117] "RemoveContainer" containerID="c4dc2d82db395550fec90e17397c46b9a64b7655ffe424ef11341919e7d65771" Jan 22 15:10:11 crc kubenswrapper[4773]: I0122 15:10:11.780849 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dh6jt"] Jan 22 15:10:11 crc kubenswrapper[4773]: I0122 15:10:11.783760 4773 scope.go:117] "RemoveContainer" containerID="65ec19b46e122bd4b46d3bcbaa94522566149051cb2a11cb86a8f62f832af133" Jan 22 15:10:11 crc kubenswrapper[4773]: I0122 15:10:11.795728 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dh6jt"] Jan 22 15:10:12 crc kubenswrapper[4773]: I0122 15:10:12.671348 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10ef6b01-da4c-427e-8a49-29767df69cd5" path="/var/lib/kubelet/pods/10ef6b01-da4c-427e-8a49-29767df69cd5/volumes" Jan 22 15:10:34 crc kubenswrapper[4773]: I0122 15:10:34.074544 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 15:10:34 crc kubenswrapper[4773]: I0122 15:10:34.075143 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 15:10:34 crc kubenswrapper[4773]: I0122 15:10:34.075201 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 15:10:34 crc kubenswrapper[4773]: I0122 15:10:34.076181 4773 
Jan 22 15:10:34 crc kubenswrapper[4773]: I0122 15:10:34.076181 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8775d63fdcde1d12deda78463190e6b3e682b644f26c578123126ce25d4c03f0"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 22 15:10:34 crc kubenswrapper[4773]: I0122 15:10:34.076247 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://8775d63fdcde1d12deda78463190e6b3e682b644f26c578123126ce25d4c03f0" gracePeriod=600
Jan 22 15:10:35 crc kubenswrapper[4773]: I0122 15:10:35.007874 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="8775d63fdcde1d12deda78463190e6b3e682b644f26c578123126ce25d4c03f0" exitCode=0
Jan 22 15:10:35 crc kubenswrapper[4773]: I0122 15:10:35.007957 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"8775d63fdcde1d12deda78463190e6b3e682b644f26c578123126ce25d4c03f0"}
Jan 22 15:10:35 crc kubenswrapper[4773]: I0122 15:10:35.008175 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerStarted","Data":"d2cbf053f518c2712609b9c66b14729fc3cda251af390a830ad262a0a865163e"}
Jan 22 15:10:35 crc kubenswrapper[4773]: I0122 15:10:35.008201 4773 scope.go:117] "RemoveContainer" containerID="22d06c46da28ce608b8627ecc81e994c7c28fe31c0db238ad4913cd4cbccefcf"
Jan 22 15:12:34 crc kubenswrapper[4773]: I0122 15:12:34.074090 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 15:12:34 crc kubenswrapper[4773]: I0122 15:12:34.074646 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
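[editor's note] The repeated `Get "http://127.0.0.1:8798/health": connect: connection refused` failures above are an HTTP liveness probe; once enough consecutive attempts fail, the kubelet kills the container with the pod's termination grace period (600s here) and restarts it. A stand-in for that probe loop follows; the failure threshold of 3 is an assumption (the common default), not a value read from this log.

package main

import (
	"fmt"
	"net/http"
	"time"
)

// healthy issues a GET with a short client timeout, the way an HTTP probe
// does; a refused connection simply returns an error and counts as a failure.
func healthy(url string, timeout time.Duration) bool {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		return false
	}
	defer resp.Body.Close()
	return resp.StatusCode >= 200 && resp.StatusCode < 400
}

func main() {
	const url = "http://127.0.0.1:8798/health" // endpoint from the log above
	failures := 0
	for i := 0; i < 3; i++ {
		if healthy(url, time.Second) {
			failures = 0
			continue
		}
		failures++
	}
	if failures >= 3 {
		fmt.Println("liveness failed; container would be restarted with its grace period")
	}
}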
containerName="extract-utilities" Jan 22 15:12:52 crc kubenswrapper[4773]: E0122 15:12:52.261739 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10ef6b01-da4c-427e-8a49-29767df69cd5" containerName="registry-server" Jan 22 15:12:52 crc kubenswrapper[4773]: I0122 15:12:52.261744 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="10ef6b01-da4c-427e-8a49-29767df69cd5" containerName="registry-server" Jan 22 15:12:52 crc kubenswrapper[4773]: I0122 15:12:52.261963 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="10ef6b01-da4c-427e-8a49-29767df69cd5" containerName="registry-server" Jan 22 15:12:52 crc kubenswrapper[4773]: I0122 15:12:52.263653 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vrlx8" Jan 22 15:12:52 crc kubenswrapper[4773]: I0122 15:12:52.276375 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrlx8"] Jan 22 15:12:52 crc kubenswrapper[4773]: I0122 15:12:52.398898 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5afe44c-dca9-470b-8e7b-a79cab069def-utilities\") pod \"redhat-marketplace-vrlx8\" (UID: \"b5afe44c-dca9-470b-8e7b-a79cab069def\") " pod="openshift-marketplace/redhat-marketplace-vrlx8" Jan 22 15:12:52 crc kubenswrapper[4773]: I0122 15:12:52.399090 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjj9p\" (UniqueName: \"kubernetes.io/projected/b5afe44c-dca9-470b-8e7b-a79cab069def-kube-api-access-wjj9p\") pod \"redhat-marketplace-vrlx8\" (UID: \"b5afe44c-dca9-470b-8e7b-a79cab069def\") " pod="openshift-marketplace/redhat-marketplace-vrlx8" Jan 22 15:12:52 crc kubenswrapper[4773]: I0122 15:12:52.399142 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5afe44c-dca9-470b-8e7b-a79cab069def-catalog-content\") pod \"redhat-marketplace-vrlx8\" (UID: \"b5afe44c-dca9-470b-8e7b-a79cab069def\") " pod="openshift-marketplace/redhat-marketplace-vrlx8" Jan 22 15:12:52 crc kubenswrapper[4773]: I0122 15:12:52.501699 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5afe44c-dca9-470b-8e7b-a79cab069def-utilities\") pod \"redhat-marketplace-vrlx8\" (UID: \"b5afe44c-dca9-470b-8e7b-a79cab069def\") " pod="openshift-marketplace/redhat-marketplace-vrlx8" Jan 22 15:12:52 crc kubenswrapper[4773]: I0122 15:12:52.501952 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjj9p\" (UniqueName: \"kubernetes.io/projected/b5afe44c-dca9-470b-8e7b-a79cab069def-kube-api-access-wjj9p\") pod \"redhat-marketplace-vrlx8\" (UID: \"b5afe44c-dca9-470b-8e7b-a79cab069def\") " pod="openshift-marketplace/redhat-marketplace-vrlx8" Jan 22 15:12:52 crc kubenswrapper[4773]: I0122 15:12:52.502060 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5afe44c-dca9-470b-8e7b-a79cab069def-catalog-content\") pod \"redhat-marketplace-vrlx8\" (UID: \"b5afe44c-dca9-470b-8e7b-a79cab069def\") " pod="openshift-marketplace/redhat-marketplace-vrlx8" Jan 22 15:12:52 crc kubenswrapper[4773]: I0122 15:12:52.502575 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5afe44c-dca9-470b-8e7b-a79cab069def-utilities\") pod \"redhat-marketplace-vrlx8\" (UID: \"b5afe44c-dca9-470b-8e7b-a79cab069def\") " pod="openshift-marketplace/redhat-marketplace-vrlx8" Jan 22 15:12:52 crc kubenswrapper[4773]: I0122 15:12:52.502624 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5afe44c-dca9-470b-8e7b-a79cab069def-catalog-content\") pod \"redhat-marketplace-vrlx8\" (UID: \"b5afe44c-dca9-470b-8e7b-a79cab069def\") " pod="openshift-marketplace/redhat-marketplace-vrlx8" Jan 22 15:12:52 crc kubenswrapper[4773]: I0122 15:12:52.525729 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjj9p\" (UniqueName: \"kubernetes.io/projected/b5afe44c-dca9-470b-8e7b-a79cab069def-kube-api-access-wjj9p\") pod \"redhat-marketplace-vrlx8\" (UID: \"b5afe44c-dca9-470b-8e7b-a79cab069def\") " pod="openshift-marketplace/redhat-marketplace-vrlx8" Jan 22 15:12:52 crc kubenswrapper[4773]: I0122 15:12:52.590924 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vrlx8" Jan 22 15:12:53 crc kubenswrapper[4773]: I0122 15:12:53.123009 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrlx8"] Jan 22 15:12:53 crc kubenswrapper[4773]: W0122 15:12:53.125664 4773 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5afe44c_dca9_470b_8e7b_a79cab069def.slice/crio-18bea3d22b94fe8515f61abe371a308217da19f2ee387a46c37e4c1f48bcbc51 WatchSource:0}: Error finding container 18bea3d22b94fe8515f61abe371a308217da19f2ee387a46c37e4c1f48bcbc51: Status 404 returned error can't find the container with id 18bea3d22b94fe8515f61abe371a308217da19f2ee387a46c37e4c1f48bcbc51 Jan 22 15:12:53 crc kubenswrapper[4773]: I0122 15:12:53.741352 4773 generic.go:334] "Generic (PLEG): container finished" podID="b5afe44c-dca9-470b-8e7b-a79cab069def" containerID="c801c69e045c357469b45f994d2e3d908c3ca2df862554226eb04ac55159ec5c" exitCode=0 Jan 22 15:12:53 crc kubenswrapper[4773]: I0122 15:12:53.741491 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrlx8" event={"ID":"b5afe44c-dca9-470b-8e7b-a79cab069def","Type":"ContainerDied","Data":"c801c69e045c357469b45f994d2e3d908c3ca2df862554226eb04ac55159ec5c"} Jan 22 15:12:53 crc kubenswrapper[4773]: I0122 15:12:53.741722 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrlx8" event={"ID":"b5afe44c-dca9-470b-8e7b-a79cab069def","Type":"ContainerStarted","Data":"18bea3d22b94fe8515f61abe371a308217da19f2ee387a46c37e4c1f48bcbc51"} Jan 22 15:12:55 crc kubenswrapper[4773]: I0122 15:12:55.771662 4773 generic.go:334] "Generic (PLEG): container finished" podID="b5afe44c-dca9-470b-8e7b-a79cab069def" containerID="c95a3e43ea358e91e0a3753e241ab9958b4a2df9c9f8bac151d5ede344279e62" exitCode=0 Jan 22 15:12:55 crc kubenswrapper[4773]: I0122 15:12:55.771785 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrlx8" event={"ID":"b5afe44c-dca9-470b-8e7b-a79cab069def","Type":"ContainerDied","Data":"c95a3e43ea358e91e0a3753e241ab9958b4a2df9c9f8bac151d5ede344279e62"} Jan 22 15:12:57 crc kubenswrapper[4773]: I0122 15:12:57.802462 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-vrlx8" event={"ID":"b5afe44c-dca9-470b-8e7b-a79cab069def","Type":"ContainerStarted","Data":"00410a0113b82718dcb03d98322de95d58e674ffc1c8be9af15ecfeb76a37816"} Jan 22 15:12:57 crc kubenswrapper[4773]: I0122 15:12:57.839108 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vrlx8" podStartSLOduration=2.977221773 podStartE2EDuration="5.83908537s" podCreationTimestamp="2026-01-22 15:12:52 +0000 UTC" firstStartedPulling="2026-01-22 15:12:53.74567188 +0000 UTC m=+11881.323787745" lastFinishedPulling="2026-01-22 15:12:56.607535517 +0000 UTC m=+11884.185651342" observedRunningTime="2026-01-22 15:12:57.829883991 +0000 UTC m=+11885.407999826" watchObservedRunningTime="2026-01-22 15:12:57.83908537 +0000 UTC m=+11885.417201205" Jan 22 15:13:02 crc kubenswrapper[4773]: I0122 15:13:02.591644 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vrlx8" Jan 22 15:13:02 crc kubenswrapper[4773]: I0122 15:13:02.593214 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vrlx8" Jan 22 15:13:02 crc kubenswrapper[4773]: I0122 15:13:02.678008 4773 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vrlx8" Jan 22 15:13:02 crc kubenswrapper[4773]: I0122 15:13:02.971669 4773 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vrlx8" Jan 22 15:13:03 crc kubenswrapper[4773]: I0122 15:13:03.040945 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrlx8"] Jan 22 15:13:04 crc kubenswrapper[4773]: I0122 15:13:04.074893 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 15:13:04 crc kubenswrapper[4773]: I0122 15:13:04.075269 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 15:13:04 crc kubenswrapper[4773]: I0122 15:13:04.892981 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vrlx8" podUID="b5afe44c-dca9-470b-8e7b-a79cab069def" containerName="registry-server" containerID="cri-o://00410a0113b82718dcb03d98322de95d58e674ffc1c8be9af15ecfeb76a37816" gracePeriod=2 Jan 22 15:13:05 crc kubenswrapper[4773]: I0122 15:13:05.395454 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vrlx8" Jan 22 15:13:05 crc kubenswrapper[4773]: I0122 15:13:05.539472 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5afe44c-dca9-470b-8e7b-a79cab069def-utilities\") pod \"b5afe44c-dca9-470b-8e7b-a79cab069def\" (UID: \"b5afe44c-dca9-470b-8e7b-a79cab069def\") " Jan 22 15:13:05 crc kubenswrapper[4773]: I0122 15:13:05.539582 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjj9p\" (UniqueName: \"kubernetes.io/projected/b5afe44c-dca9-470b-8e7b-a79cab069def-kube-api-access-wjj9p\") pod \"b5afe44c-dca9-470b-8e7b-a79cab069def\" (UID: \"b5afe44c-dca9-470b-8e7b-a79cab069def\") " Jan 22 15:13:05 crc kubenswrapper[4773]: I0122 15:13:05.539766 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5afe44c-dca9-470b-8e7b-a79cab069def-catalog-content\") pod \"b5afe44c-dca9-470b-8e7b-a79cab069def\" (UID: \"b5afe44c-dca9-470b-8e7b-a79cab069def\") " Jan 22 15:13:05 crc kubenswrapper[4773]: I0122 15:13:05.540381 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5afe44c-dca9-470b-8e7b-a79cab069def-utilities" (OuterVolumeSpecName: "utilities") pod "b5afe44c-dca9-470b-8e7b-a79cab069def" (UID: "b5afe44c-dca9-470b-8e7b-a79cab069def"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 15:13:05 crc kubenswrapper[4773]: I0122 15:13:05.540786 4773 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5afe44c-dca9-470b-8e7b-a79cab069def-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 15:13:05 crc kubenswrapper[4773]: I0122 15:13:05.547377 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5afe44c-dca9-470b-8e7b-a79cab069def-kube-api-access-wjj9p" (OuterVolumeSpecName: "kube-api-access-wjj9p") pod "b5afe44c-dca9-470b-8e7b-a79cab069def" (UID: "b5afe44c-dca9-470b-8e7b-a79cab069def"). InnerVolumeSpecName "kube-api-access-wjj9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 15:13:05 crc kubenswrapper[4773]: I0122 15:13:05.573159 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5afe44c-dca9-470b-8e7b-a79cab069def-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b5afe44c-dca9-470b-8e7b-a79cab069def" (UID: "b5afe44c-dca9-470b-8e7b-a79cab069def"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 15:13:05 crc kubenswrapper[4773]: I0122 15:13:05.643750 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjj9p\" (UniqueName: \"kubernetes.io/projected/b5afe44c-dca9-470b-8e7b-a79cab069def-kube-api-access-wjj9p\") on node \"crc\" DevicePath \"\"" Jan 22 15:13:05 crc kubenswrapper[4773]: I0122 15:13:05.643802 4773 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5afe44c-dca9-470b-8e7b-a79cab069def-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 15:13:05 crc kubenswrapper[4773]: I0122 15:13:05.907235 4773 generic.go:334] "Generic (PLEG): container finished" podID="b5afe44c-dca9-470b-8e7b-a79cab069def" containerID="00410a0113b82718dcb03d98322de95d58e674ffc1c8be9af15ecfeb76a37816" exitCode=0 Jan 22 15:13:05 crc kubenswrapper[4773]: I0122 15:13:05.907310 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrlx8" event={"ID":"b5afe44c-dca9-470b-8e7b-a79cab069def","Type":"ContainerDied","Data":"00410a0113b82718dcb03d98322de95d58e674ffc1c8be9af15ecfeb76a37816"} Jan 22 15:13:05 crc kubenswrapper[4773]: I0122 15:13:05.907352 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vrlx8" event={"ID":"b5afe44c-dca9-470b-8e7b-a79cab069def","Type":"ContainerDied","Data":"18bea3d22b94fe8515f61abe371a308217da19f2ee387a46c37e4c1f48bcbc51"} Jan 22 15:13:05 crc kubenswrapper[4773]: I0122 15:13:05.907375 4773 scope.go:117] "RemoveContainer" containerID="00410a0113b82718dcb03d98322de95d58e674ffc1c8be9af15ecfeb76a37816" Jan 22 15:13:05 crc kubenswrapper[4773]: I0122 15:13:05.907569 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vrlx8" Jan 22 15:13:05 crc kubenswrapper[4773]: I0122 15:13:05.966844 4773 scope.go:117] "RemoveContainer" containerID="c95a3e43ea358e91e0a3753e241ab9958b4a2df9c9f8bac151d5ede344279e62" Jan 22 15:13:05 crc kubenswrapper[4773]: I0122 15:13:05.991617 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrlx8"] Jan 22 15:13:06 crc kubenswrapper[4773]: I0122 15:13:06.000267 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vrlx8"] Jan 22 15:13:06 crc kubenswrapper[4773]: I0122 15:13:06.009698 4773 scope.go:117] "RemoveContainer" containerID="c801c69e045c357469b45f994d2e3d908c3ca2df862554226eb04ac55159ec5c" Jan 22 15:13:06 crc kubenswrapper[4773]: I0122 15:13:06.060581 4773 scope.go:117] "RemoveContainer" containerID="00410a0113b82718dcb03d98322de95d58e674ffc1c8be9af15ecfeb76a37816" Jan 22 15:13:06 crc kubenswrapper[4773]: E0122 15:13:06.061577 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00410a0113b82718dcb03d98322de95d58e674ffc1c8be9af15ecfeb76a37816\": container with ID starting with 00410a0113b82718dcb03d98322de95d58e674ffc1c8be9af15ecfeb76a37816 not found: ID does not exist" containerID="00410a0113b82718dcb03d98322de95d58e674ffc1c8be9af15ecfeb76a37816" Jan 22 15:13:06 crc kubenswrapper[4773]: I0122 15:13:06.061693 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00410a0113b82718dcb03d98322de95d58e674ffc1c8be9af15ecfeb76a37816"} err="failed to get container status \"00410a0113b82718dcb03d98322de95d58e674ffc1c8be9af15ecfeb76a37816\": rpc error: code = NotFound desc = could not find container \"00410a0113b82718dcb03d98322de95d58e674ffc1c8be9af15ecfeb76a37816\": container with ID starting with 00410a0113b82718dcb03d98322de95d58e674ffc1c8be9af15ecfeb76a37816 not found: ID does not exist" Jan 22 15:13:06 crc kubenswrapper[4773]: I0122 15:13:06.061793 4773 scope.go:117] "RemoveContainer" containerID="c95a3e43ea358e91e0a3753e241ab9958b4a2df9c9f8bac151d5ede344279e62" Jan 22 15:13:06 crc kubenswrapper[4773]: E0122 15:13:06.062267 4773 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c95a3e43ea358e91e0a3753e241ab9958b4a2df9c9f8bac151d5ede344279e62\": container with ID starting with c95a3e43ea358e91e0a3753e241ab9958b4a2df9c9f8bac151d5ede344279e62 not found: ID does not exist" containerID="c95a3e43ea358e91e0a3753e241ab9958b4a2df9c9f8bac151d5ede344279e62" Jan 22 15:13:06 crc kubenswrapper[4773]: I0122 15:13:06.062384 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c95a3e43ea358e91e0a3753e241ab9958b4a2df9c9f8bac151d5ede344279e62"} err="failed to get container status \"c95a3e43ea358e91e0a3753e241ab9958b4a2df9c9f8bac151d5ede344279e62\": rpc error: code = NotFound desc = could not find container \"c95a3e43ea358e91e0a3753e241ab9958b4a2df9c9f8bac151d5ede344279e62\": container with ID starting with c95a3e43ea358e91e0a3753e241ab9958b4a2df9c9f8bac151d5ede344279e62 not found: ID does not exist" Jan 22 15:13:06 crc kubenswrapper[4773]: I0122 15:13:06.062413 4773 scope.go:117] "RemoveContainer" containerID="c801c69e045c357469b45f994d2e3d908c3ca2df862554226eb04ac55159ec5c" Jan 22 15:13:06 crc kubenswrapper[4773]: E0122 15:13:06.062903 4773 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c801c69e045c357469b45f994d2e3d908c3ca2df862554226eb04ac55159ec5c\": container with ID starting with c801c69e045c357469b45f994d2e3d908c3ca2df862554226eb04ac55159ec5c not found: ID does not exist" containerID="c801c69e045c357469b45f994d2e3d908c3ca2df862554226eb04ac55159ec5c" Jan 22 15:13:06 crc kubenswrapper[4773]: I0122 15:13:06.063001 4773 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c801c69e045c357469b45f994d2e3d908c3ca2df862554226eb04ac55159ec5c"} err="failed to get container status \"c801c69e045c357469b45f994d2e3d908c3ca2df862554226eb04ac55159ec5c\": rpc error: code = NotFound desc = could not find container \"c801c69e045c357469b45f994d2e3d908c3ca2df862554226eb04ac55159ec5c\": container with ID starting with c801c69e045c357469b45f994d2e3d908c3ca2df862554226eb04ac55159ec5c not found: ID does not exist" Jan 22 15:13:06 crc kubenswrapper[4773]: I0122 15:13:06.673024 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5afe44c-dca9-470b-8e7b-a79cab069def" path="/var/lib/kubelet/pods/b5afe44c-dca9-470b-8e7b-a79cab069def/volumes" Jan 22 15:13:34 crc kubenswrapper[4773]: I0122 15:13:34.074304 4773 patch_prober.go:28] interesting pod/machine-config-daemon-hhxm5 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 15:13:34 crc kubenswrapper[4773]: I0122 15:13:34.075717 4773 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 15:13:34 crc kubenswrapper[4773]: I0122 15:13:34.075838 4773 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" Jan 22 15:13:34 crc kubenswrapper[4773]: I0122 15:13:34.076824 4773 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d2cbf053f518c2712609b9c66b14729fc3cda251af390a830ad262a0a865163e"} pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 15:13:34 crc kubenswrapper[4773]: I0122 15:13:34.076995 4773 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerName="machine-config-daemon" containerID="cri-o://d2cbf053f518c2712609b9c66b14729fc3cda251af390a830ad262a0a865163e" gracePeriod=600 Jan 22 15:13:34 crc kubenswrapper[4773]: I0122 15:13:34.239639 4773 generic.go:334] "Generic (PLEG): container finished" podID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" containerID="d2cbf053f518c2712609b9c66b14729fc3cda251af390a830ad262a0a865163e" exitCode=0 Jan 22 15:13:34 crc kubenswrapper[4773]: I0122 15:13:34.239716 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" 
event={"ID":"d5a0935c-0094-42bc-a9e7-bf3fd046e23d","Type":"ContainerDied","Data":"d2cbf053f518c2712609b9c66b14729fc3cda251af390a830ad262a0a865163e"} Jan 22 15:13:34 crc kubenswrapper[4773]: I0122 15:13:34.240027 4773 scope.go:117] "RemoveContainer" containerID="8775d63fdcde1d12deda78463190e6b3e682b644f26c578123126ce25d4c03f0" Jan 22 15:13:34 crc kubenswrapper[4773]: E0122 15:13:34.272237 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:13:35 crc kubenswrapper[4773]: I0122 15:13:35.260393 4773 scope.go:117] "RemoveContainer" containerID="d2cbf053f518c2712609b9c66b14729fc3cda251af390a830ad262a0a865163e" Jan 22 15:13:35 crc kubenswrapper[4773]: E0122 15:13:35.260837 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:13:46 crc kubenswrapper[4773]: I0122 15:13:46.659735 4773 scope.go:117] "RemoveContainer" containerID="d2cbf053f518c2712609b9c66b14729fc3cda251af390a830ad262a0a865163e" Jan 22 15:13:46 crc kubenswrapper[4773]: E0122 15:13:46.660606 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:14:01 crc kubenswrapper[4773]: I0122 15:14:01.657816 4773 scope.go:117] "RemoveContainer" containerID="d2cbf053f518c2712609b9c66b14729fc3cda251af390a830ad262a0a865163e" Jan 22 15:14:01 crc kubenswrapper[4773]: E0122 15:14:01.659701 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:14:12 crc kubenswrapper[4773]: I0122 15:14:12.668031 4773 scope.go:117] "RemoveContainer" containerID="d2cbf053f518c2712609b9c66b14729fc3cda251af390a830ad262a0a865163e" Jan 22 15:14:12 crc kubenswrapper[4773]: E0122 15:14:12.669013 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:14:17 crc kubenswrapper[4773]: 
I0122 15:14:17.085309 4773 scope.go:117] "RemoveContainer" containerID="3973e322404dec28a286ba519c870b92612d037ecee774b0f4aa8734aaa02f3e" Jan 22 15:14:17 crc kubenswrapper[4773]: I0122 15:14:17.113249 4773 scope.go:117] "RemoveContainer" containerID="20b59fe56dc766037f3ea18979d96a7c345df22c3fe950fdac1c4d240bf2f626" Jan 22 15:14:26 crc kubenswrapper[4773]: I0122 15:14:26.659593 4773 scope.go:117] "RemoveContainer" containerID="d2cbf053f518c2712609b9c66b14729fc3cda251af390a830ad262a0a865163e" Jan 22 15:14:26 crc kubenswrapper[4773]: E0122 15:14:26.660568 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:14:38 crc kubenswrapper[4773]: I0122 15:14:38.658361 4773 scope.go:117] "RemoveContainer" containerID="d2cbf053f518c2712609b9c66b14729fc3cda251af390a830ad262a0a865163e" Jan 22 15:14:38 crc kubenswrapper[4773]: E0122 15:14:38.659081 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:14:50 crc kubenswrapper[4773]: I0122 15:14:50.663941 4773 scope.go:117] "RemoveContainer" containerID="d2cbf053f518c2712609b9c66b14729fc3cda251af390a830ad262a0a865163e" Jan 22 15:14:50 crc kubenswrapper[4773]: E0122 15:14:50.668145 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:15:00 crc kubenswrapper[4773]: I0122 15:15:00.155888 4773 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b"] Jan 22 15:15:00 crc kubenswrapper[4773]: E0122 15:15:00.156842 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5afe44c-dca9-470b-8e7b-a79cab069def" containerName="extract-utilities" Jan 22 15:15:00 crc kubenswrapper[4773]: I0122 15:15:00.156855 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5afe44c-dca9-470b-8e7b-a79cab069def" containerName="extract-utilities" Jan 22 15:15:00 crc kubenswrapper[4773]: E0122 15:15:00.156899 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5afe44c-dca9-470b-8e7b-a79cab069def" containerName="extract-content" Jan 22 15:15:00 crc kubenswrapper[4773]: I0122 15:15:00.156905 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5afe44c-dca9-470b-8e7b-a79cab069def" containerName="extract-content" Jan 22 15:15:00 crc kubenswrapper[4773]: E0122 15:15:00.156919 4773 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5afe44c-dca9-470b-8e7b-a79cab069def" containerName="registry-server" Jan 22 15:15:00 crc 
kubenswrapper[4773]: I0122 15:15:00.156924 4773 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5afe44c-dca9-470b-8e7b-a79cab069def" containerName="registry-server" Jan 22 15:15:00 crc kubenswrapper[4773]: I0122 15:15:00.157129 4773 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5afe44c-dca9-470b-8e7b-a79cab069def" containerName="registry-server" Jan 22 15:15:00 crc kubenswrapper[4773]: I0122 15:15:00.157941 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b" Jan 22 15:15:00 crc kubenswrapper[4773]: I0122 15:15:00.160030 4773 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 15:15:00 crc kubenswrapper[4773]: I0122 15:15:00.160246 4773 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 15:15:00 crc kubenswrapper[4773]: I0122 15:15:00.173673 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b"] Jan 22 15:15:00 crc kubenswrapper[4773]: I0122 15:15:00.219569 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2x2w\" (UniqueName: \"kubernetes.io/projected/497eb94b-230f-4859-ac3f-f189add3dd2d-kube-api-access-f2x2w\") pod \"collect-profiles-29484915-wtv2b\" (UID: \"497eb94b-230f-4859-ac3f-f189add3dd2d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b" Jan 22 15:15:00 crc kubenswrapper[4773]: I0122 15:15:00.219755 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/497eb94b-230f-4859-ac3f-f189add3dd2d-secret-volume\") pod \"collect-profiles-29484915-wtv2b\" (UID: \"497eb94b-230f-4859-ac3f-f189add3dd2d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b" Jan 22 15:15:00 crc kubenswrapper[4773]: I0122 15:15:00.219794 4773 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/497eb94b-230f-4859-ac3f-f189add3dd2d-config-volume\") pod \"collect-profiles-29484915-wtv2b\" (UID: \"497eb94b-230f-4859-ac3f-f189add3dd2d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b" Jan 22 15:15:00 crc kubenswrapper[4773]: I0122 15:15:00.321918 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/497eb94b-230f-4859-ac3f-f189add3dd2d-secret-volume\") pod \"collect-profiles-29484915-wtv2b\" (UID: \"497eb94b-230f-4859-ac3f-f189add3dd2d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b" Jan 22 15:15:00 crc kubenswrapper[4773]: I0122 15:15:00.321983 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/497eb94b-230f-4859-ac3f-f189add3dd2d-config-volume\") pod \"collect-profiles-29484915-wtv2b\" (UID: \"497eb94b-230f-4859-ac3f-f189add3dd2d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b" Jan 22 15:15:00 crc kubenswrapper[4773]: I0122 15:15:00.322213 4773 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2x2w\" (UniqueName: 
\"kubernetes.io/projected/497eb94b-230f-4859-ac3f-f189add3dd2d-kube-api-access-f2x2w\") pod \"collect-profiles-29484915-wtv2b\" (UID: \"497eb94b-230f-4859-ac3f-f189add3dd2d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b" Jan 22 15:15:00 crc kubenswrapper[4773]: I0122 15:15:00.323140 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/497eb94b-230f-4859-ac3f-f189add3dd2d-config-volume\") pod \"collect-profiles-29484915-wtv2b\" (UID: \"497eb94b-230f-4859-ac3f-f189add3dd2d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b" Jan 22 15:15:00 crc kubenswrapper[4773]: I0122 15:15:00.331810 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/497eb94b-230f-4859-ac3f-f189add3dd2d-secret-volume\") pod \"collect-profiles-29484915-wtv2b\" (UID: \"497eb94b-230f-4859-ac3f-f189add3dd2d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b" Jan 22 15:15:00 crc kubenswrapper[4773]: I0122 15:15:00.338928 4773 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2x2w\" (UniqueName: \"kubernetes.io/projected/497eb94b-230f-4859-ac3f-f189add3dd2d-kube-api-access-f2x2w\") pod \"collect-profiles-29484915-wtv2b\" (UID: \"497eb94b-230f-4859-ac3f-f189add3dd2d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b" Jan 22 15:15:00 crc kubenswrapper[4773]: I0122 15:15:00.490169 4773 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b" Jan 22 15:15:00 crc kubenswrapper[4773]: I0122 15:15:00.984555 4773 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b"] Jan 22 15:15:01 crc kubenswrapper[4773]: I0122 15:15:01.209860 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b" event={"ID":"497eb94b-230f-4859-ac3f-f189add3dd2d","Type":"ContainerStarted","Data":"83b54e560baff97471fa05180e6fdce7fd9c4f1c9ef31b50dd16b63fa142700c"} Jan 22 15:15:01 crc kubenswrapper[4773]: I0122 15:15:01.210207 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b" event={"ID":"497eb94b-230f-4859-ac3f-f189add3dd2d","Type":"ContainerStarted","Data":"c25c93c6956b3bd67b89dca0f9d1f0b68c5e6386e39f060d07e5925d5f76cd54"} Jan 22 15:15:01 crc kubenswrapper[4773]: I0122 15:15:01.234024 4773 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b" podStartSLOduration=1.234001777 podStartE2EDuration="1.234001777s" podCreationTimestamp="2026-01-22 15:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 15:15:01.223026349 +0000 UTC m=+12008.801142194" watchObservedRunningTime="2026-01-22 15:15:01.234001777 +0000 UTC m=+12008.812117602" Jan 22 15:15:02 crc kubenswrapper[4773]: I0122 15:15:02.233458 4773 generic.go:334] "Generic (PLEG): container finished" podID="497eb94b-230f-4859-ac3f-f189add3dd2d" containerID="83b54e560baff97471fa05180e6fdce7fd9c4f1c9ef31b50dd16b63fa142700c" exitCode=0 Jan 22 15:15:02 crc kubenswrapper[4773]: I0122 15:15:02.233708 4773 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b" event={"ID":"497eb94b-230f-4859-ac3f-f189add3dd2d","Type":"ContainerDied","Data":"83b54e560baff97471fa05180e6fdce7fd9c4f1c9ef31b50dd16b63fa142700c"} Jan 22 15:15:02 crc kubenswrapper[4773]: I0122 15:15:02.683968 4773 scope.go:117] "RemoveContainer" containerID="d2cbf053f518c2712609b9c66b14729fc3cda251af390a830ad262a0a865163e" Jan 22 15:15:02 crc kubenswrapper[4773]: E0122 15:15:02.684378 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:15:03 crc kubenswrapper[4773]: I0122 15:15:03.579055 4773 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b" Jan 22 15:15:03 crc kubenswrapper[4773]: I0122 15:15:03.598843 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2x2w\" (UniqueName: \"kubernetes.io/projected/497eb94b-230f-4859-ac3f-f189add3dd2d-kube-api-access-f2x2w\") pod \"497eb94b-230f-4859-ac3f-f189add3dd2d\" (UID: \"497eb94b-230f-4859-ac3f-f189add3dd2d\") " Jan 22 15:15:03 crc kubenswrapper[4773]: I0122 15:15:03.598908 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/497eb94b-230f-4859-ac3f-f189add3dd2d-secret-volume\") pod \"497eb94b-230f-4859-ac3f-f189add3dd2d\" (UID: \"497eb94b-230f-4859-ac3f-f189add3dd2d\") " Jan 22 15:15:03 crc kubenswrapper[4773]: I0122 15:15:03.605088 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/497eb94b-230f-4859-ac3f-f189add3dd2d-kube-api-access-f2x2w" (OuterVolumeSpecName: "kube-api-access-f2x2w") pod "497eb94b-230f-4859-ac3f-f189add3dd2d" (UID: "497eb94b-230f-4859-ac3f-f189add3dd2d"). InnerVolumeSpecName "kube-api-access-f2x2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 15:15:03 crc kubenswrapper[4773]: I0122 15:15:03.607957 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/497eb94b-230f-4859-ac3f-f189add3dd2d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "497eb94b-230f-4859-ac3f-f189add3dd2d" (UID: "497eb94b-230f-4859-ac3f-f189add3dd2d"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 15:15:03 crc kubenswrapper[4773]: I0122 15:15:03.701107 4773 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/497eb94b-230f-4859-ac3f-f189add3dd2d-config-volume\") pod \"497eb94b-230f-4859-ac3f-f189add3dd2d\" (UID: \"497eb94b-230f-4859-ac3f-f189add3dd2d\") " Jan 22 15:15:03 crc kubenswrapper[4773]: I0122 15:15:03.701669 4773 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2x2w\" (UniqueName: \"kubernetes.io/projected/497eb94b-230f-4859-ac3f-f189add3dd2d-kube-api-access-f2x2w\") on node \"crc\" DevicePath \"\"" Jan 22 15:15:03 crc kubenswrapper[4773]: I0122 15:15:03.701694 4773 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/497eb94b-230f-4859-ac3f-f189add3dd2d-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 15:15:03 crc kubenswrapper[4773]: I0122 15:15:03.701897 4773 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/497eb94b-230f-4859-ac3f-f189add3dd2d-config-volume" (OuterVolumeSpecName: "config-volume") pod "497eb94b-230f-4859-ac3f-f189add3dd2d" (UID: "497eb94b-230f-4859-ac3f-f189add3dd2d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 15:15:03 crc kubenswrapper[4773]: I0122 15:15:03.803359 4773 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/497eb94b-230f-4859-ac3f-f189add3dd2d-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 15:15:04 crc kubenswrapper[4773]: I0122 15:15:04.259520 4773 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b" event={"ID":"497eb94b-230f-4859-ac3f-f189add3dd2d","Type":"ContainerDied","Data":"c25c93c6956b3bd67b89dca0f9d1f0b68c5e6386e39f060d07e5925d5f76cd54"} Jan 22 15:15:04 crc kubenswrapper[4773]: I0122 15:15:04.259578 4773 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c25c93c6956b3bd67b89dca0f9d1f0b68c5e6386e39f060d07e5925d5f76cd54" Jan 22 15:15:04 crc kubenswrapper[4773]: I0122 15:15:04.259644 4773 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484915-wtv2b" Jan 22 15:15:04 crc kubenswrapper[4773]: I0122 15:15:04.329194 4773 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp"] Jan 22 15:15:04 crc kubenswrapper[4773]: I0122 15:15:04.337845 4773 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484870-5g6kp"] Jan 22 15:15:04 crc kubenswrapper[4773]: I0122 15:15:04.682354 4773 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc2ed8a9-37f2-4e23-8937-945c879bf347" path="/var/lib/kubelet/pods/fc2ed8a9-37f2-4e23-8937-945c879bf347/volumes" Jan 22 15:15:15 crc kubenswrapper[4773]: I0122 15:15:15.657724 4773 scope.go:117] "RemoveContainer" containerID="d2cbf053f518c2712609b9c66b14729fc3cda251af390a830ad262a0a865163e" Jan 22 15:15:15 crc kubenswrapper[4773]: E0122 15:15:15.658925 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" Jan 22 15:15:17 crc kubenswrapper[4773]: I0122 15:15:17.182462 4773 scope.go:117] "RemoveContainer" containerID="2cecddfbca3f291b30e8833f732b34dc39d94274cfad33ae4a884d783f761649" Jan 22 15:15:17 crc kubenswrapper[4773]: I0122 15:15:17.208302 4773 scope.go:117] "RemoveContainer" containerID="ab5969e00a4fd6f9b4cb7a66aac2ef5b72a9a3eeb71759f4f3324372b5d2e412" Jan 22 15:15:28 crc kubenswrapper[4773]: I0122 15:15:28.659434 4773 scope.go:117] "RemoveContainer" containerID="d2cbf053f518c2712609b9c66b14729fc3cda251af390a830ad262a0a865163e" Jan 22 15:15:28 crc kubenswrapper[4773]: E0122 15:15:28.660054 4773 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-hhxm5_openshift-machine-config-operator(d5a0935c-0094-42bc-a9e7-bf3fd046e23d)\"" pod="openshift-machine-config-operator/machine-config-daemon-hhxm5" podUID="d5a0935c-0094-42bc-a9e7-bf3fd046e23d" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515134437444024456 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015134437444017373 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015134407453016513 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015134407453015463 5ustar corecore